From: Martin Stockhammer Date: Sun, 28 Jul 2019 13:24:13 +0000 (+0200) Subject: Refactoring to StorageAsset access X-Git-Url: https://source.dussan.org/?a=commitdiff_plain;h=bb3b074aaf5a2be0d81c950ecf1588fe8efa3316;p=archiva.git Refactoring to StorageAsset access --- diff --git a/archiva-modules/archiva-base/archiva-configuration/pom.xml b/archiva-modules/archiva-base/archiva-configuration/pom.xml index fd4565cce..5961fd259 100644 --- a/archiva-modules/archiva-base/archiva-configuration/pom.xml +++ b/archiva-modules/archiva-base/archiva-configuration/pom.xml @@ -33,6 +33,10 @@ + + org.apache.archiva + archiva-policies + org.apache.archiva.redback.components.registry spring-registry-api diff --git a/archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/AbstractRepositoryPurge.java b/archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/AbstractRepositoryPurge.java index 44b7daa19..6354e3a2a 100644 --- a/archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/AbstractRepositoryPurge.java +++ b/archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/AbstractRepositoryPurge.java @@ -31,6 +31,8 @@ import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.repository.ContentNotFoundException; import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.events.RepositoryListener; +import org.apache.archiva.repository.storage.StorageAsset; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -211,22 +213,22 @@ public abstract class AbstractRepositoryPurge log.error( "Error during metadata retrieval {}: {}", metaBaseId, e.getMessage( ) ); } } - Path artifactFile = repository.toFile( reference ); + StorageAsset artifactFile = repository.toFile( reference ); for ( RepositoryListener listener : listeners ) { listener.deleteArtifact( metadataRepository, repository.getId( ), reference.getGroupId( ), reference.getArtifactId( ), reference.getVersion( ), - artifactFile.getFileName( ).toString( ) ); + artifactFile.getName( )); } try { - Files.delete( artifactFile ); - log.debug( "File deleted: {}", artifactFile.toAbsolutePath( ) ); + artifactFile.getStorage().removeAsset(artifactFile); + log.debug( "File deleted: {}", artifactFile ); } catch ( IOException e ) { - log.error( "Could not delete file {}: {}", artifactFile.toAbsolutePath( ), e.getMessage( ), e ); + log.error( "Could not delete file {}: {}", artifactFile.toString(), e.getMessage( ), e ); continue; } try @@ -364,11 +366,11 @@ public abstract class AbstractRepositoryPurge } } - private void deleteSilently( Path path ) + private void deleteSilently( StorageAsset path ) { try { - Files.deleteIfExists( path ); + path.getStorage().removeAsset(path); triggerAuditEvent( repository.getRepository( ).getId( ), path.toString( ), AuditEvent.PURGE_FILE ); } catch ( IOException e ) @@ -387,22 +389,23 @@ public abstract class AbstractRepositoryPurge * * @param artifactFile the file to base off of. 
*/ - private void purgeSupportFiles( Path artifactFile ) + private void purgeSupportFiles( StorageAsset artifactFile ) { - Path parentDir = artifactFile.getParent( ); + StorageAsset parentDir = artifactFile.getParent( ); - if ( !Files.exists( parentDir ) ) + if ( !parentDir.exists() ) { return; } - final String artifactName = artifactFile.getFileName( ).toString( ); + final String artifactName = artifactFile.getName( ); try { - Files.find( parentDir, 3, - ( path, basicFileAttributes ) -> path.getFileName( ).toString( ).startsWith( artifactName ) - && Files.isRegularFile( path ) ).forEach( this::deleteSilently ); + + StorageUtil.recurse(parentDir, a -> { + if (!a.isContainer() && a.getName().startsWith(artifactName)) deleteSilently(a); + }, true, 3 ); } catch ( IOException e ) { diff --git a/archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/DaysOldRepositoryPurge.java b/archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/DaysOldRepositoryPurge.java index 6c6607d2d..9ff31ec60 100644 --- a/archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/DaysOldRepositoryPurge.java +++ b/archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/DaysOldRepositoryPurge.java @@ -28,6 +28,7 @@ import org.apache.archiva.repository.ContentNotFoundException; import org.apache.archiva.repository.LayoutException; import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.events.RepositoryListener; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.time.DateUtils; import java.io.IOException; @@ -115,12 +116,12 @@ public class DaysOldRepositoryPurge artifactFile.toAbsolutePath( ).toString() ); newArtifactReference.setVersion( version ); - Path newArtifactFile = repository.toFile( newArtifactReference ); + StorageAsset newArtifactFile = repository.toFile( newArtifactReference ); // Is this a generic snapshot "1.0-SNAPSHOT" ?
if ( VersionUtil.isGenericSnapshot( newArtifactReference.getVersion( ) ) ) { - if ( Files.getLastModifiedTime( newArtifactFile ).toMillis() < olderThanThisDate.getTimeInMillis( ) ) + if ( newArtifactFile.getModificationTime().toEpochMilli() < olderThanThisDate.getTimeInMillis( ) ) { artifactsToDelete.addAll( repository.getRelatedArtifacts( newArtifactReference ) ); } @@ -138,7 +139,7 @@ public class DaysOldRepositoryPurge } purge( artifactsToDelete ); } - catch ( ContentNotFoundException | IOException e ) + catch ( ContentNotFoundException e ) { throw new RepositoryPurgeException( e.getMessage( ), e ); } diff --git a/archiva-modules/archiva-base/archiva-policies/pom.xml b/archiva-modules/archiva-base/archiva-policies/pom.xml index 63acd6750..ab5f64081 100644 --- a/archiva-modules/archiva-base/archiva-policies/pom.xml +++ b/archiva-modules/archiva-base/archiva-policies/pom.xml @@ -38,6 +38,10 @@ org.apache.archiva archiva-common + + org.apache.archiva + archiva-storage-api + org.apache.archiva archiva-checksum @@ -81,6 +85,11 @@ + + org.apache.archiva + archiva-storage-fs + test + org.apache.archiva archiva-test-utils diff --git a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/AbstractUpdatePolicy.java b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/AbstractUpdatePolicy.java index cf1c0da4a..394917cdd 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/AbstractUpdatePolicy.java +++ b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/AbstractUpdatePolicy.java @@ -20,16 +20,13 @@ package org.apache.archiva.policies; */ import org.apache.archiva.common.utils.VersionUtil; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.nio.file.Files; import java.util.ArrayList; import java.util.Calendar; -import java.util.Date; import java.util.List; import java.util.Properties; diff --git a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/CachedFailuresPolicy.java b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/CachedFailuresPolicy.java index ac20dbc8e..7cd1294d0 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/CachedFailuresPolicy.java +++ b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/CachedFailuresPolicy.java @@ -20,7 +20,7 @@ package org.apache.archiva.policies; */ import org.apache.archiva.policies.urlcache.UrlFailureCache; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/ChecksumPolicy.java b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/ChecksumPolicy.java index 8f6885a6b..99c5ddab4 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/ChecksumPolicy.java +++ b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/ChecksumPolicy.java @@ -22,7 +22,7 @@ package 
org.apache.archiva.policies; import org.apache.archiva.checksum.ChecksumAlgorithm; import org.apache.archiva.checksum.ChecksummedFile; import org.apache.archiva.checksum.UpdateStatus; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadErrorPolicy.java b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadErrorPolicy.java index 28de5336b..43ae03e0f 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadErrorPolicy.java +++ b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadErrorPolicy.java @@ -19,7 +19,7 @@ package org.apache.archiva.policies; * under the License. */ -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import java.util.Map; import java.util.Properties; diff --git a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadPolicy.java b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadPolicy.java index f9a144331..1ee2713db 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadPolicy.java +++ b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadPolicy.java @@ -19,7 +19,7 @@ package org.apache.archiva.policies; * under the License. */ -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import java.util.Properties; diff --git a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsDownloadPolicy.java b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsDownloadPolicy.java index 7a0baefe1..7726ebb86 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsDownloadPolicy.java +++ b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsDownloadPolicy.java @@ -19,7 +19,7 @@ package org.apache.archiva.policies; * under the License. */ -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsOnUpdateDownloadPolicy.java b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsOnUpdateDownloadPolicy.java index 9b22a9b23..a50c1b9e0 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsOnUpdateDownloadPolicy.java +++ b/archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsOnUpdateDownloadPolicy.java @@ -19,11 +19,10 @@ package org.apache.archiva.policies; * under the License. 
*/ -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import org.springframework.stereotype.Service; -import java.nio.file.Files; import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/CachedFailuresPolicyTest.java b/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/CachedFailuresPolicyTest.java index 2c9995c0b..d27c392be 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/CachedFailuresPolicyTest.java +++ b/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/CachedFailuresPolicyTest.java @@ -20,7 +20,10 @@ package org.apache.archiva.policies; */ import junit.framework.TestCase; +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.policies.urlcache.UrlFailureCache; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.junit.Test; import org.junit.runner.RunWith; @@ -28,6 +31,7 @@ import org.springframework.test.context.ContextConfiguration; import javax.inject.Inject; import javax.inject.Named; +import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Properties; @@ -47,6 +51,8 @@ public class CachedFailuresPolicyTest @Inject private UrlFailureCache urlFailureCache; + private FilesystemStorage filesystemStorage; + @Inject @Named( value = "preDownloadPolicy#cache-failures" ) DownloadPolicy downloadPolicy; @@ -57,9 +63,11 @@ public class CachedFailuresPolicyTest return downloadPolicy; } - private Path getFile() - { - return Paths.get( "target/cache-failures/" + getName() + ".txt" ); + private StorageAsset getFile() throws IOException { + if (filesystemStorage==null) { + filesystemStorage = new FilesystemStorage(Paths.get("target/cache-failures"), new DefaultFileLockManager()); + } + return filesystemStorage.getAsset( getName() + ".txt" ); } private Properties createRequest() @@ -74,7 +82,7 @@ public class CachedFailuresPolicyTest throws Exception { DownloadPolicy policy = lookupPolicy(); - Path localFile = getFile(); + StorageAsset localFile = getFile(); Properties request = createRequest(); request.setProperty( "url", "http://a.bad.hostname.maven.org/path/to/resource.txt" ); @@ -88,7 +96,7 @@ public class CachedFailuresPolicyTest { DownloadPolicy policy = lookupPolicy(); - Path localFile = getFile(); + StorageAsset localFile = getFile(); Properties request = createRequest(); // make unique name String url = "http://a.bad.hostname.maven.org/path/to/resource"+ System.currentTimeMillis() +".txt"; diff --git a/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ChecksumPolicyTest.java b/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ChecksumPolicyTest.java index 3faad0b97..4a8c6cb81 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ChecksumPolicyTest.java +++ b/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ChecksumPolicyTest.java @@ -19,6 +19,9 @@ package org.apache.archiva.policies; * under the License. 
*/ +import org.apache.archiva.common.filelock.DefaultFileLockManager; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.commons.io.FileUtils; import org.junit.Rule; @@ -31,6 +34,7 @@ import javax.inject.Inject; import javax.inject.Named; import java.io.BufferedReader; import java.io.FileReader; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -51,6 +55,8 @@ public class ChecksumPolicyTest private static final String BAD = "bad"; + private static FilesystemStorage filesystemStorage; + @Inject @Named( value = "postDownloadPolicy#checksum" ) PostDownloadPolicy downloadPolicy; @@ -195,7 +201,7 @@ public class ChecksumPolicyTest throws Exception { PostDownloadPolicy policy = lookupPolicy(); - Path localFile = createTestableFiles( null, null ); + StorageAsset localFile = createTestableFiles( null, null ); Properties request = createRequest(); policy.applyPolicy( ChecksumPolicy.IGNORE, request, localFile ); @@ -205,7 +211,7 @@ public class ChecksumPolicyTest throws Exception { PostDownloadPolicy policy = lookupPolicy(); - Path localFile = createTestableFiles( md5State, sha1State ); + StorageAsset localFile = createTestableFiles( md5State, sha1State ); Properties request = createRequest(); boolean actualResult; @@ -220,9 +226,9 @@ public class ChecksumPolicyTest actualResult = false; String msg = createMessage( ChecksumPolicy.FAIL, md5State, sha1State ); - assertFalse( msg + " local file should not exist:", Files.exists(localFile) ); - Path md5File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".sha1" ); - Path sha1File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".md5" ); + assertFalse( msg + " local file should not exist:", localFile.exists() ); + Path md5File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".sha1" ); + Path sha1File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".md5" ); assertFalse( msg + " local md5 file should not exist:", Files.exists(md5File) ); assertFalse( msg + " local sha1 file should not exist:", Files.exists(sha1File) ); } @@ -234,7 +240,7 @@ public class ChecksumPolicyTest throws Exception { PostDownloadPolicy policy = lookupPolicy(); - Path localFile = createTestableFiles( md5State, sha1State ); + StorageAsset localFile = createTestableFiles( md5State, sha1State ); Properties request = createRequest(); boolean actualResult; @@ -252,8 +258,8 @@ public class ChecksumPolicyTest assertEquals( createMessage( ChecksumPolicy.FIX, md5State, sha1State ), expectedResult, actualResult ); // End result should be legitimate SHA1 and MD5 files. 
- Path md5File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".md5" ); - Path sha1File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".sha1" ); + Path md5File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".md5" ); + Path sha1File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".sha1" ); assertTrue( "ChecksumPolicy.apply(FIX) md5 should exist.", Files.exists(md5File) && Files.isRegularFile(md5File) ); assertTrue( "ChecksumPolicy.apply(FIX) sha1 should exist.", Files.exists(sha1File) && Files.isRegularFile(sha1File) ); @@ -336,37 +342,41 @@ public class ChecksumPolicyTest return request; } - private Path createTestableFiles( String md5State, String sha1State ) + private StorageAsset createTestableFiles(String md5State, String sha1State ) throws Exception { - Path sourceDir = getTestFile( "src/test/resources/checksums/" ); - Path destDir = getTestFile( "target/checksum-tests/" + name.getMethodName() + "/" ); + FilesystemStorage fs = new FilesystemStorage(Paths.get("target/checksum-tests"), new DefaultFileLockManager()); + StorageAsset sourceDir = getTestFile( "src/test/resources/checksums/" ); + StorageAsset destDir = getTestFile( "target/checksum-tests/" + name.getMethodName() + "/" ); - FileUtils.copyFileToDirectory( sourceDir.resolve("artifact.jar" ).toFile(), destDir.toFile() ); + FileUtils.copyFileToDirectory( sourceDir.getFilePath().resolve("artifact.jar" ).toFile(), destDir.getFilePath().toFile() ); if ( md5State != null ) { - Path md5File = sourceDir.resolve("artifact.jar.md5-" + md5State ); + Path md5File = sourceDir.getFilePath().resolve("artifact.jar.md5-" + md5State ); assertTrue( "Testable file exists: " + md5File.getFileName() + ":", Files.exists(md5File) && Files.isRegularFile(md5File) ); - Path destFile = destDir.resolve("artifact.jar.md5" ); + Path destFile = destDir.getFilePath().resolve("artifact.jar.md5" ); FileUtils.copyFile( md5File.toFile(), destFile.toFile() ); } if ( sha1State != null ) { - Path sha1File = sourceDir.resolve("artifact.jar.sha1-" + sha1State ); + Path sha1File = sourceDir.getFilePath().resolve("artifact.jar.sha1-" + sha1State ); assertTrue( "Testable file exists: " + sha1File.getFileName() + ":", Files.exists(sha1File) && Files.isRegularFile(sha1File) ); - Path destFile = destDir.resolve("artifact.jar.sha1" ); + Path destFile = destDir.getFilePath().resolve("artifact.jar.sha1" ); FileUtils.copyFile( sha1File.toFile(), destFile.toFile() ); } - Path localFile = destDir.resolve("artifact.jar" ); - return localFile; + + StorageAsset localAsset = fs.getAsset("artifact.jar"); + return localAsset; } - public static Path getTestFile( String path ) - { - return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path ); + public static StorageAsset getTestFile( String path ) throws IOException { + if (filesystemStorage==null) { + filesystemStorage = new FilesystemStorage(Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()), new DefaultFileLockManager()); + } + return filesystemStorage.getAsset( path ); } } diff --git a/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ReleasePolicyTest.java b/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ReleasePolicyTest.java index adf341b29..3c5bae066 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ReleasePolicyTest.java +++ 
b/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ReleasePolicyTest.java @@ -20,6 +20,7 @@ package org.apache.archiva.policies; */ import junit.framework.TestCase; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.junit.Before; import org.junit.Test; @@ -338,17 +339,17 @@ public class ReleasePolicyTest request.setProperty( "version", "2.0" ); } - Path targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" ); - Path localFile = targetDir.resolve( path ); + StorageAsset targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" ); + StorageAsset localFile = targetDir.resolve( path ); - Files.deleteIfExists( localFile ); + Files.deleteIfExists( localFile.getFilePath() ); if ( createLocalFile ) { - Files.createDirectories( localFile.getParent()); - org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile, FILE_ENCODING, "random-junk" ); - Files.setLastModifiedTime( localFile, - FileTime.fromMillis(Files.getLastModifiedTime(localFile).toMillis() - generatedLocalFileUpdateDelta)); + Files.createDirectories( localFile.getParent().getFilePath()); + org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile.getFilePath(), FILE_ENCODING, "random-junk" ); + Files.setLastModifiedTime( localFile.getFilePath(), + FileTime.fromMillis(Files.getLastModifiedTime(localFile.getFilePath()).toMillis() - generatedLocalFileUpdateDelta)); } policy.applyPolicy( setting, request, localFile ); diff --git a/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/SnapshotsPolicyTest.java b/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/SnapshotsPolicyTest.java index 5272777fe..a68ae8dd9 100644 --- a/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/SnapshotsPolicyTest.java +++ b/archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/SnapshotsPolicyTest.java @@ -20,6 +20,8 @@ package org.apache.archiva.policies; */ import junit.framework.TestCase; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.junit.Before; import org.junit.Test; @@ -80,6 +82,8 @@ public class SnapshotsPolicyTest @Inject @Named(value="preDownloadPolicy#snapshots") PreDownloadPolicy policy; + private FilesystemStorage filesystemStorage; + private PreDownloadPolicy lookupPolicy() throws Exception { @@ -337,17 +341,17 @@ public class SnapshotsPolicyTest request.setProperty( "version", "2.0" ); } - Path targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" ); - Path localFile = targetDir.resolve( path ); + StorageAsset targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" ); + StorageAsset localFile = targetDir.resolve( path ); - Files.deleteIfExists( localFile ); + Files.deleteIfExists( localFile.getFilePath() ); if ( createLocalFile ) { - Files.createDirectories( localFile.getParent()); - org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile, FILE_ENCODING, "random-junk" ); - Files.setLastModifiedTime( localFile, - FileTime.fromMillis( Files.getLastModifiedTime( localFile ).toMillis() - generatedLocalFileUpdateDelta )); + Files.createDirectories( localFile.getParent().getFilePath() ); + org.apache.archiva.common.utils.FileUtils.writeStringToFile( 
localFile.getFilePath(), FILE_ENCODING, "random-junk" ); + Files.setLastModifiedTime( localFile.getFilePath(), + FileTime.fromMillis( Files.getLastModifiedTime( localFile.getFilePath() ).toMillis() - generatedLocalFileUpdateDelta )); } policy.applyPolicy( setting, request, localFile ); diff --git a/archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/ProxyFetchResult.java b/archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/ProxyFetchResult.java index 700e4a560..b4faf2da7 100644 --- a/archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/ProxyFetchResult.java +++ b/archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/ProxyFetchResult.java @@ -20,9 +20,7 @@ package org.apache.archiva.proxy.model; */ -import org.apache.archiva.repository.content.StorageAsset; - -import java.nio.file.Path; +import org.apache.archiva.repository.storage.StorageAsset; /** * A result from a proxy fetch operation. diff --git a/archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/RepositoryProxyHandler.java b/archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/RepositoryProxyHandler.java index f82d55514..efcd901f0 100644 --- a/archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/RepositoryProxyHandler.java +++ b/archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/RepositoryProxyHandler.java @@ -23,7 +23,7 @@ import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.policies.ProxyDownloadException; import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.RepositoryType; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import java.util.List; import java.util.Map; diff --git a/archiva-modules/archiva-base/archiva-proxy/src/main/java/org/apache/archiva/proxy/DefaultRepositoryProxyHandler.java b/archiva-modules/archiva-base/archiva-proxy/src/main/java/org/apache/archiva/proxy/DefaultRepositoryProxyHandler.java index fac557ed4..d5f6f94a9 100644 --- a/archiva-modules/archiva-base/archiva-proxy/src/main/java/org/apache/archiva/proxy/DefaultRepositoryProxyHandler.java +++ b/archiva-modules/archiva-base/archiva-proxy/src/main/java/org/apache/archiva/proxy/DefaultRepositoryProxyHandler.java @@ -22,10 +22,7 @@ package org.apache.archiva.proxy; import org.apache.archiva.checksum.ChecksumAlgorithm; import org.apache.archiva.checksum.ChecksumUtil; import org.apache.archiva.proxy.model.ProxyConnectorRuleType; -import org.apache.archiva.common.filelock.FileLockException; import org.apache.archiva.common.filelock.FileLockManager; -import org.apache.archiva.common.filelock.FileLockTimeoutException; -import org.apache.archiva.common.filelock.Lock; import org.apache.archiva.configuration.*; import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.model.Keys; @@ -39,9 +36,9 @@ import org.apache.archiva.redback.components.registry.Registry; import org.apache.archiva.redback.components.registry.RegistryListener; import org.apache.archiva.redback.components.taskqueue.TaskQueueException; import org.apache.archiva.repository.*; -import org.apache.archiva.repository.content.FilesystemStorage; -import org.apache.archiva.repository.content.StorageAsset; -import 
org.apache.archiva.repository.content.StorageUtil; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.RepositoryMetadataException; import org.apache.archiva.scheduler.ArchivaTaskScheduler; @@ -61,7 +58,6 @@ import javax.inject.Named; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; diff --git a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-api/src/main/java/org/apache/archiva/admin/model/group/RepositoryGroupAdmin.java b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-api/src/main/java/org/apache/archiva/admin/model/group/RepositoryGroupAdmin.java index 8cef4f30b..e98e8321a 100644 --- a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-api/src/main/java/org/apache/archiva/admin/model/group/RepositoryGroupAdmin.java +++ b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-api/src/main/java/org/apache/archiva/admin/model/group/RepositoryGroupAdmin.java @@ -21,8 +21,8 @@ package org.apache.archiva.admin.model.group; import org.apache.archiva.admin.model.AuditInformation; import org.apache.archiva.admin.model.RepositoryAdminException; import org.apache.archiva.admin.model.beans.RepositoryGroup; +import org.apache.archiva.repository.storage.StorageAsset; -import java.nio.file.Path; import java.util.List; import java.util.Map; @@ -75,5 +75,5 @@ public interface RepositoryGroupAdmin Map> getRepositoryToGroupMap() throws RepositoryAdminException; - Path getMergedIndexDirectory(String repositoryGroupId ); + StorageAsset getMergedIndexDirectory(String repositoryGroupId ); } diff --git a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/main/java/org/apache/archiva/admin/repository/group/DefaultRepositoryGroupAdmin.java b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/main/java/org/apache/archiva/admin/repository/group/DefaultRepositoryGroupAdmin.java index 757aa12a8..39919a88e 100644 --- a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/main/java/org/apache/archiva/admin/repository/group/DefaultRepositoryGroupAdmin.java +++ b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/main/java/org/apache/archiva/admin/repository/group/DefaultRepositoryGroupAdmin.java @@ -29,11 +29,11 @@ import org.apache.archiva.configuration.Configuration; import org.apache.archiva.configuration.RepositoryGroupConfiguration; import org.apache.archiva.metadata.model.facets.AuditEvent; import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler; -import org.apache.archiva.repository.EditableRepository; import org.apache.archiva.repository.EditableRepositoryGroup; import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryRegistry; import org.apache.archiva.repository.features.IndexCreationFeature; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,9 +47,6 @@ import 
java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -112,9 +109,14 @@ public class DefaultRepositoryGroupAdmin @Override - public Path getMergedIndexDirectory( String repositoryGroupId ) + public StorageAsset getMergedIndexDirectory(String repositoryGroupId ) { - return groupsDirectory.resolve( repositoryGroupId ); + org.apache.archiva.repository.RepositoryGroup group = repositoryRegistry.getRepositoryGroup(repositoryGroupId); + if (group!=null) { + return group.getFeature(IndexCreationFeature.class).get().getLocalIndexPath(); + } else { + return null; + } } @Override diff --git a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/ArchivaIndexManagerMock.java b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/ArchivaIndexManagerMock.java index 064045de9..1bd7e4c10 100644 --- a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/ArchivaIndexManagerMock.java +++ b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/ArchivaIndexManagerMock.java @@ -19,7 +19,7 @@ package org.apache.archiva.admin.mock; * under the License. */ -import org.apache.archiva.admin.model.RepositoryAdminException; +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.configuration.ArchivaConfiguration; @@ -40,10 +40,12 @@ import org.apache.archiva.repository.RemoteRepository; import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.UnsupportedRepositoryTypeException; -import org.apache.archiva.repository.content.FilesystemAsset; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.RemoteIndexFeature; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.commons.lang.StringUtils; import org.apache.maven.index.ArtifactContext; import org.apache.maven.index.ArtifactContextProducer; @@ -143,7 +145,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { private Path getIndexPath( ArchivaIndexingContext ctx ) { - return PathUtil.getPathFromUri( ctx.getPath( ) ); + return ctx.getPath( ).getFilePath(); } @FunctionalInterface @@ -378,9 +380,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { @Override public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection artifactReference ) throws IndexUpdateFailedException { - final URI ctxUri = context.getPath(); + final StorageAsset ctxUri = context.getPath(); executeUpdateFunction(context, indexingContext -> { - Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, 
Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList()); + Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList()); try { indexer.addArtifactsToIndex(artifacts, indexingContext); } catch (IOException e) { @@ -394,9 +396,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { @Override public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection artifactReference ) throws IndexUpdateFailedException { - final URI ctxUri = context.getPath(); + final StorageAsset ctxUri = context.getPath(); executeUpdateFunction(context, indexingContext -> { - Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList()); + Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList()); try { indexer.deleteArtifactsFromIndex(artifacts, indexingContext); } catch (IOException e) { @@ -455,7 +457,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { log.warn("Index close failed"); } try { - FileUtils.deleteDirectory(Paths.get(context.getPath())); + StorageUtil.deleteRecursively(context.getPath()); } catch (IOException e) { throw new IndexUpdateFailedException("Could not delete index files"); } @@ -530,6 +532,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { URI indexDir = icf.getIndexPath(); String indexPath = indexDir.getPath(); Path indexDirectory = null; + FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getAsset("").getStorage(); if ( ! StringUtils.isEmpty(indexDir.toString( ) ) ) { @@ -538,6 +541,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { if ( indexDirectory.isAbsolute( ) ) { indexPath = indexDirectory.getFileName().toString(); + filesystemStorage = new FilesystemStorage(indexDirectory.getParent(), new DefaultFileLockManager()); } else { @@ -554,7 +558,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { { Files.createDirectories( indexDirectory ); } - return new FilesystemAsset( indexPath, indexDirectory); + return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory); } private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException diff --git a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MavenIndexContextMock.java b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MavenIndexContextMock.java index 3bed60293..4b4528bc8 100644 --- a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MavenIndexContextMock.java +++ b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MavenIndexContextMock.java @@ -19,8 +19,12 @@ package org.apache.archiva.admin.mock; * under the License. 
*/ +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.repository.Repository; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.maven.index.context.IndexingContext; import java.io.IOException; @@ -38,10 +42,16 @@ public class MavenIndexContextMock implements ArchivaIndexingContext { private IndexingContext delegate; private Repository repository; + private FilesystemStorage filesystemStorage; MavenIndexContextMock(Repository repository, IndexingContext delegate) { this.delegate = delegate; this.repository = repository; + try { + this.filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath(), new DefaultFileLockManager()); + } catch (IOException e) { + e.printStackTrace(); + } } @@ -56,8 +66,9 @@ public class MavenIndexContextMock implements ArchivaIndexingContext { } @Override - public URI getPath() { - return delegate.getIndexDirectoryFile().toURI(); + public StorageAsset getPath() { + return + new FilesystemAsset(filesystemStorage, "", delegate.getIndexDirectoryFile().toPath()); } @Override diff --git a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MockMergedRemoteIndexesScheduler.java b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MockMergedRemoteIndexesScheduler.java index 4cb8ca78d..6222e7c51 100644 --- a/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MockMergedRemoteIndexesScheduler.java +++ b/archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MockMergedRemoteIndexesScheduler.java @@ -21,10 +21,9 @@ package org.apache.archiva.admin.mock; import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler; import org.apache.archiva.repository.RepositoryGroup; +import org.apache.archiva.repository.storage.StorageAsset; import org.springframework.stereotype.Service; -import java.nio.file.Path; - /** * @author Olivier Lamy */ @@ -34,7 +33,7 @@ public class MockMergedRemoteIndexesScheduler { @Override - public void schedule( RepositoryGroup repositoryGroup, Path directory ) + public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory ) { // no op } diff --git a/archiva-modules/archiva-base/archiva-repository-api/pom.xml b/archiva-modules/archiva-base/archiva-repository-api/pom.xml index b5f10a7db..7251ab2a6 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/pom.xml +++ b/archiva-modules/archiva-base/archiva-repository-api/pom.xml @@ -38,6 +38,10 @@ org.apache.archiva archiva-common + + org.apache.archiva + archiva-storage-api + commons-lang diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexManager.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexManager.java index 5eb57705f..8e38b829a 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexManager.java +++ 
b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexManager.java @@ -21,10 +21,8 @@ package org.apache.archiva.indexer; import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.RepositoryType; -import org.apache.archiva.repository.content.StorageAsset; import java.net.URI; -import java.nio.file.Path; import java.util.Collection; import java.util.List; diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java index 795354598..2dee4412d 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java @@ -20,6 +20,7 @@ package org.apache.archiva.indexer; */ import org.apache.archiva.repository.Repository; +import org.apache.archiva.repository.storage.StorageAsset; import java.io.IOException; import java.net.URI; @@ -48,7 +49,7 @@ public interface ArchivaIndexingContext { * The path where the index is stored. * @return */ - URI getPath(); + StorageAsset getPath(); /** * Returns true, if the index has no entries or is not initialized. diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/IndexMergerRequest.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/IndexMergerRequest.java index 787e02d5b..9a8bae159 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/IndexMergerRequest.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/IndexMergerRequest.java @@ -18,6 +18,8 @@ package org.apache.archiva.indexer.merger; * under the License. 
*/ +import org.apache.archiva.repository.storage.StorageAsset; + import java.nio.file.Path; import java.util.Collection; @@ -47,7 +49,7 @@ public class IndexMergerRequest private int mergedIndexTtl; - private Path mergedIndexDirectory; + private StorageAsset mergedIndexDirectory; private boolean temporary; @@ -121,17 +123,17 @@ public class IndexMergerRequest this.mergedIndexTtl = mergedIndexTtl; } - public Path getMergedIndexDirectory() + public StorageAsset getMergedIndexDirectory() { return mergedIndexDirectory; } - public void setMergedIndexDirectory( Path mergedIndexDirectory ) + public void setMergedIndexDirectory( StorageAsset mergedIndexDirectory ) { this.mergedIndexDirectory = mergedIndexDirectory; } - public IndexMergerRequest mergedIndexDirectory( Path mergedIndexDirectory ) + public IndexMergerRequest mergedIndexDirectory( StorageAsset mergedIndexDirectory ) { this.mergedIndexDirectory = mergedIndexDirectory; return this; diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/MergedRemoteIndexesScheduler.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/MergedRemoteIndexesScheduler.java index 8125610b6..79eaaac3a 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/MergedRemoteIndexesScheduler.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/MergedRemoteIndexesScheduler.java @@ -21,8 +21,7 @@ package org.apache.archiva.indexer.merger; import org.apache.archiva.repository.RepositoryGroup; - -import java.nio.file.Path; +import org.apache.archiva.repository.storage.StorageAsset; /** * @author Olivier Lamy @@ -35,8 +34,9 @@ public interface MergedRemoteIndexesScheduler * will check if this repository group need to a schedule a cron to download/merge * remote indexes * @param repositoryGroup + * @param directory */ - void schedule(RepositoryGroup repositoryGroup, Path directory ); + void schedule(RepositoryGroup repositoryGroup, StorageAsset directory ); void unschedule( RepositoryGroup repositoryGroup ); diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/TemporaryGroupIndex.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/TemporaryGroupIndex.java index d0b576b74..8f5588893 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/TemporaryGroupIndex.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/TemporaryGroupIndex.java @@ -18,6 +18,8 @@ package org.apache.archiva.indexer.merger; * under the License. 
*/ +import org.apache.archiva.repository.storage.StorageAsset; + import java.io.Serializable; import java.nio.file.Path; import java.util.Date; @@ -30,7 +32,7 @@ public class TemporaryGroupIndex { private long creationTime = new Date().getTime(); - private Path directory; + private StorageAsset directory; private String indexId; @@ -38,7 +40,7 @@ public class TemporaryGroupIndex private int mergedIndexTtl; - public TemporaryGroupIndex(Path directory, String indexId, String groupId, int mergedIndexTtl) + public TemporaryGroupIndex(StorageAsset directory, String indexId, String groupId, int mergedIndexTtl) { this.directory = directory; this.indexId = indexId; @@ -57,12 +59,12 @@ public class TemporaryGroupIndex return this; } - public Path getDirectory() + public StorageAsset getDirectory() { return directory; } - public TemporaryGroupIndex setDirectory( Path directory ) + public TemporaryGroupIndex setDirectory( StorageAsset directory ) { this.directory = directory; return this; diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepository.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepository.java index 2c77cadc5..3acd16198 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepository.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepository.java @@ -20,8 +20,6 @@ package org.apache.archiva.repository; */ -import org.apache.archiva.repository.content.RepositoryStorage; - import java.util.Set; /** diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java index c50a89cf2..5cabc3f6a 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java @@ -23,7 +23,7 @@ import org.apache.archiva.model.ArchivaArtifact; import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.model.ProjectReference; import org.apache.archiva.model.VersionedReference; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import java.util.Set; diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/Repository.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/Repository.java index 453badc2d..d6c38a04d 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/Repository.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/Repository.java @@ -20,7 +20,7 @@ package org.apache.archiva.repository; */ import org.apache.archiva.indexer.ArchivaIndexingContext; -import org.apache.archiva.repository.content.RepositoryStorage; +import org.apache.archiva.repository.storage.RepositoryStorage; import org.apache.archiva.repository.features.RepositoryFeature; import java.net.URI; diff --git 
a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RepositoryGroup.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RepositoryGroup.java index 8386c5838..5b50dc0a7 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RepositoryGroup.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RepositoryGroup.java @@ -19,8 +19,7 @@ package org.apache.archiva.repository; * under the License. */ -import org.apache.archiva.repository.content.RepositoryStorage; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.RepositoryStorage; import java.util.List; diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/RepositoryStorage.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/RepositoryStorage.java deleted file mode 100644 index 3dea0b294..000000000 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/RepositoryStorage.java +++ /dev/null @@ -1,148 +0,0 @@ -package org.apache.archiva.repository.content; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.channels.ReadableByteChannel; -import java.nio.channels.WritableByteChannel; -import java.nio.file.CopyOption; -import java.util.function.Consumer; - -/** - * Repository storage gives access to the files and directories on the storage. - * The storage may be on a filesystem but can be any other storage system. - * - * This API is low level repository access. If you use this API you must - * either have knowledge about the specific repository layout or use the structure - * as it is, e.g. for browsing. - * - * It is the decision of the implementation, if this API provides access to all elements, or - * just a selected view. - * - * Checking access is not part of this API. - */ -public interface RepositoryStorage { - /** - * Returns information about a specific storage asset. - * @param path - * @return - */ - StorageAsset getAsset(String path); - - /** - * Consumes the data and sets a lock for the file during the operation. - * - * @param asset The asset from which the data is consumed. - * @param consumerFunction The consumer that reads the data - * @param readLock If true, a read lock is acquired on the asset. 
- * @throws IOException - */ - void consumeData(StorageAsset asset, Consumer consumerFunction, boolean readLock) throws IOException; - - /** - * Consumes the data and sets a lock for the file during the operation. - * - * @param asset The asset from which the data is consumed. - * @param consumerFunction The consumer that reads the data - * @param readLock If true, a read lock is acquired on the asset. - * @throws IOException - */ - void consumeDataFromChannel( StorageAsset asset, Consumer consumerFunction, boolean readLock) throws IOException; - - /** - * Writes data to the asset using a write lock. - * - * @param asset The asset to which the data is written. - * @param consumerFunction The function that provides the data. - * @param writeLock If true, a write lock is acquired on the destination. - */ - void writeData( StorageAsset asset, Consumer consumerFunction, boolean writeLock) throws IOException;; - - /** - * Writes data and sets a lock during the operation. - * - * @param asset The asset to which the data is written. - * @param consumerFunction The function that provides the data. - * @param writeLock If true, a write lock is acquired on the destination. - * @throws IOException - */ - void writeDataToChannel( StorageAsset asset, Consumer consumerFunction, boolean writeLock) throws IOException; - - /** - * Adds a new asset to the underlying storage. - * @param path The path to the asset. - * @param container True, if the asset should be a container, false, if it is a file. - * @return - */ - StorageAsset addAsset(String path, boolean container); - - /** - * Removes the given asset from the storage. - * - * @param asset - * @throws IOException - */ - void removeAsset(StorageAsset asset) throws IOException; - - /** - * Moves the asset to the given location and returns the asset object for the destination. - * - * @param origin The original asset - * @param destination The destination path pointing to the new asset. - * @param copyOptions The copy options. - * @return The asset representation of the moved object. - */ - StorageAsset moveAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException; - - /** - * Moves the asset to the new path. - * - * @param origin The original asset - * @param destination The destination asset. - * @param copyOptions The copy options (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING} - * @throws IOException If it was not possible to copy the asset. - */ - void moveAsset(StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException; - - /** - * Copies the given asset to the new destination. - * - * @param origin The original asset - * @param destination The path to the new asset - * @param copyOptions The copy options, e.g. (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING} - * @return The asset representation of the copied object - * @throws IOException If it was not possible to copy the asset - */ - StorageAsset copyAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException; - - /** - * Copies the given asset to the new destination. - * - * @param origin The original asset - * @param destination The path to the new asset - * @param copyOptions The copy options, e.g. (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING} - * @throws IOException If it was not possible to copy the asset - */ - void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... 
copyOptions) throws IOException; - - -} diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java deleted file mode 100644 index 38ef2a895..000000000 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java +++ /dev/null @@ -1,180 +0,0 @@ -package org.apache.archiva.repository.content; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.channels.ReadableByteChannel; -import java.nio.channels.WritableByteChannel; -import java.nio.file.Path; -import java.time.Instant; -import java.util.List; -import java.util.function.Consumer; - -/** - * A instance of this interface represents information about an specific asset in a repository. - * The asset may be an real artifact, a directory, or a virtual asset. - * - * Each asset has a unique path relative to the repository. - * - * The implementation may read the data directly from the filesystem or underlying storage implementation. - * - * @author Martin Stockhammer - */ -public interface StorageAsset -{ - - /** - * Returns the storage this asset belongs to. - * @return - */ - RepositoryStorage getStorage(); - - /** - * Returns the complete path relative to the repository to the given asset. - * - * @return A path starting with '/' that uniquely identifies the asset in the repository. - */ - String getPath(); - - /** - * Returns the name of the asset. It may be just the filename. - * @return - */ - String getName(); - - /** - * Returns the time of the last modification. - * - * @return - */ - Instant getModificationTime(); - - /** - * Returns true, if this asset is a container type and contains further child assets. - * @return - */ - boolean isContainer(); - - /** - * List the child assets. - * - * @return The list of children. If there are no children and if the asset is not a container, a empty list will be returned. - */ - List list(); - - /** - * The size in bytes of the asset. If the asset does not have a size, -1 should be returned. - * - * @return The size if the asset has a size, otherwise -1 - */ - long getSize(); - - /** - * Returns the input stream of the artifact content. - * It will throw a IOException, if the stream could not be created. - * Implementations should create a new stream instance for each invocation and make sure that the - * stream is proper closed after usage. - * - * @return The InputStream representing the content of the artifact. 
- * @throws IOException - */ - InputStream getReadStream() throws IOException; - - /** - * Returns a NIO representation of the data. - * - * @return A channel to the asset data. - * @throws IOException - */ - ReadableByteChannel getReadChannel() throws IOException; - - /** - * - * Returns an output stream where you can write data to the asset. The operation is not locked or synchronized. - * User of this method have to make sure, that the stream is proper closed after usage. - * - * @param replace If true, the original data will be replaced, otherwise the data will be appended. - * @return The OutputStream where the data can be written. - * @throws IOException - */ - OutputStream getWriteStream( boolean replace) throws IOException; - - /** - * Returns a NIO representation of the asset where you can write the data. - * - * @param replace True, if the content should be replaced by the data written to the stream. - * @return The Channel for writing the data. - * @throws IOException - */ - WritableByteChannel getWriteChannel( boolean replace) throws IOException; - - /** - * Replaces the content. The implementation may do an atomic move operation, or keep a backup. If - * the operation fails, the implementation should try to restore the old data, if possible. - * - * The original file may be deleted, if the storage was successful. - * - * @param newData Replaces the data by the content of the given file. - */ - boolean replaceDataFromFile( Path newData) throws IOException; - - /** - * Returns true, if the asset exists. - * - * @return True, if the asset exists, otherwise false. - */ - boolean exists(); - - /** - * Creates the asset in the underlying storage, if it does not exist. - */ - void create() throws IOException; - - /** - * Returns the real path to the asset, if it exist. Not all implementations may implement this method. - * The method throws {@link UnsupportedOperationException}, if and only if {@link #isFileBased()} returns false. - * - * @return The filesystem path to the asset. - * @throws UnsupportedOperationException If the underlying storage is not file based. - */ - Path getFilePath() throws UnsupportedOperationException; - - /** - * Returns true, if the asset can return a file path for the given asset. If this is true, the {@link #getFilePath()} - * will not throw a {@link UnsupportedOperationException} - * - * @return - */ - boolean isFileBased(); - - /** - * Returns true, if there is a parent to this asset. - * @return - */ - boolean hasParent(); - - /** - * Returns the parent of this asset. - * @return The asset, or null, if it does not exist. 
- */ - StorageAsset getParent(); -} diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/features/IndexCreationFeature.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/features/IndexCreationFeature.java index 636e2cb40..ff10b2d39 100644 --- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/features/IndexCreationFeature.java +++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/features/IndexCreationFeature.java @@ -22,12 +22,11 @@ package org.apache.archiva.repository.features; import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.RepositoryEventListener; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import java.net.URI; import java.net.URISyntaxException; -import java.nio.file.Path; import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH; import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_PACKED_INDEX_PATH; diff --git a/archiva-modules/archiva-base/archiva-repository-layer/pom.xml b/archiva-modules/archiva-base/archiva-repository-layer/pom.xml index 7d2c1a667..3e9335650 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/pom.xml +++ b/archiva-modules/archiva-base/archiva-repository-layer/pom.xml @@ -41,6 +41,10 @@ org.apache.archiva archiva-model + + org.apache.archiva + archiva-storage-fs + org.apache.archiva archiva-checksum diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultIndexMerger.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultIndexMerger.java index 293b73b71..b04005fa6 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultIndexMerger.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultIndexMerger.java @@ -28,6 +28,8 @@ import org.apache.archiva.indexer.merger.IndexMergerRequest; import org.apache.archiva.indexer.merger.TemporaryGroupIndex; import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.RepositoryRegistry; +import org.apache.archiva.repository.storage.StorageAsset; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.commons.lang.time.StopWatch; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -87,7 +89,7 @@ public class DefaultIndexMerger stopWatch.reset(); stopWatch.start(); - Path mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory(); + StorageAsset mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory(); Repository destinationRepository = repositoryRegistry.getRepository(indexMergerRequest.getGroupId()); ArchivaIndexManager idxManager = repositoryRegistry.getIndexManager(destinationRepository.getType()); @@ -131,10 +133,10 @@ public class DefaultIndexMerger ctx.close(true); temporaryGroupIndexes.remove( temporaryGroupIndex ); temporaryContextes.remove( ctx ); - Path directory = temporaryGroupIndex.getDirectory(); - if ( directory != null && Files.exists(directory) ) + StorageAsset directory = temporaryGroupIndex.getDirectory(); + if ( directory != null && directory.exists() ) { - 
FileUtils.deleteDirectory( directory ); + StorageUtil.deleteRecursively( directory ); } } } diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultMergedRemoteIndexesScheduler.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultMergedRemoteIndexesScheduler.java index b2be61184..93dbaeaad 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultMergedRemoteIndexesScheduler.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultMergedRemoteIndexesScheduler.java @@ -22,6 +22,7 @@ package org.apache.archiva.indexer.merger; import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.RepositoryGroup; import org.apache.archiva.repository.features.IndexCreationFeature; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,7 +60,7 @@ public class DefaultMergedRemoteIndexesScheduler private Map scheduledFutureMap = new ConcurrentHashMap<>(); @Override - public void schedule(RepositoryGroup repositoryGroup, Path directory ) + public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory ) { if ( StringUtils.isEmpty( repositoryGroup.getSchedulingDefinition() ) ) { diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractManagedRepository.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractManagedRepository.java index 41d703ab2..45d95ade3 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractManagedRepository.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractManagedRepository.java @@ -20,17 +20,12 @@ package org.apache.archiva.repository; */ -import org.apache.archiva.repository.content.RepositoryStorage; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.RepositoryStorage; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Path; import java.util.Collections; import java.util.HashSet; import java.util.Locale; import java.util.Set; -import java.util.function.Consumer; /** * Simple implementation of a managed repository. 
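(For orientation, a minimal sketch of the Path-to-StorageAsset migration pattern visible in the hunks above, e.g. the DefaultIndexMerger change from FileUtils.deleteDirectory to StorageUtil.deleteRecursively. It assumes the interfaces moved to org.apache.archiva.repository.storage keep the methods of the deleted org.apache.archiva.repository.content sources shown earlier; the class and method names below are illustrative only and are not part of this commit.)

import java.io.IOException;

import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;

class AssetCleanupExample {

    // Before the refactoring this would have been Files.deleteIfExists(path) or
    // FileUtils.deleteDirectory(directory); afterwards the asset's own storage does the work.
    static void delete(StorageAsset asset) throws IOException {
        if (!asset.exists()) {
            return;
        }
        if (asset.isContainer()) {
            // Recursive removal of a directory-like asset, as used for the temporary
            // group index directory in the DefaultIndexMerger hunk above (assumed signature).
            StorageUtil.deleteRecursively(asset);
        } else {
            // Single asset removal is delegated to the owning RepositoryStorage.
            asset.getStorage().removeAsset(asset);
        }
    }
}
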
diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRemoteRepository.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRemoteRepository.java index ff2ac062a..048ad227a 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRemoteRepository.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRemoteRepository.java @@ -20,7 +20,7 @@ package org.apache.archiva.repository; */ -import org.apache.archiva.repository.content.RepositoryStorage; +import org.apache.archiva.repository.storage.RepositoryStorage; import java.nio.file.Path; import java.time.Duration; diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepository.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepository.java index 87b5420b1..c9fc92660 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepository.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepository.java @@ -23,10 +23,9 @@ import com.cronutils.model.CronType; import com.cronutils.model.definition.CronDefinition; import com.cronutils.model.definition.CronDefinitionBuilder; import com.cronutils.parser.CronParser; -import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.indexer.ArchivaIndexingContext; -import org.apache.archiva.repository.content.RepositoryStorage; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.RepositoryStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.repository.features.RepositoryFeature; import org.apache.archiva.repository.features.StagingRepositoryFeature; import org.apache.commons.lang.StringUtils; diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepositoryGroup.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepositoryGroup.java index 2011d4a5c..2f78b7bf5 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepositoryGroup.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepositoryGroup.java @@ -19,18 +19,13 @@ package org.apache.archiva.repository; * under the License. */ -import org.apache.archiva.repository.content.RepositoryStorage; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.RepositoryStorage; import org.apache.commons.collections4.map.ListOrderedMap; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Path; import java.util.List; import java.util.Locale; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.function.Consumer; /** * Abstract repository group implementation. 
diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicManagedRepository.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicManagedRepository.java index a16630b4a..8f65cb4d3 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicManagedRepository.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicManagedRepository.java @@ -21,9 +21,8 @@ package org.apache.archiva.repository; import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.FileLockManager; -import org.apache.archiva.repository.content.FilesystemStorage; -import org.apache.archiva.repository.content.RepositoryStorage; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.RepositoryStorage; import org.apache.archiva.repository.features.ArtifactCleanupFeature; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.StagingRepositoryFeature; @@ -31,10 +30,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; -import java.io.InputStream; import java.nio.file.Path; import java.util.Locale; -import java.util.function.Consumer; /** * diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicRemoteRepository.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicRemoteRepository.java index acca83a76..0675402a0 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicRemoteRepository.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicRemoteRepository.java @@ -21,8 +21,8 @@ package org.apache.archiva.repository; import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.FileLockManager; -import org.apache.archiva.repository.content.FilesystemStorage; -import org.apache.archiva.repository.content.RepositoryStorage; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.RepositoryStorage; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.RemoteIndexFeature; import org.slf4j.Logger; diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/RepositoryRegistry.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/RepositoryRegistry.java index e78130e33..e0e61c54a 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/RepositoryRegistry.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/RepositoryRegistry.java @@ -26,8 +26,6 @@ import org.apache.archiva.indexer.IndexCreationFailedException; import org.apache.archiva.indexer.IndexManagerFactory; import org.apache.archiva.indexer.IndexUpdateFailedException; import org.apache.archiva.redback.components.registry.RegistryException; -import org.apache.archiva.repository.content.RepositoryStorage; 
-import org.apache.archiva.repository.content.StorageAsset; import org.apache.archiva.repository.features.IndexCreationEvent; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.StagingRepositoryFeature; @@ -44,11 +42,9 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.stream.Collectors; import java.util.stream.Stream; diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/ArtifactUtil.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/ArtifactUtil.java index 7755b53d7..8bcfa97d6 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/ArtifactUtil.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/ArtifactUtil.java @@ -23,6 +23,7 @@ import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.RepositoryContentFactory; import org.apache.archiva.repository.RepositoryException; +import org.apache.archiva.repository.storage.StorageAsset; import org.springframework.stereotype.Service; import javax.inject.Inject; @@ -53,4 +54,19 @@ public class ArtifactUtil { return Paths.get(repository.getLocation()).resolve(artifactPath); } + /** + * Returns the physical location of a given artifact in the repository. There is no check for the + * existence of the returned file. + * + * @param repository The repository, where the artifact is stored. + * @param artifactReference The artifact reference. + * @return The asset representation of the artifact. + * @throws RepositoryException + */ + public StorageAsset getArtifactAsset(ManagedRepository repository, ArtifactReference artifactReference) throws RepositoryException { + final ManagedRepositoryContent content = repositoryContentFactory.getManagedRepositoryContent(repository); + final String artifactPath = content.toPath( artifactReference ); + return repository.getAsset(artifactPath); + } + } diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java deleted file mode 100644 index e0d10f863..000000000 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java +++ /dev/null @@ -1,481 +0,0 @@ -package org.apache.archiva.repository.content; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.channels.FileChannel; -import java.nio.channels.ReadableByteChannel; -import java.nio.channels.WritableByteChannel; -import java.nio.file.*; -import java.nio.file.attribute.*; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -/** - * Implementation of an asset that is stored on the filesystem. - *

- * The implementation does not check the given paths. Caller should normalize the asset path - * and check, if the base path is a parent of the resulting path. - *

- * The file must not exist for all operations. - * - * @author Martin Stockhammer - */ -public class FilesystemAsset implements StorageAsset { - - private final static Logger log = LoggerFactory.getLogger(FilesystemAsset.class); - - private final Path basePath; - private final Path assetPath; - private final String relativePath; - - public static final String DEFAULT_POSIX_FILE_PERMS = "rw-rw----"; - public static final String DEFAULT_POSIX_DIR_PERMS = "rwxrwx---"; - - public static final Set DEFAULT_POSIX_FILE_PERMISSIONS; - public static final Set DEFAULT_POSIX_DIR_PERMISSIONS; - - public static final AclEntryPermission[] DEFAULT_ACL_FILE_PERMISSIONS = new AclEntryPermission[]{ - AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL, - AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA - }; - - public static final AclEntryPermission[] DEFAULT_ACL_DIR_PERMISSIONS = new AclEntryPermission[]{ - AclEntryPermission.ADD_FILE, AclEntryPermission.ADD_SUBDIRECTORY, AclEntryPermission.DELETE_CHILD, - AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL, - AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA - }; - - static { - - DEFAULT_POSIX_FILE_PERMISSIONS = PosixFilePermissions.fromString(DEFAULT_POSIX_FILE_PERMS); - DEFAULT_POSIX_DIR_PERMISSIONS = PosixFilePermissions.fromString(DEFAULT_POSIX_DIR_PERMS); - } - - Set defaultPosixFilePermissions = DEFAULT_POSIX_FILE_PERMISSIONS; - Set defaultPosixDirectoryPermissions = DEFAULT_POSIX_DIR_PERMISSIONS; - - List defaultFileAcls; - List defaultDirectoryAcls; - - boolean supportsAcl = false; - boolean supportsPosix = false; - final boolean setPermissionsForNew; - final RepositoryStorage storage; - - boolean directoryHint = false; - - private static final OpenOption[] REPLACE_OPTIONS = new OpenOption[]{StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE}; - private static final OpenOption[] APPEND_OPTIONS = new OpenOption[]{StandardOpenOption.APPEND}; - - - FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath) { - this.assetPath = assetPath; - this.relativePath = path; - this.setPermissionsForNew=false; - this.basePath = basePath; - this.storage = storage; - init(); - } - - /** - * Creates an asset for the given path. The given paths are not checked. - * The base path should be an absolute path. - * - * @param path The logical path for the asset relative to the repository. - * @param assetPath The asset path. - */ - public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath) { - this.assetPath = assetPath; - this.relativePath = path; - this.setPermissionsForNew = false; - this.basePath = null; - this.storage = storage; - init(); - } - - /** - * Creates an asset for the given path. The given paths are not checked. - * The base path should be an absolute path. - * - * @param path The logical path for the asset relative to the repository - * @param assetPath The asset path. - * @param directory This is only relevant, if the represented file or directory does not exist yet and - * is a hint. 
- */ - public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory) { - this.assetPath = assetPath; - this.relativePath = path; - this.directoryHint = directory; - this.setPermissionsForNew = false; - this.basePath = basePath; - this.storage = storage; - init(); - } - - /** - * Creates an asset for the given path. The given paths are not checked. - * The base path should be an absolute path. - * - * @param path The logical path for the asset relative to the repository - * @param assetPath The asset path. - * @param directory This is only relevant, if the represented file or directory does not exist yet and - * is a hint. - */ - public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) { - this.assetPath = assetPath; - this.relativePath = path; - this.directoryHint = directory; - this.setPermissionsForNew = setPermissionsForNew; - this.basePath = basePath; - this.storage = storage; - init(); - } - - private void init() { - - if (setPermissionsForNew) { - try { - supportsAcl = Files.getFileStore(assetPath.getRoot()).supportsFileAttributeView(AclFileAttributeView.class); - } catch (IOException e) { - log.error("Could not check filesystem capabilities {}", e.getMessage()); - } - try { - supportsPosix = Files.getFileStore(assetPath.getRoot()).supportsFileAttributeView(PosixFileAttributeView.class); - } catch (IOException e) { - log.error("Could not check filesystem capabilities {}", e.getMessage()); - } - - if (supportsAcl) { - AclFileAttributeView aclView = Files.getFileAttributeView(assetPath.getParent(), AclFileAttributeView.class); - UserPrincipal owner = null; - try { - owner = aclView.getOwner(); - setDefaultFileAcls(processPermissions(owner, DEFAULT_ACL_FILE_PERMISSIONS)); - setDefaultDirectoryAcls(processPermissions(owner, DEFAULT_ACL_DIR_PERMISSIONS)); - - } catch (IOException e) { - supportsAcl = false; - } - - - } - } - } - - private List processPermissions(UserPrincipal owner, AclEntryPermission[] defaultAclFilePermissions) { - AclEntry.Builder aclBuilder = AclEntry.newBuilder(); - aclBuilder.setPermissions(defaultAclFilePermissions); - aclBuilder.setType(AclEntryType.ALLOW); - aclBuilder.setPrincipal(owner); - ArrayList aclList = new ArrayList<>(); - aclList.add(aclBuilder.build()); - return aclList; - } - - - @Override - public RepositoryStorage getStorage( ) - { - return storage; - } - - @Override - public String getPath() { - return relativePath; - } - - @Override - public String getName() { - return assetPath.getFileName().toString(); - } - - @Override - public Instant getModificationTime() { - try { - return Files.getLastModifiedTime(assetPath).toInstant(); - } catch (IOException e) { - log.error("Could not read modification time of {}", assetPath); - return Instant.now(); - } - } - - /** - * Returns true, if the path of this asset points to a directory - * - * @return - */ - @Override - public boolean isContainer() { - if (Files.exists(assetPath)) { - return Files.isDirectory(assetPath); - } else { - return directoryHint; - } - } - - /** - * Returns the list of directory entries, if this asset represents a directory. - * Otherwise a empty list will be returned. - * - * @return The list of entries in the directory, if it exists. 
- */ - @Override - public List list() { - try { - return Files.list(assetPath).map(p -> new FilesystemAsset(storage, relativePath + "/" + p.getFileName().toString(), assetPath.resolve(p))) - .collect(Collectors.toList()); - } catch (IOException e) { - return Collections.EMPTY_LIST; - } - } - - /** - * Returns the size of the represented file. If it cannot be determined, -1 is returned. - * - * @return - */ - @Override - public long getSize() { - try { - return Files.size(assetPath); - } catch (IOException e) { - return -1; - } - } - - /** - * Returns a input stream to the underlying file, if it exists. The caller has to make sure, that - * the stream is closed after it was used. - * - * @return - * @throws IOException - */ - @Override - public InputStream getReadStream() throws IOException { - if (isContainer()) { - throw new IOException("Can not create input stream for container"); - } - return Files.newInputStream(assetPath); - } - - @Override - public ReadableByteChannel getReadChannel( ) throws IOException - { - return FileChannel.open( assetPath, StandardOpenOption.READ ); - } - - private OpenOption[] getOpenOptions(boolean replace) { - return replace ? REPLACE_OPTIONS : APPEND_OPTIONS; - } - - @Override - public OutputStream getWriteStream( boolean replace) throws IOException { - OpenOption[] options = getOpenOptions( replace ); - if (!Files.exists( assetPath )) { - create(); - } - return Files.newOutputStream(assetPath, options); - } - - @Override - public WritableByteChannel getWriteChannel( boolean replace ) throws IOException - { - OpenOption[] options = getOpenOptions( replace ); - return FileChannel.open( assetPath, options ); - } - - @Override - public boolean replaceDataFromFile( Path newData) throws IOException { - final boolean createNew = !Files.exists(assetPath); - Path backup = null; - if (!createNew) { - backup = findBackupFile(assetPath); - } - try { - if (!createNew) { - Files.move(assetPath, backup); - } - Files.move(newData, assetPath, StandardCopyOption.REPLACE_EXISTING); - applyDefaultPermissions(assetPath); - return true; - } catch (IOException e) { - log.error("Could not overwrite file {}", assetPath); - // Revert if possible - if (backup != null && Files.exists(backup)) { - Files.move(backup, assetPath, StandardCopyOption.REPLACE_EXISTING); - } - throw e; - } finally { - if (backup != null) { - try { - Files.deleteIfExists(backup); - } catch (IOException e) { - log.error("Could not delete backup file {}", backup); - } - } - } - - } - - private void applyDefaultPermissions(Path filePath) { - try { - if (supportsPosix) { - Set perms; - if (Files.isDirectory(filePath)) { - perms = defaultPosixFilePermissions; - } else { - perms = defaultPosixDirectoryPermissions; - } - Files.setPosixFilePermissions(filePath, perms); - } else if (supportsAcl) { - List perms; - if (Files.isDirectory(filePath)) { - perms = getDefaultDirectoryAcls(); - } else { - perms = getDefaultFileAcls(); - } - AclFileAttributeView aclAttr = Files.getFileAttributeView(filePath, AclFileAttributeView.class); - aclAttr.setAcl(perms); - } - } catch (IOException e) { - log.error("Could not set permissions for {}: {}", filePath, e.getMessage()); - } - } - - private Path findBackupFile(Path file) { - String ext = ".bak"; - Path backupPath = file.getParent().resolve(file.getFileName().toString() + ext); - int idx = 0; - while (Files.exists(backupPath)) { - backupPath = file.getParent().resolve(file.getFileName().toString() + ext + idx++); - } - return backupPath; - } - - @Override - public boolean 
exists() { - return Files.exists(assetPath); - } - - @Override - public Path getFilePath() throws UnsupportedOperationException { - return assetPath; - } - - @Override - public boolean isFileBased( ) - { - return true; - } - - @Override - public boolean hasParent( ) - { - if (basePath!=null && assetPath.equals(basePath)) { - return false; - } - return assetPath.getParent()!=null; - } - - @Override - public StorageAsset getParent( ) - { - Path parentPath; - if (basePath!=null && assetPath.equals( basePath )) { - parentPath=null; - } else - { - parentPath = assetPath.getParent( ); - } - String relativeParent = StringUtils.substringBeforeLast( relativePath,"/"); - if (parentPath!=null) { - return new FilesystemAsset(storage, relativeParent, parentPath, basePath, true, setPermissionsForNew ); - } else { - return null; - } - } - - - public void setDefaultFileAcls(List acl) { - defaultFileAcls = acl; - } - - public List getDefaultFileAcls() { - return defaultFileAcls; - } - - public void setDefaultPosixFilePermissions(Set perms) { - defaultPosixFilePermissions = perms; - } - - public Set getDefaultPosixFilePermissions() { - return defaultPosixFilePermissions; - } - - public void setDefaultDirectoryAcls(List acl) { - defaultDirectoryAcls = acl; - } - - public List getDefaultDirectoryAcls() { - return defaultDirectoryAcls; - } - - public void setDefaultPosixDirectoryPermissions(Set perms) { - defaultPosixDirectoryPermissions = perms; - } - - public Set getDefaultPosixDirectoryPermissions() { - return defaultPosixDirectoryPermissions; - } - - @Override - public void create() throws IOException { - if (!Files.exists(assetPath)) { - if (directoryHint) { - Files.createDirectories(assetPath); - } else { - if (!Files.exists( assetPath.getParent() )) { - Files.createDirectories( assetPath.getParent( ) ); - } - Files.createFile(assetPath); - } - if (setPermissionsForNew) { - applyDefaultPermissions(assetPath); - } - } - } - - @Override - public String toString() { - return relativePath+":"+assetPath; - } - -} diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemStorage.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemStorage.java deleted file mode 100644 index 92044fa86..000000000 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemStorage.java +++ /dev/null @@ -1,376 +0,0 @@ -package org.apache.archiva.repository.content; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import org.apache.archiva.common.filelock.FileLockException; -import org.apache.archiva.common.filelock.FileLockManager; -import org.apache.archiva.common.filelock.FileLockTimeoutException; -import org.apache.archiva.common.filelock.Lock; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.channels.FileChannel; -import java.nio.channels.ReadableByteChannel; -import java.nio.channels.WritableByteChannel; -import java.nio.file.CopyOption; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.nio.file.StandardOpenOption; -import java.util.function.Consumer; - -/** - * Implementation of {@link RepositoryStorage} where data is stored in the filesystem. - * - * All files are relative to a given base path. Path values are separated by '/', '..' is allowed to navigate - * to a parent directory, but navigation out of the base path will lead to a exception. - */ -public class FilesystemStorage implements RepositoryStorage { - - private static final Logger log = LoggerFactory.getLogger(FilesystemStorage.class); - - private final Path basePath; - private final FileLockManager fileLockManager; - - public FilesystemStorage(Path basePath, FileLockManager fileLockManager) throws IOException { - if (!Files.exists(basePath)) { - Files.createDirectories(basePath); - } - this.basePath = basePath.normalize().toRealPath(); - this.fileLockManager = fileLockManager; - } - - private Path normalize(final String path) { - String nPath = path; - while (nPath.startsWith("/")) { - nPath = nPath.substring(1); - } - return Paths.get(nPath); - } - - private Path getAssetPath(String path) throws IOException { - Path assetPath = basePath.resolve(normalize(path)).normalize(); - if (!assetPath.startsWith(basePath)) - { - throw new IOException("Path navigation out of allowed scope: "+path); - } - return assetPath; - } - - @Override - public void consumeData( StorageAsset asset, Consumer consumerFunction, boolean readLock ) throws IOException - { - final Path path = asset.getFilePath(); - try { - if (readLock) { - consumeDataLocked( path, consumerFunction ); - } else - { - try ( InputStream is = Files.newInputStream( path ) ) - { - consumerFunction.accept( is ); - } - catch ( IOException e ) - { - log.error("Could not read the input stream from file {}", path); - throw e; - } - } - } catch (RuntimeException e) - { - log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() ); - throw new IOException( e ); - } - - } - - @Override - public void consumeDataFromChannel( StorageAsset asset, Consumer consumerFunction, boolean readLock ) throws IOException - { - final Path path = asset.getFilePath(); - try { - if (readLock) { - consumeDataFromChannelLocked( path, consumerFunction ); - } else - { - try ( FileChannel is = FileChannel.open( path, StandardOpenOption.READ ) ) - { - consumerFunction.accept( is ); - } - catch ( IOException e ) - { - log.error("Could not read the input stream from file {}", path); - throw e; - } - } - } catch (RuntimeException e) - { - log.error( "Runtime exception during data consume from artifact {}. 
Error: {}", path, e.getMessage() ); - throw new IOException( e ); - } - } - - @Override - public void writeData( StorageAsset asset, Consumer consumerFunction, boolean writeLock ) throws IOException - { - final Path path = asset.getFilePath(); - try { - if (writeLock) { - writeDataLocked( path, consumerFunction ); - } else - { - try ( OutputStream is = Files.newOutputStream( path ) ) - { - consumerFunction.accept( is ); - } - catch ( IOException e ) - { - log.error("Could not write the output stream to file {}", path); - throw e; - } - } - } catch (RuntimeException e) - { - log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() ); - throw new IOException( e ); - } - - } - - @Override - public void writeDataToChannel( StorageAsset asset, Consumer consumerFunction, boolean writeLock ) throws IOException - { - final Path path = asset.getFilePath(); - try { - if (writeLock) { - writeDataToChannelLocked( path, consumerFunction ); - } else - { - try ( FileChannel os = FileChannel.open( path, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE )) - { - consumerFunction.accept( os ); - } - catch ( IOException e ) - { - log.error("Could not write the data to file {}", path); - throw e; - } - } - } catch (RuntimeException e) - { - log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() ); - throw new IOException( e ); - } - } - - private void consumeDataLocked( Path file, Consumer consumerFunction) throws IOException - { - - final Lock lock; - try - { - lock = fileLockManager.readFileLock( file ); - try ( InputStream is = Files.newInputStream( lock.getFile())) - { - consumerFunction.accept( is ); - } - catch ( IOException e ) - { - log.error("Could not read the input stream from file {}", file); - throw e; - } finally - { - fileLockManager.release( lock ); - } - } - catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e) - { - log.error("Locking error on file {}", file); - throw new IOException(e); - } - } - - private void consumeDataFromChannelLocked( Path file, Consumer consumerFunction) throws IOException - { - - final Lock lock; - try - { - lock = fileLockManager.readFileLock( file ); - try ( FileChannel is = FileChannel.open( lock.getFile( ), StandardOpenOption.READ )) - { - consumerFunction.accept( is ); - } - catch ( IOException e ) - { - log.error("Could not read the input stream from file {}", file); - throw e; - } finally - { - fileLockManager.release( lock ); - } - } - catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e) - { - log.error("Locking error on file {}", file); - throw new IOException(e); - } - } - - - private void writeDataLocked( Path file, Consumer consumerFunction) throws IOException - { - - final Lock lock; - try - { - lock = fileLockManager.writeFileLock( file ); - try ( OutputStream is = Files.newOutputStream( lock.getFile())) - { - consumerFunction.accept( is ); - } - catch ( IOException e ) - { - log.error("Could not write the output stream to file {}", file); - throw e; - } finally - { - fileLockManager.release( lock ); - } - } - catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e) - { - log.error("Locking error on file {}", file); - throw new IOException(e); - } - } - - private void writeDataToChannelLocked( Path file, Consumer consumerFunction) throws IOException - { - - final Lock lock; - try - { - lock = fileLockManager.writeFileLock( file ); - try ( FileChannel 
is = FileChannel.open( lock.getFile( ), StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE )) - { - consumerFunction.accept( is ); - } - catch ( IOException e ) - { - log.error("Could not write to file {}", file); - throw e; - } finally - { - fileLockManager.release( lock ); - } - } - catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e) - { - log.error("Locking error on file {}", file); - throw new IOException(e); - } - } - - @Override - public StorageAsset getAsset( String path ) - { - try { - return new FilesystemAsset(this, path, getAssetPath(path)); - } catch (IOException e) { - throw new IllegalArgumentException("Path navigates outside of base directory "+path); - } - } - - @Override - public StorageAsset addAsset( String path, boolean container ) - { - try { - return new FilesystemAsset(this, path, getAssetPath(path), basePath, container); - } catch (IOException e) { - throw new IllegalArgumentException("Path navigates outside of base directory "+path); - } - } - - @Override - public void removeAsset( StorageAsset asset ) throws IOException - { - Files.delete(asset.getFilePath()); - } - - @Override - public StorageAsset moveAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException - { - boolean container = origin.isContainer(); - FilesystemAsset newAsset = new FilesystemAsset(this, destination, getAssetPath(destination), basePath, container ); - moveAsset( origin, newAsset, copyOptions ); - return newAsset; - } - - @Override - public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException - { - Files.move(origin.getFilePath(), destination.getFilePath(), copyOptions); - } - - @Override - public StorageAsset copyAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException - { - boolean container = origin.isContainer(); - FilesystemAsset newAsset = new FilesystemAsset(this, destination, getAssetPath(destination), basePath, container ); - copyAsset( origin, newAsset, copyOptions ); - return newAsset; - } - - @Override - public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException - { - Path destinationPath = destination.getFilePath(); - boolean overwrite = false; - for (int i=0; i - */ -public class StorageUtil -{ - private static final int DEFAULT_BUFFER_SIZE = 4096; - - /** - * Copies the source asset to the target. The assets may be from different RepositoryStorage instances. - * - * @param source The source asset - * @param target The target asset - * @param locked If true, a readlock is set on the source and a write lock is set on the target. - * @param copyOptions Copy options - * @throws IOException - */ - public static final void copyAsset( final StorageAsset source, - final StorageAsset target, - boolean locked, - final CopyOption... 
copyOptions ) throws IOException - { - if (source.isFileBased() && target.isFileBased()) { - // Short cut for FS operations - final Path sourcePath = source.getFilePath(); - final Path targetPath = target.getFilePath( ); - if (locked) { - final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager(); - final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager(); - try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) ) - { - Files.copy( sourcePath, targetPath, copyOptions ); - } - catch ( FileLockException e ) - { - throw new IOException( e ); - } - catch ( FileLockTimeoutException e ) - { - throw new IOException( e ); - } - } else - { - Files.copy( sourcePath, targetPath, copyOptions ); - } - } else { - try { - final RepositoryStorage sourceStorage = source.getStorage(); - final RepositoryStorage targetStorage = target.getStorage(); - sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked); - } catch (IOException e) { - throw e; - } catch (Throwable e) { - Throwable cause = e.getCause(); - if (cause instanceof IOException) { - throw (IOException)cause; - } else - { - throw new IOException( e ); - } - } - } - } - - /** - * - * @param source - * @param target - * @param locked - * @param copyOptions - * @throws IOException - */ - public static void moveAsset(StorageAsset source, StorageAsset target, boolean locked, CopyOption... copyOptions) throws IOException - { - if (source.isFileBased() && target.isFileBased()) { - // Short cut for FS operations - // Move is atomic operation - Files.move( source.getFilePath(), target.getFilePath(), copyOptions ); - } else { - try { - final RepositoryStorage sourceStorage = source.getStorage(); - final RepositoryStorage targetStorage = target.getStorage(); - sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked); - sourceStorage.removeAsset( source ); - } catch (IOException e) { - throw e; - } catch (Throwable e) { - Throwable cause = e.getCause(); - if (cause instanceof IOException) { - throw (IOException)cause; - } else - { - throw new IOException( e ); - } - } - } - - } - - private static void wrapWriteFunction(ReadableByteChannel is, RepositoryStorage targetStorage, StorageAsset target, boolean locked) { - try { - targetStorage.writeDataToChannel( target, os -> copy(is, os), locked ); - } catch (Exception e) { - throw new RuntimeException( e ); - } - } - - - private static void copy( final ReadableByteChannel is, final WritableByteChannel os ) { - if (is instanceof FileChannel) { - copy( (FileChannel) is, os ); - } else if (os instanceof FileChannel) { - copy(is, (FileChannel)os); - } else - { - try - { - ByteBuffer buffer = ByteBuffer.allocate( DEFAULT_BUFFER_SIZE ); - while ( is.read( buffer ) != -1 ) - { - buffer.flip( ); - while ( buffer.hasRemaining( ) ) - { - os.write( buffer ); - } - buffer.clear( ); - } - } - catch ( IOException e ) - { - throw new RuntimeException( e ); - } - } - } - - private static void copy( final FileChannel is, final WritableByteChannel os ) { - try - { - is.transferTo( 0, is.size( ), os ); - } - catch ( IOException e ) - { - throw new RuntimeException( e ); - } - } - - private static void copy( final ReadableByteChannel is, final FileChannel os ) { - try - { - os.transferFrom( is, 0, Long.MAX_VALUE ); - } - catch ( IOException e ) - { - throw new RuntimeException( e ); - } - } 
- -} diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/MetadataTools.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/MetadataTools.java index 9df3f49c6..57cc2c331 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/MetadataTools.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/MetadataTools.java @@ -42,6 +42,7 @@ import org.apache.archiva.repository.ContentNotFoundException; import org.apache.archiva.repository.LayoutException; import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.RemoteRepositoryContent; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.xml.XMLException; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang.StringUtils; @@ -369,9 +370,9 @@ public class MetadataTools ProjectReference reference, String proxyId ) { String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) ); - Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath ); + StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath ); - if ( !Files.exists(metadataFile) || !Files.isRegularFile( metadataFile )) + if ( !metadataFile.exists() || metadataFile.isContainer()) { // Nothing to do. return null. return null; @@ -381,11 +382,11 @@ public class MetadataTools { return MavenMetadataReader.read( metadataFile ); } - catch ( XMLException e ) + catch (XMLException | IOException e ) { // TODO: [monitor] consider a monitor for this event. // TODO: consider a read-redo on monitor return code? - log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e ); + log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e ); return null; } } @@ -394,9 +395,9 @@ public class MetadataTools String logicalResource, String proxyId ) { String metadataPath = getRepositorySpecificName( proxyId, logicalResource ); - Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath ); + StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath ); - if ( !Files.exists(metadataFile) || !Files.isRegularFile( metadataFile)) + if ( !metadataFile.exists() || metadataFile.isContainer()) { // Nothing to do. return null. return null; @@ -406,11 +407,11 @@ public class MetadataTools { return MavenMetadataReader.read( metadataFile ); } - catch ( XMLException e ) + catch (XMLException | IOException e ) { // TODO: [monitor] consider a monitor for this event. // TODO: consider a read-redo on monitor return code? - log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e ); + log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e ); return null; } } @@ -419,9 +420,9 @@ public class MetadataTools VersionedReference reference, String proxyId ) { String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) ); - Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath ); + StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath ); - if ( !Files.exists(metadataFile) || !Files.isRegularFile(metadataFile)) + if ( !metadataFile.exists() || metadataFile.isContainer()) { // Nothing to do. return null. 
return null; @@ -431,11 +432,11 @@ public class MetadataTools { return MavenMetadataReader.read( metadataFile ); } - catch ( XMLException e ) + catch (XMLException | IOException e ) { // TODO: [monitor] consider a monitor for this event. // TODO: consider a read-redo on monitor return code? - log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e ); + log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e ); return null; } } @@ -443,7 +444,7 @@ public class MetadataTools public void updateMetadata( ManagedRepositoryContent managedRepository, String logicalResource ) throws RepositoryMetadataException { - final Path metadataFile = Paths.get( managedRepository.getRepoRoot(), logicalResource ); + final StorageAsset metadataFile = managedRepository.getRepository().getAsset( logicalResource ); ArchivaRepositoryMetadata metadata = null; //Gather and merge all metadata available @@ -480,7 +481,7 @@ public class MetadataTools RepositoryMetadataWriter.write( metadata, metadataFile ); - ChecksummedFile checksum = new ChecksummedFile( metadataFile ); + ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() ); checksum.fixChecksums( algorithms ); } @@ -491,30 +492,17 @@ public class MetadataTools * @param metadataParentDirectory * @return origional set plus newly found versions */ - private Set findPossibleVersions( Set versions, Path metadataParentDirectory ) + private Set findPossibleVersions( Set versions, StorageAsset metadataParentDirectory ) { Set result = new HashSet( versions ); - try (Stream stream = Files.list( metadataParentDirectory )) { - stream.filter( Files::isDirectory ).filter( - p -> - { - try(Stream substream = Files.list(p)) - { - return substream.anyMatch( f -> Files.isRegularFile( f ) && f.toString().endsWith( ".pom" )); - } - catch ( IOException e ) - { - return false; - } + metadataParentDirectory.list().stream().filter(asset -> + asset.isContainer()).filter(asset -> { + return asset.list().stream().anyMatch(f -> !f.isContainer() && f.getName().endsWith(".pom")); } - ).forEach( - p -> result.add(p.getFileName().toString()) - ); - } catch (IOException e) { - // - } + ).forEach( p -> result.add(p.getName())); + return result; } @@ -522,8 +510,9 @@ public class MetadataTools ManagedRepositoryContent managedRepository, String logicalResource ) { List metadatas = new ArrayList<>(); - Path file = Paths.get( managedRepository.getRepoRoot(), logicalResource ); - if ( Files.exists(file) ) + StorageAsset file = managedRepository.getRepository().getAsset( logicalResource ); + + if ( file.exists() ) { try { @@ -533,10 +522,14 @@ public class MetadataTools metadatas.add( existingMetadata ); } } - catch ( XMLException e ) + catch (XMLException | IOException e ) { - log.debug( "Could not read metadata at {}. Metadata will be removed.", file.toAbsolutePath() ); - FileUtils.deleteQuietly( file ); + log.debug( "Could not read metadata at {}. 
Metadata will be removed.", file.getPath() ); + try { + file.getStorage().removeAsset(file); + } catch (IOException ex) { + log.error("Could not remove asset {}", file.getPath()); + } } } @@ -578,7 +571,8 @@ public class MetadataTools public void updateMetadata( ManagedRepositoryContent managedRepository, ProjectReference reference ) throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException { - Path metadataFile = Paths.get( managedRepository.getRepoRoot(), toPath( reference ) ); + + StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) ); long lastUpdated = getExistingLastUpdated( metadataFile ); @@ -593,7 +587,7 @@ public class MetadataTools // TODO: do we know this information instead? // Set allPlugins = managedRepository.getPlugins( reference ); Set allPlugins; - if ( Files.exists(metadataFile)) + if ( metadataFile.exists()) { try { @@ -653,7 +647,7 @@ public class MetadataTools // Save the metadata model to disk. RepositoryMetadataWriter.write( metadata, metadataFile ); - ChecksummedFile checksum = new ChecksummedFile( metadataFile ); + ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() ); checksum.fixChecksums( algorithms ); } @@ -748,9 +742,9 @@ public class MetadataTools } } - private long getExistingLastUpdated( Path metadataFile ) + private long getExistingLastUpdated( StorageAsset metadataFile ) { - if ( !Files.exists(metadataFile) ) + if ( !metadataFile.exists() ) { // Doesn't exist. return 0; @@ -762,7 +756,7 @@ public class MetadataTools return getLastUpdated( metadata ); } - catch ( XMLException e ) + catch (XMLException | IOException e ) { // Error. return 0; @@ -788,7 +782,7 @@ public class MetadataTools public void updateMetadata( ManagedRepositoryContent managedRepository, VersionedReference reference ) throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException { - Path metadataFile = Paths.get( managedRepository.getRepoRoot(), toPath( reference ) ); + StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) ); long lastUpdated = getExistingLastUpdated( metadataFile ); @@ -893,7 +887,7 @@ public class MetadataTools // Save the metadata model to disk. 
RepositoryMetadataWriter.write( metadata, metadataFile ); - ChecksummedFile checksum = new ChecksummedFile( metadataFile ); + ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() ); checksum.fixChecksums( algorithms ); } diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/RepositoryMetadataWriter.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/RepositoryMetadataWriter.java index 4585d59ff..6f6bf6608 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/RepositoryMetadataWriter.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/RepositoryMetadataWriter.java @@ -22,6 +22,7 @@ package org.apache.archiva.repository.metadata; import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.model.ArchivaRepositoryMetadata; import org.apache.archiva.model.Plugin; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.xml.XMLException; import org.apache.archiva.xml.XMLWriter; import org.apache.commons.collections4.CollectionUtils; @@ -29,9 +30,12 @@ import org.apache.commons.lang.StringUtils; import org.dom4j.Document; import org.dom4j.DocumentHelper; import org.dom4j.Element; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.FileWriter; import java.io.IOException; +import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.file.Path; import java.util.Collections; @@ -44,11 +48,13 @@ import java.util.List; */ public class RepositoryMetadataWriter { - public static void write( ArchivaRepositoryMetadata metadata, Path outputFile ) + private static final Logger log = LoggerFactory.getLogger(RepositoryMetadataWriter.class); + + public static void write( ArchivaRepositoryMetadata metadata, StorageAsset outputFile ) throws RepositoryMetadataException { boolean thrown = false; - try (FileWriter writer = new FileWriter( outputFile.toFile() )) + try (OutputStreamWriter writer = new OutputStreamWriter( outputFile.getWriteStream(true))) { write( metadata, writer ); writer.flush(); @@ -57,13 +63,17 @@ public class RepositoryMetadataWriter { thrown = true; throw new RepositoryMetadataException( - "Unable to write metadata file: " + outputFile.toAbsolutePath() + " - " + e.getMessage(), e ); + "Unable to write metadata file: " + outputFile.getPath() + " - " + e.getMessage(), e ); } finally { if ( thrown ) { - FileUtils.deleteQuietly( outputFile ); + try { + outputFile.getStorage().removeAsset(outputFile); + } catch (IOException e) { + log.error("Could not remove asset {}", outputFile); + } } } } diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemAssetTest.java b/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemAssetTest.java deleted file mode 100644 index 8e98e59bb..000000000 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemAssetTest.java +++ /dev/null @@ -1,202 +0,0 @@ -package org.apache.archiva.repository.content; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
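Taken together, the MetadataTools and RepositoryMetadataWriter hunks above give the following save path. This is a sketch using only calls shown in the diff; metadata, logicalResource and algorithms are the surrounding MetadataTools state:

    StorageAsset metadataFile = managedRepository.getRepository().getAsset( logicalResource );
    RepositoryMetadataWriter.write( metadata, metadataFile );          // streams through getWriteStream(true)
    // Checksums still operate on a real file, which a file-backed asset exposes via getFilePath():
    ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
    checksum.fixChecksums( algorithms );

Note that getFilePath() is only guaranteed for file-based storages; per the StorageAsset contract below, other implementations may throw UnsupportedOperationException here.
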
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.*; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.time.Instant; - -import static org.junit.Assert.*; - -public class FilesystemAssetTest { - - Path assetPathFile; - Path assetPathDir; - - @Before - public void init() throws IOException { - assetPathFile = Files.createTempFile("assetFile", "dat"); - assetPathDir = Files.createTempDirectory("assetDir"); - } - - @After - public void cleanup() { - - try { - Files.deleteIfExists(assetPathFile); - } catch (IOException e) { - e.printStackTrace(); - } - try { - Files.deleteIfExists(assetPathDir); - } catch (IOException e) { - e.printStackTrace(); - } - } - - - @Test - public void getPath() { - FilesystemAsset asset = new FilesystemAsset("/"+assetPathFile.getFileName().toString(), assetPathFile); - assertEquals("/"+assetPathFile.getFileName().toString(), asset.getPath()); - } - - @Test - public void getName() { - FilesystemAsset asset = new FilesystemAsset("/"+assetPathFile.getFileName().toString(), assetPathFile); - assertEquals(assetPathFile.getFileName().toString(), asset.getName()); - - } - - @Test - public void getModificationTime() throws IOException { - Instant modTime = Files.getLastModifiedTime(assetPathFile).toInstant(); - FilesystemAsset asset = new FilesystemAsset("/test123", assetPathFile); - assertTrue(modTime.equals(asset.getModificationTime())); - } - - @Test - public void isContainer() { - FilesystemAsset asset = new FilesystemAsset("/test1323", assetPathFile); - assertFalse(asset.isContainer()); - FilesystemAsset asset2 = new FilesystemAsset("/test1234", assetPathDir); - assertTrue(asset2.isContainer()); - } - - @Test - public void list() throws IOException { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile); - assertEquals(0, asset.list().size()); - - FilesystemAsset asset2 = new FilesystemAsset("/test1235", assetPathDir); - assertEquals(0, asset2.list().size()); - Path f1 = Files.createTempFile(assetPathDir, "testfile", "dat"); - Path f2 = Files.createTempFile(assetPathDir, "testfile", "dat"); - Path d1 = Files.createTempDirectory(assetPathDir, "testdir"); - assertEquals(3, asset2.list().size()); - assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f1.getFileName().toString()))); - assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f2.getFileName().toString()))); - assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(d1.getFileName().toString()))); - Files.deleteIfExists(f1); - Files.deleteIfExists(f2); - Files.deleteIfExists(d1); - - - } - - @Test - public void getSize() throws IOException { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile); - 
assertEquals(0, asset.getSize()); - - Files.write(assetPathFile, new String("abcdef").getBytes("ASCII")); - assertTrue(asset.getSize()>=6); - - - } - - @Test - public void getData() throws IOException { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile); - Files.write(assetPathFile, "abcdef".getBytes("ASCII")); - try(InputStream is = asset.getReadStream()) { - assertEquals("abcdef", IOUtils.toString(is, "ASCII")); - } - - } - - @Test - public void getDataExceptionOnDir() throws IOException { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathDir); - Files.write(assetPathFile, "abcdef".getBytes("ASCII")); - try { - InputStream is = asset.getReadStream(); - assertFalse("Exception expected for data on dir", true); - } catch (IOException e) { - // fine - } - - } - - @Test - public void writeData() throws IOException { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile); - Files.write(assetPathFile, "abcdef".getBytes("ASCII")); - try(OutputStream os = asset.getWriteStream(true)) { - IOUtils.write("test12345", os, "ASCII"); - } - assertEquals("test12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII")); - } - - @Test - public void writeDataAppend() throws IOException { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile); - Files.write(assetPathFile, "abcdef".getBytes("ASCII")); - try(OutputStream os = asset.getWriteStream(false)) { - IOUtils.write("test12345", os, "ASCII"); - } - assertEquals("abcdeftest12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII")); - } - - @Test - public void writeDataExceptionOnDir() throws IOException { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathDir); - try { - - OutputStream os = asset.getWriteStream(true); - assertTrue("Writing to a directory should throw a IOException", false); - } catch (IOException e) { - // Fine - } - } - - @Test - public void storeDataFile() throws IOException { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile); - Path dataFile = Files.createTempFile("testdata", "dat"); - try(OutputStream os = Files.newOutputStream(dataFile)) { - IOUtils.write("testkdkdkd", os, "ASCII"); - } - asset.replaceDataFromFile(dataFile); - assertEquals("testkdkdkd", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII")); - } - - @Test - public void exists() { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile); - assertTrue(asset.exists()); - FilesystemAsset asset2 = new FilesystemAsset("/test1234", Paths.get("abcdefgkdkdk")); - assertFalse(asset2.exists()); - - } - - @Test - public void getFilePath() { - FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile); - assertEquals(assetPathFile, asset.getFilePath()); - } -} \ No newline at end of file diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemStorageTest.java b/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemStorageTest.java deleted file mode 100644 index 309c755e6..000000000 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemStorageTest.java +++ /dev/null @@ -1,208 +0,0 @@ -package org.apache.archiva.repository.content; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.archiva.common.filelock.DefaultFileLockManager; -import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.file.Files; -import java.nio.file.Path; - -import static org.junit.Assert.*; - -public class FilesystemStorageTest { - - private FilesystemStorage fsStorage; - private FilesystemAsset file1Asset; - private FilesystemAsset dir1Asset; - private Path baseDir; - private Path file1; - private Path dir1; - - @Before - public void init() throws IOException { - baseDir = Files.createTempDirectory("FsStorageTest"); - DefaultFileLockManager fl = new DefaultFileLockManager(); - fsStorage = new FilesystemStorage(baseDir,fl); - Files.createDirectories(baseDir.resolve("dir1")); - Files.createDirectories(baseDir.resolve("dir2")); - file1 = Files.createFile(baseDir.resolve("dir1/testfile1.dat")); - dir1 = Files.createDirectories(baseDir.resolve("dir1/testdir")); - file1Asset = new FilesystemAsset("/dir1/testfile1.dat", file1); - dir1Asset = new FilesystemAsset("/dir1/testdir", dir1); - } - - private class StringResult { - public String getData() { - return data; - } - - public void setData(String data) { - this.data = data; - } - - String data; - } - - - @After - public void cleanup() { - try { - Files.deleteIfExists(file1); - } catch (IOException e) { - e.printStackTrace(); - } - try { - Files.deleteIfExists(dir1); - } catch (IOException e) { - e.printStackTrace(); - } - try { - Files.deleteIfExists(baseDir.resolve("dir1")); - } catch (IOException e) { - e.printStackTrace(); - } - try { - Files.deleteIfExists(baseDir.resolve("dir2")); - } catch (IOException e) { - e.printStackTrace(); - } - try { - Files.deleteIfExists(baseDir); - } catch (IOException e) { - e.printStackTrace(); - } - } - - - - - @Test - public void consumeData() throws IOException { - try(OutputStream os = Files.newOutputStream(file1)) { - IOUtils.write("abcdefghijkl", os, "ASCII"); - } - StringResult result = new StringResult(); - fsStorage.consumeData(file1Asset, is -> consume(is, result), false ); - assertEquals("abcdefghijkl" ,result.getData()); - } - - private void consume(InputStream is, StringResult result) { - try { - result.setData(IOUtils.toString(is, "ASCII")); - } catch (IOException e) { - e.printStackTrace(); - } - } - - - @Test - public void getAsset() { - StorageAsset asset = fsStorage.getAsset("/dir1/testfile1.dat"); - assertEquals(file1, asset.getFilePath()); - } - - @Test - public void addAsset() { - StorageAsset newAsset = fsStorage.addAsset("dir2/test", false); - assertNotNull(newAsset); - assertFalse(newAsset.isContainer()); - assertFalse(newAsset.exists()); - - StorageAsset newDirAsset = fsStorage.addAsset("/dir2/testdir2", true); - 
assertNotNull(newDirAsset); - assertTrue(newDirAsset.isContainer()); - assertFalse(newDirAsset.exists()); - } - - @Test - public void removeAsset() throws IOException { - assertTrue(Files.exists(file1)); - fsStorage.removeAsset(file1Asset); - assertFalse(Files.exists(file1)); - - assertTrue(Files.exists(dir1)); - fsStorage.removeAsset(dir1Asset); - assertFalse(Files.exists(dir1)); - } - - @Test - public void moveAsset() throws IOException { - Path newFile=null; - Path newDir=null; - try { - assertTrue(Files.exists(file1)); - try (OutputStream os = Files.newOutputStream(file1)) { - IOUtils.write("testakdkkdkdkdk", os, "ASCII"); - } - long fileSize = Files.size(file1); - fsStorage.moveAsset(file1Asset, "/dir2/testfile2.dat"); - assertFalse(Files.exists(file1)); - newFile = baseDir.resolve("dir2/testfile2.dat"); - assertTrue(Files.exists(newFile)); - assertEquals(fileSize, Files.size(newFile)); - - - assertTrue(Files.exists(dir1)); - newDir = baseDir.resolve("dir2/testdir2"); - fsStorage.moveAsset(dir1Asset, "dir2/testdir2"); - assertFalse(Files.exists(dir1)); - assertTrue(Files.exists(newDir)); - } finally { - if (newFile!=null) Files.deleteIfExists(newFile); - if (newDir!=null) Files.deleteIfExists(newDir); - } - } - - @Test - public void copyAsset() throws IOException { - Path newFile=null; - Path newDir=null; - try { - assertTrue(Files.exists(file1)); - try (OutputStream os = Files.newOutputStream(file1)) { - IOUtils.write("testakdkkdkdkdk", os, "ASCII"); - } - long fileSize = Files.size(file1); - fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat"); - assertTrue(Files.exists(file1)); - assertEquals(fileSize, Files.size(file1)); - newFile = baseDir.resolve("dir2/testfile2.dat"); - assertTrue(Files.exists(newFile)); - assertEquals(fileSize, Files.size(newFile)); - - - assertTrue(Files.exists(dir1)); - newDir = baseDir.resolve("dir2/testdir2"); - fsStorage.copyAsset(dir1Asset, "dir2/testdir2"); - assertTrue(Files.exists(dir1)); - assertTrue(Files.exists(newDir)); - } finally { - if (newFile!=null) Files.deleteIfExists(newFile); - if (newDir!=null) Files.deleteIfExists(newDir); - } - } -} \ No newline at end of file diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java b/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java index 328f58a5a..27b1e1f45 100644 --- a/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java +++ b/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java @@ -28,7 +28,7 @@ import org.apache.archiva.repository.LayoutException; import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.RepositoryException; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.springframework.stereotype.Service; import java.util.Set; diff --git a/archiva-modules/archiva-base/archiva-repository-scanner/src/main/java/org/apache/archiva/repository/scanner/DefaultRepositoryScanner.java b/archiva-modules/archiva-base/archiva-repository-scanner/src/main/java/org/apache/archiva/repository/scanner/DefaultRepositoryScanner.java index b26bea9bd..30abcb437 100644 --- 
a/archiva-modules/archiva-base/archiva-repository-scanner/src/main/java/org/apache/archiva/repository/scanner/DefaultRepositoryScanner.java +++ b/archiva-modules/archiva-base/archiva-repository-scanner/src/main/java/org/apache/archiva/repository/scanner/DefaultRepositoryScanner.java @@ -25,7 +25,7 @@ import org.apache.archiva.consumers.InvalidRepositoryContentConsumer; import org.apache.archiva.consumers.KnownRepositoryContentConsumer; import org.apache.archiva.consumers.RepositoryContentConsumer; import org.apache.archiva.repository.ManagedRepository; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.collections4.CollectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/RepositoryScannerTest.java b/archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/RepositoryScannerTest.java index 5f6f7e0d5..dec4e8dc9 100644 --- a/archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/RepositoryScannerTest.java +++ b/archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/RepositoryScannerTest.java @@ -29,7 +29,7 @@ import org.apache.archiva.repository.BasicRemoteRepository; import org.apache.archiva.repository.EditableManagedRepository; import org.apache.archiva.repository.EditableRemoteRepository; import org.apache.archiva.repository.ManagedRepository; -import org.apache.archiva.repository.content.FilesystemStorage; +import org.apache.archiva.repository.storage.FilesystemStorage; import org.apache.archiva.repository.scanner.mock.ManagedRepositoryContentMock; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.commons.io.FileUtils; diff --git a/archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/mock/ManagedRepositoryContentMock.java b/archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/mock/ManagedRepositoryContentMock.java index ade81214a..dadf98a35 100644 --- a/archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/mock/ManagedRepositoryContentMock.java +++ b/archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/mock/ManagedRepositoryContentMock.java @@ -19,6 +19,7 @@ package org.apache.archiva.repository.scanner.mock; * under the License. 
*/ +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.utils.VersionUtil; import org.apache.archiva.metadata.model.ArtifactMetadata; import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet; @@ -27,9 +28,11 @@ import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.model.ProjectReference; import org.apache.archiva.model.VersionedReference; import org.apache.archiva.repository.*; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; +import java.io.IOException; import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; @@ -48,6 +51,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent private ManagedRepository repository; + private FilesystemStorage fsStorage; public ManagedRepositoryContentMock(ManagedRepository repo) { this.repository = repo; @@ -92,7 +96,18 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent @Override public String getRepoRoot( ) { - return Paths.get("", "target", "test-repository", "managed").toString(); + return getRepoRootAsset().getFilePath().toString(); + } + + private StorageAsset getRepoRootAsset() { + if (fsStorage==null) { + try { + fsStorage = new FilesystemStorage(Paths.get("", "target", "test-repository", "managed"), new DefaultFileLockManager()); + } catch (IOException e) { + e.printStackTrace(); + } + } + return fsStorage.getAsset(""); } @Override @@ -329,7 +344,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent @Override public StorageAsset toFile( ArtifactReference reference ) { - return Paths.get(getRepoRoot(), refs.get(reference)); + return getRepoRootAsset().resolve(refs.get(reference)); } @Override diff --git a/archiva-modules/archiva-base/archiva-storage-api/pom.xml b/archiva-modules/archiva-base/archiva-storage-api/pom.xml new file mode 100644 index 000000000..06c853dda --- /dev/null +++ b/archiva-modules/archiva-base/archiva-storage-api/pom.xml @@ -0,0 +1,35 @@ + + + + archiva-base + org.apache.archiva + 3.0.0-SNAPSHOT + + 4.0.0 + + archiva-storage-api + + Archiva Base :: Repository API + + + ${project.parent.parent.basedir} + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + ${basedir} + + + + + + + \ No newline at end of file diff --git a/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/RepositoryStorage.java b/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/RepositoryStorage.java new file mode 100644 index 000000000..68ad39b81 --- /dev/null +++ b/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/RepositoryStorage.java @@ -0,0 +1,159 @@ +package org.apache.archiva.repository.storage; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
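The mock above shows the intended bootstrap for file-backed storage: build a FilesystemStorage over a base directory and hand out assets by repository-relative path. A minimal sketch along those lines (the artifact path is hypothetical):

    static void demo() throws IOException {
        FilesystemStorage storage = new FilesystemStorage(
                Paths.get( "target", "test-repository", "managed" ), new DefaultFileLockManager() );
        StorageAsset root = storage.getAsset( "" );                            // repository root as an asset
        StorageAsset jar = root.resolve( "org/test/demo/1.0/demo-1.0.jar" );   // hypothetical artifact path
    }
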
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.file.CopyOption; +import java.util.function.Consumer; + +/** + * + * This is the low level API to access artifacts in a repository. Each artifact is represented + * by one storage asset. Each asset can be accessed by a path that is independent on the underlying storage + * implementation. Paths always use '/' as path separator. The path is local to the repository and + * is unique for each asset. + * The storage API knows nothing about the repository layout or repository specific metadata. + * If you use this API you must either have knowledge about the specific repository layout or use the structure + * as it is, e.g. for browsing. + * + * The base implementation for the storage uses a directory structure on the local filesystem. + * + * + * It is the decision of the repository type specific implementation, if this API provides access to all elements, that + * is really stored or just a selected view. + * + * Checking access is not part of this API. + */ +public interface RepositoryStorage { + /** + * Returns information about a specific storage asset. + * @param path + * @return + */ + StorageAsset getAsset(String path); + + /** + * Consumes the data and sets a lock for the file during the operation. + * + * @param asset The asset from which the data is consumed. + * @param consumerFunction The consumer that reads the data + * @param readLock If true, a read lock is acquired on the asset. + * @throws IOException + */ + void consumeData(StorageAsset asset, Consumer consumerFunction, boolean readLock) throws IOException; + + /** + * Consumes the data and sets a lock for the file during the operation. + * + * @param asset The asset from which the data is consumed. + * @param consumerFunction The consumer that reads the data + * @param readLock If true, a read lock is acquired on the asset. + * @throws IOException + */ + void consumeDataFromChannel( StorageAsset asset, Consumer consumerFunction, boolean readLock) throws IOException; + + /** + * Writes data to the asset using a write lock. + * + * @param asset The asset to which the data is written. + * @param consumerFunction The function that provides the data. + * @param writeLock If true, a write lock is acquired on the destination. + */ + void writeData( StorageAsset asset, Consumer consumerFunction, boolean writeLock) throws IOException;; + + /** + * Writes data and sets a lock during the operation. + * + * @param asset The asset to which the data is written. + * @param consumerFunction The function that provides the data. + * @param writeLock If true, a write lock is acquired on the destination. + * @throws IOException + */ + void writeDataToChannel( StorageAsset asset, Consumer consumerFunction, boolean writeLock) throws IOException; + + /** + * Adds a new asset to the underlying storage. + * @param path The path to the asset. + * @param container True, if the asset should be a container, false, if it is a file. 
+ * @return + */ + StorageAsset addAsset(String path, boolean container); + + /** + * Removes the given asset from the storage. + * + * @param asset + * @throws IOException + */ + void removeAsset(StorageAsset asset) throws IOException; + + /** + * Moves the asset to the given location and returns the asset object for the destination. Moves only assets that + * belong to the same storage instance. It will throw a IOException if the assets are from differents storage + * instances. + * + * @param origin The original asset + * @param destination The destination path pointing to the new asset. + * @param copyOptions The copy options. + * @return The asset representation of the moved object. + */ + StorageAsset moveAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException; + + /** + * Moves the asset to the given location and returns the asset object for the destination. Moves only assets that + * belong to the same storage instance. It will throw a IOException if the assets are from differents storage + * instances. + * * + * @param origin The original asset + * @param destination The destination path. + * @param copyOptions The copy options (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING} + * @throws IOException If it was not possible to copy the asset. + */ + void moveAsset(StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException; + + /** + * Copies the given asset to the new destination. Copies only assets that belong to the same storage instance. + * It will throw a IOException if the assets are from differents storage instances. + * + * @param origin The original asset + * @param destination The path to the new asset + * @param copyOptions The copy options, e.g. (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING} + * @return The asset representation of the copied object + * @throws IOException If it was not possible to copy the asset + */ + StorageAsset copyAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException; + + /** + * Copies the given asset to the new destination. Copies only assets that belong to the same storage instance. + * It will throw a IOException if the assets are from differents storage instances. + * + * @param origin The original asset + * @param destination The path to the new asset + * @param copyOptions The copy options, e.g. (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING} + * @throws IOException If it was not possible to copy the asset + */ + void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException; + + +} diff --git a/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/StorageAsset.java b/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/StorageAsset.java new file mode 100644 index 000000000..5e6b52987 --- /dev/null +++ b/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/StorageAsset.java @@ -0,0 +1,186 @@ +package org.apache.archiva.repository.storage; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
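A round trip through the interface above, as a sketch: the path is hypothetical, the lambdas receive the asset's InputStream/OutputStream, and the boolean asks the storage to lock the file for the duration of the call.

    static void roundTrip( RepositoryStorage storage ) throws IOException {
        StorageAsset asset = storage.addAsset( "/org/test/demo/maven-metadata.xml", false );
        asset.create();                         // addAsset only registers the path, create() materializes it
        storage.writeData( asset, out -> {
            try { out.write( "<metadata/>".getBytes( StandardCharsets.UTF_8 ) ); }
            catch ( IOException e ) { throw new UncheckedIOException( e ); }
        }, true );                              // true = hold a write lock while writing
        storage.consumeData( asset, in -> { /* read the bytes back */ }, true );
    }
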
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.file.Path; +import java.time.Instant; +import java.util.List; + +/** + * A instance of this interface represents information about a specific asset in a repository. + * The asset may be an real artifact, a directory, or a virtual asset. + * + * Each asset has a unique path relative to the repository. + * + * The implementation may read the data directly from the filesystem or underlying storage implementation. + * + * @author Martin Stockhammer + */ +public interface StorageAsset +{ + + /** + * Returns the storage this asset belongs to. + * @return + */ + RepositoryStorage getStorage(); + + /** + * Returns the complete path relative to the repository to the given asset. + * + * @return A path starting with '/' that uniquely identifies the asset in the repository. + */ + String getPath(); + + /** + * Returns the name of the asset. It may be just the filename. + * @return + */ + String getName(); + + /** + * Returns the time of the last modification. + * + * @return + */ + Instant getModificationTime(); + + /** + * Returns true, if this asset is a container type and contains further child assets. + * @return + */ + boolean isContainer(); + + /** + * List the child assets. + * + * @return The list of children. If there are no children and if the asset is not a container, a empty list will be returned. + */ + List list(); + + /** + * The size in bytes of the asset. If the asset does not have a size, -1 should be returned. + * + * @return The size if the asset has a size, otherwise -1 + */ + long getSize(); + + /** + * Returns the input stream of the artifact content. + * It will throw a IOException, if the stream could not be created. + * Implementations should create a new stream instance for each invocation and make sure that the + * stream is proper closed after usage. + * + * @return The InputStream representing the content of the artifact. + * @throws IOException + */ + InputStream getReadStream() throws IOException; + + /** + * Returns a NIO representation of the data. + * + * @return A channel to the asset data. + * @throws IOException + */ + ReadableByteChannel getReadChannel() throws IOException; + + /** + * + * Returns an output stream where you can write data to the asset. The operation is not locked or synchronized. + * User of this method have to make sure, that the stream is proper closed after usage. + * + * @param replace If true, the original data will be replaced, otherwise the data will be appended. + * @return The OutputStream where the data can be written. + * @throws IOException + */ + OutputStream getWriteStream( boolean replace) throws IOException; + + /** + * Returns a NIO representation of the asset where you can write the data. 
+ * + * @param replace True, if the content should be replaced by the data written to the stream. + * @return The Channel for writing the data. + * @throws IOException + */ + WritableByteChannel getWriteChannel( boolean replace) throws IOException; + + /** + * Replaces the content. The implementation may do an atomic move operation, or keep a backup. If + * the operation fails, the implementation should try to restore the old data, if possible. + * + * The original file may be deleted, if the storage was successful. + * + * @param newData Replaces the data by the content of the given file. + */ + boolean replaceDataFromFile( Path newData) throws IOException; + + /** + * Returns true, if the asset exists. + * + * @return True, if the asset exists, otherwise false. + */ + boolean exists(); + + /** + * Creates the asset in the underlying storage, if it does not exist. + */ + void create() throws IOException; + + /** + * Returns the real path to the asset, if it exist. Not all implementations may implement this method. + * The method throws {@link UnsupportedOperationException}, if and only if {@link #isFileBased()} returns false. + * + * @return The filesystem path to the asset. + * @throws UnsupportedOperationException If the underlying storage is not file based. + */ + Path getFilePath() throws UnsupportedOperationException; + + /** + * Returns true, if the asset can return a file path for the given asset. If this is true, the {@link #getFilePath()} + * will not throw a {@link UnsupportedOperationException} + * + * @return + */ + boolean isFileBased(); + + /** + * Returns true, if there is a parent to this asset. + * @return + */ + boolean hasParent(); + + /** + * Returns the parent of this asset. + * @return The asset, or null, if it does not exist. + */ + StorageAsset getParent(); + + /** + * Returns the asset relative to the given path + * @param toPath + * @return + */ + StorageAsset resolve(String toPath); +} diff --git a/archiva-modules/archiva-base/archiva-storage-fs/pom.xml b/archiva-modules/archiva-base/archiva-storage-fs/pom.xml new file mode 100644 index 000000000..d128bed2b --- /dev/null +++ b/archiva-modules/archiva-base/archiva-storage-fs/pom.xml @@ -0,0 +1,58 @@ + + + + archiva-base + org.apache.archiva + 3.0.0-SNAPSHOT + + 4.0.0 + + archiva-storage-fs + + Archiva Base :: Storage Filesystem Based + + + ${project.parent.parent.basedir} + + + + + + org.apache.archiva + archiva-storage-api + + + org.apache.archiva + archiva-filelock + + + + commons-io + commons-io + + + + + junit + junit + test + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + ${basedir} + + + + + + + \ No newline at end of file diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemAsset.java b/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemAsset.java new file mode 100644 index 000000000..b343c7b0e --- /dev/null +++ b/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemAsset.java @@ -0,0 +1,494 @@ +package org.apache.archiva.repository.storage; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
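Using only the StorageAsset contract above, a repository tree can be walked without touching java.nio.file directly. A sketch (assumes whatever storage implementation sits behind the interface):

    static void walk( StorageAsset asset ) throws IOException {
        if ( asset.isContainer() ) {
            for ( StorageAsset child : asset.list() ) {
                walk( child );                                   // containers list their children
            }
        } else if ( asset.exists() ) {
            try ( InputStream in = asset.getReadStream() ) {     // fresh stream per call; close after use
                // consume the artifact bytes here
            }
        }
    }
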
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.channels.FileChannel; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.file.*; +import java.nio.file.attribute.*; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Implementation of an asset that is stored on the filesystem. + *

+ * The implementation does not check the given paths. Callers should normalize the asset path + * and check whether the base path is a parent of the resulting path. + *

+ * The file must not exist for all operations. + * + * @author Martin Stockhammer + */ +public class FilesystemAsset implements StorageAsset { + + private final static Logger log = LoggerFactory.getLogger(FilesystemAsset.class); + + private final Path basePath; + private final Path assetPath; + private final String relativePath; + + public static final String DEFAULT_POSIX_FILE_PERMS = "rw-rw----"; + public static final String DEFAULT_POSIX_DIR_PERMS = "rwxrwx---"; + + public static final Set DEFAULT_POSIX_FILE_PERMISSIONS; + public static final Set DEFAULT_POSIX_DIR_PERMISSIONS; + + public static final AclEntryPermission[] DEFAULT_ACL_FILE_PERMISSIONS = new AclEntryPermission[]{ + AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL, + AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA + }; + + public static final AclEntryPermission[] DEFAULT_ACL_DIR_PERMISSIONS = new AclEntryPermission[]{ + AclEntryPermission.ADD_FILE, AclEntryPermission.ADD_SUBDIRECTORY, AclEntryPermission.DELETE_CHILD, + AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL, + AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA + }; + + static { + + DEFAULT_POSIX_FILE_PERMISSIONS = PosixFilePermissions.fromString(DEFAULT_POSIX_FILE_PERMS); + DEFAULT_POSIX_DIR_PERMISSIONS = PosixFilePermissions.fromString(DEFAULT_POSIX_DIR_PERMS); + } + + Set defaultPosixFilePermissions = DEFAULT_POSIX_FILE_PERMISSIONS; + Set defaultPosixDirectoryPermissions = DEFAULT_POSIX_DIR_PERMISSIONS; + + List defaultFileAcls; + List defaultDirectoryAcls; + + boolean supportsAcl = false; + boolean supportsPosix = false; + final boolean setPermissionsForNew; + final RepositoryStorage storage; + + boolean directoryHint = false; + + private static final OpenOption[] REPLACE_OPTIONS = new OpenOption[]{StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE}; + private static final OpenOption[] APPEND_OPTIONS = new OpenOption[]{StandardOpenOption.APPEND}; + + + FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath) { + this.assetPath = assetPath; + this.relativePath = normalizePath(path); + this.setPermissionsForNew=false; + this.basePath = basePath; + this.storage = storage; + init(); + } + + /** + * Creates an asset for the given path. The given paths are not checked. + * The base path should be an absolute path. + * + * @param path The logical path for the asset relative to the repository. + * @param assetPath The asset path. + */ + public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath) { + this.assetPath = assetPath; + this.relativePath = normalizePath(path); + this.setPermissionsForNew = false; + this.basePath = null; + this.storage = storage; + init(); + } + + /** + * Creates an asset for the given path. The given paths are not checked. + * The base path should be an absolute path. + * + * @param path The logical path for the asset relative to the repository + * @param assetPath The asset path. + * @param directory This is only relevant, if the represented file or directory does not exist yet and + * is a hint. 
+ */ + public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory) { + this.assetPath = assetPath; + this.relativePath = normalizePath(path); + this.directoryHint = directory; + this.setPermissionsForNew = false; + this.basePath = basePath; + this.storage = storage; + init(); + } + + /** + * Creates an asset for the given path. The given paths are not checked. + * The base path should be an absolute path. + * + * @param path The logical path for the asset relative to the repository + * @param assetPath The asset path. + * @param directory This is only relevant, if the represented file or directory does not exist yet and + * is a hint. + */ + public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) { + this.assetPath = assetPath; + this.relativePath = normalizePath(path); + this.directoryHint = directory; + this.setPermissionsForNew = setPermissionsForNew; + this.basePath = basePath; + this.storage = storage; + init(); + } + + private String normalizePath(String path) { + if (!path.startsWith("/")) { + return "/"+path; + } else { + return path; + } + } + + private void init() { + + if (setPermissionsForNew) { + try { + supportsAcl = Files.getFileStore(assetPath.getRoot()).supportsFileAttributeView(AclFileAttributeView.class); + } catch (IOException e) { + log.error("Could not check filesystem capabilities {}", e.getMessage()); + } + try { + supportsPosix = Files.getFileStore(assetPath.getRoot()).supportsFileAttributeView(PosixFileAttributeView.class); + } catch (IOException e) { + log.error("Could not check filesystem capabilities {}", e.getMessage()); + } + + if (supportsAcl) { + AclFileAttributeView aclView = Files.getFileAttributeView(assetPath.getParent(), AclFileAttributeView.class); + UserPrincipal owner = null; + try { + owner = aclView.getOwner(); + setDefaultFileAcls(processPermissions(owner, DEFAULT_ACL_FILE_PERMISSIONS)); + setDefaultDirectoryAcls(processPermissions(owner, DEFAULT_ACL_DIR_PERMISSIONS)); + + } catch (IOException e) { + supportsAcl = false; + } + + + } + } + } + + private List processPermissions(UserPrincipal owner, AclEntryPermission[] defaultAclFilePermissions) { + AclEntry.Builder aclBuilder = AclEntry.newBuilder(); + aclBuilder.setPermissions(defaultAclFilePermissions); + aclBuilder.setType(AclEntryType.ALLOW); + aclBuilder.setPrincipal(owner); + ArrayList aclList = new ArrayList<>(); + aclList.add(aclBuilder.build()); + return aclList; + } + + + @Override + public RepositoryStorage getStorage( ) + { + return storage; + } + + @Override + public String getPath() { + return relativePath; + } + + @Override + public String getName() { + return assetPath.getFileName().toString(); + } + + @Override + public Instant getModificationTime() { + try { + return Files.getLastModifiedTime(assetPath).toInstant(); + } catch (IOException e) { + log.error("Could not read modification time of {}", assetPath); + return Instant.now(); + } + } + + /** + * Returns true, if the path of this asset points to a directory + * + * @return + */ + @Override + public boolean isContainer() { + if (Files.exists(assetPath)) { + return Files.isDirectory(assetPath); + } else { + return directoryHint; + } + } + + /** + * Returns the list of directory entries, if this asset represents a directory. + * Otherwise a empty list will be returned. + * + * @return The list of entries in the directory, if it exists. 
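The constructors above take the owning storage as their first argument, so a standalone asset can be built the way the removed FilesystemAssetTest did, just with the storage passed in. A sketch, where storage is assumed to be an existing FilesystemStorage:

    Path file = Files.createTempFile( "asset", ".dat" );
    FilesystemAsset asset = new FilesystemAsset( storage, "/" + file.getFileName(), file );
    assert asset.exists() && !asset.isContainer();
    assert asset.getPath().startsWith( "/" );        // normalizePath() adds the leading slash if missing
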
+ */ + @Override + public List list() { + try { + return Files.list(assetPath).map(p -> new FilesystemAsset(storage, relativePath + "/" + p.getFileName().toString(), assetPath.resolve(p))) + .collect(Collectors.toList()); + } catch (IOException e) { + return Collections.EMPTY_LIST; + } + } + + /** + * Returns the size of the represented file. If it cannot be determined, -1 is returned. + * + * @return + */ + @Override + public long getSize() { + try { + return Files.size(assetPath); + } catch (IOException e) { + return -1; + } + } + + /** + * Returns a input stream to the underlying file, if it exists. The caller has to make sure, that + * the stream is closed after it was used. + * + * @return + * @throws IOException + */ + @Override + public InputStream getReadStream() throws IOException { + if (isContainer()) { + throw new IOException("Can not create input stream for container"); + } + return Files.newInputStream(assetPath); + } + + @Override + public ReadableByteChannel getReadChannel( ) throws IOException + { + return FileChannel.open( assetPath, StandardOpenOption.READ ); + } + + private OpenOption[] getOpenOptions(boolean replace) { + return replace ? REPLACE_OPTIONS : APPEND_OPTIONS; + } + + @Override + public OutputStream getWriteStream( boolean replace) throws IOException { + OpenOption[] options = getOpenOptions( replace ); + if (!Files.exists( assetPath )) { + create(); + } + return Files.newOutputStream(assetPath, options); + } + + @Override + public WritableByteChannel getWriteChannel( boolean replace ) throws IOException + { + OpenOption[] options = getOpenOptions( replace ); + return FileChannel.open( assetPath, options ); + } + + @Override + public boolean replaceDataFromFile( Path newData) throws IOException { + final boolean createNew = !Files.exists(assetPath); + Path backup = null; + if (!createNew) { + backup = findBackupFile(assetPath); + } + try { + if (!createNew) { + Files.move(assetPath, backup); + } + Files.move(newData, assetPath, StandardCopyOption.REPLACE_EXISTING); + applyDefaultPermissions(assetPath); + return true; + } catch (IOException e) { + log.error("Could not overwrite file {}", assetPath); + // Revert if possible + if (backup != null && Files.exists(backup)) { + Files.move(backup, assetPath, StandardCopyOption.REPLACE_EXISTING); + } + throw e; + } finally { + if (backup != null) { + try { + Files.deleteIfExists(backup); + } catch (IOException e) { + log.error("Could not delete backup file {}", backup); + } + } + } + + } + + private void applyDefaultPermissions(Path filePath) { + try { + if (supportsPosix) { + Set perms; + if (Files.isDirectory(filePath)) { + perms = defaultPosixFilePermissions; + } else { + perms = defaultPosixDirectoryPermissions; + } + Files.setPosixFilePermissions(filePath, perms); + } else if (supportsAcl) { + List perms; + if (Files.isDirectory(filePath)) { + perms = getDefaultDirectoryAcls(); + } else { + perms = getDefaultFileAcls(); + } + AclFileAttributeView aclAttr = Files.getFileAttributeView(filePath, AclFileAttributeView.class); + aclAttr.setAcl(perms); + } + } catch (IOException e) { + log.error("Could not set permissions for {}: {}", filePath, e.getMessage()); + } + } + + private Path findBackupFile(Path file) { + String ext = ".bak"; + Path backupPath = file.getParent().resolve(file.getFileName().toString() + ext); + int idx = 0; + while (Files.exists(backupPath)) { + backupPath = file.getParent().resolve(file.getFileName().toString() + ext + idx++); + } + return backupPath; + } + + @Override + public boolean 
exists() { + return Files.exists(assetPath); + } + + @Override + public Path getFilePath() throws UnsupportedOperationException { + return assetPath; + } + + @Override + public boolean isFileBased( ) + { + return true; + } + + @Override + public boolean hasParent( ) + { + if (basePath!=null && assetPath.equals(basePath)) { + return false; + } + return assetPath.getParent()!=null; + } + + @Override + public StorageAsset getParent( ) + { + Path parentPath; + if (basePath!=null && assetPath.equals( basePath )) { + parentPath=null; + } else + { + parentPath = assetPath.getParent( ); + } + String relativeParent = StringUtils.substringBeforeLast( relativePath,"/"); + if (parentPath!=null) { + return new FilesystemAsset(storage, relativeParent, parentPath, basePath, true, setPermissionsForNew ); + } else { + return null; + } + } + + @Override + public StorageAsset resolve(String toPath) { + return storage.getAsset(this.getPath()+"/"+toPath); + } + + + public void setDefaultFileAcls(List acl) { + defaultFileAcls = acl; + } + + public List getDefaultFileAcls() { + return defaultFileAcls; + } + + public void setDefaultPosixFilePermissions(Set perms) { + defaultPosixFilePermissions = perms; + } + + public Set getDefaultPosixFilePermissions() { + return defaultPosixFilePermissions; + } + + public void setDefaultDirectoryAcls(List acl) { + defaultDirectoryAcls = acl; + } + + public List getDefaultDirectoryAcls() { + return defaultDirectoryAcls; + } + + public void setDefaultPosixDirectoryPermissions(Set perms) { + defaultPosixDirectoryPermissions = perms; + } + + public Set getDefaultPosixDirectoryPermissions() { + return defaultPosixDirectoryPermissions; + } + + @Override + public void create() throws IOException { + if (!Files.exists(assetPath)) { + if (directoryHint) { + Files.createDirectories(assetPath); + } else { + if (!Files.exists( assetPath.getParent() )) { + Files.createDirectories( assetPath.getParent( ) ); + } + Files.createFile(assetPath); + } + if (setPermissionsForNew) { + applyDefaultPermissions(assetPath); + } + } + } + + @Override + public String toString() { + return relativePath+":"+assetPath; + } + +} diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemStorage.java b/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemStorage.java new file mode 100644 index 000000000..860a7e9d7 --- /dev/null +++ b/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemStorage.java @@ -0,0 +1,388 @@ +package org.apache.archiva.repository.storage; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
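For new assets, create() above materializes the path: for a plain file it first creates any missing parent directories, then the empty file, and applies the default POSIX/ACL permissions when setPermissionsForNew is enabled. A sketch using the long constructor shown earlier; storage and basePath are assumed to exist:

    FilesystemAsset asset = new FilesystemAsset( storage, "/dir1/report.txt",
            basePath.resolve( "dir1/report.txt" ), basePath, false, true );
    if ( !asset.exists() ) {
        asset.create();   // creates dir1/ if missing, then the empty file, then applies default permissions
    }
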
+ */ + +import org.apache.archiva.common.filelock.FileLockException; +import org.apache.archiva.common.filelock.FileLockManager; +import org.apache.archiva.common.filelock.FileLockTimeoutException; +import org.apache.archiva.common.filelock.Lock; +import org.apache.commons.io.FileUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.channels.FileChannel; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.file.CopyOption; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.nio.file.StandardOpenOption; +import java.util.function.Consumer; + +/** + * Implementation of {@link RepositoryStorage} where data is stored in the filesystem. + * + * All files are relative to a given base path. Path values are separated by '/', '..' is allowed to navigate + * to a parent directory, but navigation out of the base path will lead to a exception. + */ +public class FilesystemStorage implements RepositoryStorage { + + private static final Logger log = LoggerFactory.getLogger(FilesystemStorage.class); + + private final Path basePath; + private final FileLockManager fileLockManager; + + public FilesystemStorage(Path basePath, FileLockManager fileLockManager) throws IOException { + if (!Files.exists(basePath)) { + Files.createDirectories(basePath); + } + this.basePath = basePath.normalize().toRealPath(); + this.fileLockManager = fileLockManager; + } + + private Path normalize(final String path) { + String nPath = path; + while (nPath.startsWith("/")) { + nPath = nPath.substring(1); + } + return Paths.get(nPath); + } + + private Path getAssetPath(String path) throws IOException { + Path assetPath = basePath.resolve(normalize(path)).normalize(); + if (!assetPath.startsWith(basePath)) + { + throw new IOException("Path navigation out of allowed scope: "+path); + } + return assetPath; + } + + @Override + public void consumeData(StorageAsset asset, Consumer consumerFunction, boolean readLock ) throws IOException + { + final Path path = asset.getFilePath(); + try { + if (readLock) { + consumeDataLocked( path, consumerFunction ); + } else + { + try ( InputStream is = Files.newInputStream( path ) ) + { + consumerFunction.accept( is ); + } + catch ( IOException e ) + { + log.error("Could not read the input stream from file {}", path); + throw e; + } + } + } catch (RuntimeException e) + { + log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() ); + throw new IOException( e ); + } + + } + + @Override + public void consumeDataFromChannel( StorageAsset asset, Consumer consumerFunction, boolean readLock ) throws IOException + { + final Path path = asset.getFilePath(); + try { + if (readLock) { + consumeDataFromChannelLocked( path, consumerFunction ); + } else + { + try ( FileChannel is = FileChannel.open( path, StandardOpenOption.READ ) ) + { + consumerFunction.accept( is ); + } + catch ( IOException e ) + { + log.error("Could not read the input stream from file {}", path); + throw e; + } + } + } catch (RuntimeException e) + { + log.error( "Runtime exception during data consume from artifact {}. 
Error: {}", path, e.getMessage() ); + throw new IOException( e ); + } + } + + @Override + public void writeData( StorageAsset asset, Consumer consumerFunction, boolean writeLock ) throws IOException + { + final Path path = asset.getFilePath(); + try { + if (writeLock) { + writeDataLocked( path, consumerFunction ); + } else + { + try ( OutputStream is = Files.newOutputStream( path ) ) + { + consumerFunction.accept( is ); + } + catch ( IOException e ) + { + log.error("Could not write the output stream to file {}", path); + throw e; + } + } + } catch (RuntimeException e) + { + log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() ); + throw new IOException( e ); + } + + } + + @Override + public void writeDataToChannel( StorageAsset asset, Consumer consumerFunction, boolean writeLock ) throws IOException + { + final Path path = asset.getFilePath(); + try { + if (writeLock) { + writeDataToChannelLocked( path, consumerFunction ); + } else + { + try ( FileChannel os = FileChannel.open( path, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE )) + { + consumerFunction.accept( os ); + } + catch ( IOException e ) + { + log.error("Could not write the data to file {}", path); + throw e; + } + } + } catch (RuntimeException e) + { + log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() ); + throw new IOException( e ); + } + } + + private void consumeDataLocked( Path file, Consumer consumerFunction) throws IOException + { + + final Lock lock; + try + { + lock = fileLockManager.readFileLock( file ); + try ( InputStream is = Files.newInputStream( lock.getFile())) + { + consumerFunction.accept( is ); + } + catch ( IOException e ) + { + log.error("Could not read the input stream from file {}", file); + throw e; + } finally + { + fileLockManager.release( lock ); + } + } + catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e) + { + log.error("Locking error on file {}", file); + throw new IOException(e); + } + } + + private void consumeDataFromChannelLocked( Path file, Consumer consumerFunction) throws IOException + { + + final Lock lock; + try + { + lock = fileLockManager.readFileLock( file ); + try ( FileChannel is = FileChannel.open( lock.getFile( ), StandardOpenOption.READ )) + { + consumerFunction.accept( is ); + } + catch ( IOException e ) + { + log.error("Could not read the input stream from file {}", file); + throw e; + } finally + { + fileLockManager.release( lock ); + } + } + catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e) + { + log.error("Locking error on file {}", file); + throw new IOException(e); + } + } + + + private void writeDataLocked( Path file, Consumer consumerFunction) throws IOException + { + + final Lock lock; + try + { + lock = fileLockManager.writeFileLock( file ); + try ( OutputStream is = Files.newOutputStream( lock.getFile())) + { + consumerFunction.accept( is ); + } + catch ( IOException e ) + { + log.error("Could not write the output stream to file {}", file); + throw e; + } finally + { + fileLockManager.release( lock ); + } + } + catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e) + { + log.error("Locking error on file {}", file); + throw new IOException(e); + } + } + + private void writeDataToChannelLocked( Path file, Consumer consumerFunction) throws IOException + { + + final Lock lock; + try + { + lock = fileLockManager.writeFileLock( file ); + try ( FileChannel 
is = FileChannel.open( lock.getFile( ), StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE )) + { + consumerFunction.accept( is ); + } + catch ( IOException e ) + { + log.error("Could not write to file {}", file); + throw e; + } finally + { + fileLockManager.release( lock ); + } + } + catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e) + { + log.error("Locking error on file {}", file); + throw new IOException(e); + } + } + + @Override + public StorageAsset getAsset( String path ) + { + try { + return new FilesystemAsset(this, path, getAssetPath(path)); + } catch (IOException e) { + throw new IllegalArgumentException("Path navigates outside of base directory "+path); + } + } + + @Override + public StorageAsset addAsset( String path, boolean container ) + { + try { + return new FilesystemAsset(this, path, getAssetPath(path), basePath, container); + } catch (IOException e) { + throw new IllegalArgumentException("Path navigates outside of base directory "+path); + } + } + + @Override + public void removeAsset( StorageAsset asset ) throws IOException + { + Files.delete(asset.getFilePath()); + } + + @Override + public StorageAsset moveAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException + { + boolean container = origin.isContainer(); + FilesystemAsset newAsset = new FilesystemAsset(this, destination, getAssetPath(destination), basePath, container ); + moveAsset( origin, newAsset, copyOptions ); + return newAsset; + } + + @Override + public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException + { + if (origin.getStorage()!=this) { + throw new IOException("The origin asset does not belong to this storage instance. Cannot copy between different storage instances."); + } + if (destination.getStorage()!=this) { + throw new IOException("The destination asset does not belong to this storage instance. Cannot copy between different storage instances."); + } + Files.move(origin.getFilePath(), destination.getFilePath(), copyOptions); + } + + @Override + public StorageAsset copyAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException + { + boolean container = origin.isContainer(); + FilesystemAsset newAsset = new FilesystemAsset(this, destination, getAssetPath(destination), basePath, container ); + copyAsset( origin, newAsset, copyOptions ); + return newAsset; + } + + @Override + public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException + { + if (origin.getStorage()!=this) { + throw new IOException("The origin asset does not belong to this storage instance. Cannot copy between different storage instances."); + } + if (destination.getStorage()!=this) { + throw new IOException("The destination asset does not belong to this storage instance. Cannot copy between different storage instances."); + } + Path destinationPath = destination.getFilePath(); + boolean overwrite = false; + for (int i=0; i + */ +public class StorageUtil +{ + private static final int DEFAULT_BUFFER_SIZE = 4096; + private static final Logger log = LoggerFactory.getLogger(StorageUtil.class); + + /** + * Copies the source asset to the target. The assets may be from different RepositoryStorage instances. + * If you know that source and asset are from the same storage instance, the copy method of the storage + * instance may be faster. 
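As an aside (not part of this patch), a minimal usage sketch of the copy helper described above; the two base directories and the artifact path are made-up placeholders, and both storages in this sketch happen to be file based, so the helper takes the filesystem shortcut:

import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;

import java.io.IOException;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class CopyAssetSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder directories, for illustration only
        FileLockManager lockManager = new DefaultFileLockManager();
        FilesystemStorage source = new FilesystemStorage(Paths.get("/tmp/repo-a"), lockManager);
        FilesystemStorage target = new FilesystemStorage(Paths.get("/tmp/repo-b"), lockManager);

        StorageAsset sourceAsset = source.getAsset("org/example/demo/1.0/demo-1.0.jar");
        StorageAsset targetAsset = target.addAsset("org/example/demo/1.0/demo-1.0.jar", false);
        // Create the target file (and its parent directories) before copying
        targetAsset.create();

        // locked=true takes a read lock on the source and a write lock on the target
        StorageUtil.copyAsset(sourceAsset, targetAsset, true, StandardCopyOption.REPLACE_EXISTING);
    }
}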
+ * + * @param source The source asset + * @param target The target asset + * @param locked If true, a readlock is set on the source and a write lock is set on the target. + * @param copyOptions Copy options + * @throws IOException + */ + public static final void copyAsset( final StorageAsset source, + final StorageAsset target, + boolean locked, + final CopyOption... copyOptions ) throws IOException + { + if (source.isFileBased() && target.isFileBased()) { + // Short cut for FS operations + final Path sourcePath = source.getFilePath(); + final Path targetPath = target.getFilePath( ); + if (locked) { + final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager(); + final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager(); + try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) ) + { + Files.copy( sourcePath, targetPath, copyOptions ); + } + catch ( FileLockException e ) + { + throw new IOException( e ); + } + catch ( FileLockTimeoutException e ) + { + throw new IOException( e ); + } + } else + { + Files.copy( sourcePath, targetPath, copyOptions ); + } + } else { + try { + final RepositoryStorage sourceStorage = source.getStorage(); + final RepositoryStorage targetStorage = target.getStorage(); + sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked); + } catch (IOException e) { + throw e; + } catch (Throwable e) { + Throwable cause = e.getCause(); + if (cause instanceof IOException) { + throw (IOException)cause; + } else + { + throw new IOException( e ); + } + } + } + } + + /** + * Moves a asset between different storage instances. + * If you know that source and asset are from the same storage instance, the move method of the storage + * instance may be faster. + * + * @param source The source asset + * @param target The target asset + * @param locked If true, a lock is used for the move operation. + * @param copyOptions Options for copying + * @throws IOException If the move fails + */ + public static final void moveAsset(StorageAsset source, StorageAsset target, boolean locked, CopyOption... 
copyOptions) throws IOException + { + if (source.isFileBased() && target.isFileBased()) { + // Short cut for FS operations + // Move is atomic operation + Files.move( source.getFilePath(), target.getFilePath(), copyOptions ); + } else { + try { + final RepositoryStorage sourceStorage = source.getStorage(); + final RepositoryStorage targetStorage = target.getStorage(); + sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked); + sourceStorage.removeAsset( source ); + } catch (IOException e) { + throw e; + } catch (Throwable e) { + Throwable cause = e.getCause(); + if (cause instanceof IOException) { + throw (IOException)cause; + } else + { + throw new IOException( e ); + } + } + } + + } + + private static final void wrapWriteFunction(ReadableByteChannel is, RepositoryStorage targetStorage, StorageAsset target, boolean locked) { + try { + targetStorage.writeDataToChannel( target, os -> copy(is, os), locked ); + } catch (Exception e) { + throw new RuntimeException( e ); + } + } + + + private static final void copy( final ReadableByteChannel is, final WritableByteChannel os ) { + if (is instanceof FileChannel) { + copy( (FileChannel) is, os ); + } else if (os instanceof FileChannel) { + copy(is, (FileChannel)os); + } else + { + try + { + ByteBuffer buffer = ByteBuffer.allocate( DEFAULT_BUFFER_SIZE ); + while ( is.read( buffer ) != -1 ) + { + buffer.flip( ); + while ( buffer.hasRemaining( ) ) + { + os.write( buffer ); + } + buffer.clear( ); + } + } + catch ( IOException e ) + { + throw new RuntimeException( e ); + } + } + } + + private static final void copy( final FileChannel is, final WritableByteChannel os ) { + try + { + is.transferTo( 0, is.size( ), os ); + } + catch ( IOException e ) + { + throw new RuntimeException( e ); + } + } + + private static final void copy( final ReadableByteChannel is, final FileChannel os ) { + try + { + os.transferFrom( is, 0, Long.MAX_VALUE ); + } + catch ( IOException e ) + { + throw new RuntimeException( e ); + } + } + + /** + * Runs the consumer function recursively on each asset found starting at the base path + * @param baseAsset The base path where to start search + * @param consumer The consumer function applied to each found asset + * @param depthFirst If true, the deepest elements are consumed first. + * @param maxDepth The maximum depth to recurse into. 0 means, only the baseAsset is consumed, 1 the base asset and its children and so forth. + */ + public static final void recurse(final StorageAsset baseAsset, final Consumer consumer, final boolean depthFirst, final int maxDepth) throws IOException { + recurse(baseAsset, consumer, depthFirst, maxDepth, 0); + } + + /** + * Runs the consumer function recursively on each asset found starting at the base path. The function descends into + * maximum depth. + * + * @param baseAsset The base path where to start search + * @param consumer The consumer function applied to each found asset + * @param depthFirst If true, the deepest elements are consumed first. + */ + public static final void recurse(final StorageAsset baseAsset, final Consumer consumer, final boolean depthFirst) throws IOException { + recurse(baseAsset, consumer, depthFirst, Integer.MAX_VALUE, 0); + } + + /** + * Runs the consumer function recursively on each asset found starting at the base path. It does not recurse with + * depth first and stops only if there are no more children available. 
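For illustration only (not part of this patch), a small sketch of how the recursion helper above could be used to collect the non-container assets below a base directory; the class and method names are made up, and the depth limit of 3 mirrors the purge consumer change earlier in this commit:

import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class RecurseSketch {
    // Collects every file asset up to three levels below the given base asset,
    // visiting the deepest entries first (depthFirst = true).
    public static List<StorageAsset> collectFiles(StorageAsset base) throws IOException {
        List<StorageAsset> files = new ArrayList<>();
        StorageUtil.recurse(base, a -> {
            if (!a.isContainer()) {
                files.add(a);
            }
        }, true, 3);
        return files;
    }
}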
+ * + * @param baseAsset The base path where to start search + * @param consumer The consumer function applied to each found asset + */ + public static final void recurse(final StorageAsset baseAsset, final Consumer consumer) throws IOException { + recurse(baseAsset, consumer, false, Integer.MAX_VALUE, 0); + } + + private static final void recurse(final StorageAsset baseAsset, final Consumer consumer, final boolean depthFirst, final int maxDepth, final int currentDepth) + throws IOException { + if (!depthFirst) { + consumer.accept(baseAsset); + } + if (currentDepth { + try { + a.getStorage().removeAsset(a); + } catch (IOException e) { + log.error("Could not delete asset {}", a.getPath()); + } + },true); + } + + /** + * Returns the extension of the name of a given asset. Extension is the substring after the last occurence of '.' in the + * string. If no '.' is found, the empty string is returned. + * + * @param asset The asset from which to return the extension string. + * @return The extension. + */ + public static final String getExtension(StorageAsset asset) { + return StringUtils.substringAfterLast(asset.getName(),"."); + } + + public static final void copyToLocalFile(StorageAsset asset, Path destination, CopyOption... copyOptions) throws IOException { + if (asset.isFileBased()) { + Files.copy(asset.getFilePath(), destination, copyOptions); + } else { + try { + + HashSet openOptions = new HashSet<>(); + for (CopyOption option : copyOptions) { + if (option == StandardCopyOption.REPLACE_EXISTING) { + openOptions.add(StandardOpenOption.CREATE); + openOptions.add(StandardOpenOption.TRUNCATE_EXISTING); + openOptions.add(StandardOpenOption.WRITE); + } else { + openOptions.add(StandardOpenOption.WRITE); + openOptions.add(StandardOpenOption.CREATE_NEW); + } + } + asset.getStorage().consumeDataFromChannel(asset, channel -> { + try { + FileChannel.open(destination, openOptions).transferFrom(channel, 0, Long.MAX_VALUE); + } catch (IOException e) { + throw new RuntimeException(e); + } + }, false); + } catch (Throwable e) { + if (e.getCause() instanceof IOException) { + throw (IOException)e.getCause(); + } else { + throw new IOException(e); + } + } + } + } + + public static class PathInformation { + final Path path ; + final boolean tmpFile; + + PathInformation(Path path, boolean tmpFile) { + this.path = path; + this.tmpFile = tmpFile; + } + + public Path getPath() { + return path; + } + + public boolean isTmpFile() { + return tmpFile; + } + + } + + public static final PathInformation getAssetDataAsPath(StorageAsset asset) throws IOException { + if (!asset.exists()) { + throw new IOException("Asset does not exist"); + } + if (asset.isFileBased()) { + return new PathInformation(asset.getFilePath(), false); + } else { + Path tmpFile = Files.createTempFile(asset.getName(), getExtension(asset)); + copyToLocalFile(asset, tmpFile, StandardCopyOption.REPLACE_EXISTING); + return new PathInformation(tmpFile, true); + } + } + +} diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemAssetTest.java b/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemAssetTest.java new file mode 100644 index 000000000..566c0cb5d --- /dev/null +++ b/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemAssetTest.java @@ -0,0 +1,203 @@ +package org.apache.archiva.repository.storage; + +/* + * Licensed to the Apache Software Foundation (ASF) under 
one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.archiva.common.filelock.DefaultFileLockManager; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Instant; + +public class FilesystemAssetTest { + + Path assetPathFile; + Path assetPathDir; + FilesystemStorage filesystemStorage; + + @Before + public void init() throws IOException { + assetPathDir = Files.createTempDirectory("assetDir"); + assetPathFile = Files.createTempFile(assetPathDir,"assetFile", "dat"); + filesystemStorage = new FilesystemStorage(assetPathDir, new DefaultFileLockManager()); + } + + @After + public void cleanup() { + + try { + Files.deleteIfExists(assetPathFile); + } catch (IOException e) { + e.printStackTrace(); + } + FileUtils.deleteQuietly(assetPathDir.toFile()); + } + + + @Test + public void getPath() { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, assetPathFile.getFileName().toString(), assetPathFile); + Assert.assertEquals("/"+assetPathFile.getFileName().toString(), asset.getPath()); + } + + @Test + public void getName() { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/"+assetPathFile.getFileName().toString(), assetPathFile); + Assert.assertEquals(assetPathFile.getFileName().toString(), asset.getName()); + + } + + @Test + public void getModificationTime() throws IOException { + Instant modTime = Files.getLastModifiedTime(assetPathFile).toInstant(); + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test123", assetPathFile); + Assert.assertTrue(modTime.equals(asset.getModificationTime())); + } + + @Test + public void isContainer() { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1323", assetPathFile); + Assert.assertFalse(asset.isContainer()); + FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir); + Assert.assertTrue(asset2.isContainer()); + } + + @Test + public void list() throws IOException { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile); + Assert.assertEquals(0, asset.list().size()); + + FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1235", assetPathDir); + Assert.assertEquals(1, asset2.list().size()); + Path f1 = Files.createTempFile(assetPathDir, "testfile", "dat"); + Path f2 = Files.createTempFile(assetPathDir, "testfile", "dat"); + Path d1 = Files.createTempDirectory(assetPathDir, "testdir"); + Assert.assertEquals(4, asset2.list().size()); + 
Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f1.getFileName().toString()))); + Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f2.getFileName().toString()))); + Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(d1.getFileName().toString()))); + Files.deleteIfExists(f1); + Files.deleteIfExists(f2); + Files.deleteIfExists(d1); + + + } + + @Test + public void getSize() throws IOException { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile); + Assert.assertEquals(0, asset.getSize()); + + Files.write(assetPathFile, new String("abcdef").getBytes("ASCII")); + Assert.assertTrue(asset.getSize()>=6); + + + } + + @Test + public void getData() throws IOException { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile); + Files.write(assetPathFile, "abcdef".getBytes("ASCII")); + try(InputStream is = asset.getReadStream()) { + Assert.assertEquals("abcdef", IOUtils.toString(is, "ASCII")); + } + + } + + @Test + public void getDataExceptionOnDir() throws IOException { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir); + Files.write(assetPathFile, "abcdef".getBytes("ASCII")); + try { + InputStream is = asset.getReadStream(); + Assert.assertFalse("Exception expected for data on dir", true); + } catch (IOException e) { + // fine + } + + } + + @Test + public void writeData() throws IOException { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile); + Files.write(assetPathFile, "abcdef".getBytes("ASCII")); + try(OutputStream os = asset.getWriteStream(true)) { + IOUtils.write("test12345", os, "ASCII"); + } + Assert.assertEquals("test12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII")); + } + + @Test + public void writeDataAppend() throws IOException { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile); + Files.write(assetPathFile, "abcdef".getBytes("ASCII")); + try(OutputStream os = asset.getWriteStream(false)) { + IOUtils.write("test12345", os, "ASCII"); + } + Assert.assertEquals("abcdeftest12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII")); + } + + @Test + public void writeDataExceptionOnDir() throws IOException { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir); + try { + + OutputStream os = asset.getWriteStream(true); + Assert.assertTrue("Writing to a directory should throw a IOException", false); + } catch (IOException e) { + // Fine + } + } + + @Test + public void storeDataFile() throws IOException { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile); + Path dataFile = Files.createTempFile("testdata", "dat"); + try(OutputStream os = Files.newOutputStream(dataFile)) { + IOUtils.write("testkdkdkd", os, "ASCII"); + } + asset.replaceDataFromFile(dataFile); + Assert.assertEquals("testkdkdkd", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII")); + } + + @Test + public void exists() { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile); + Assert.assertTrue(asset.exists()); + FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1234", Paths.get("abcdefgkdkdk")); + Assert.assertFalse(asset2.exists()); + + } + + @Test + public void getFilePath() { + FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile); + 
Assert.assertEquals(assetPathFile, asset.getFilePath()); + } +} \ No newline at end of file diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemStorageTest.java b/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemStorageTest.java new file mode 100644 index 000000000..ebbc6a5a6 --- /dev/null +++ b/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemStorageTest.java @@ -0,0 +1,200 @@ +package org.apache.archiva.repository.storage; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.archiva.common.filelock.DefaultFileLockManager; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; + +import static org.junit.Assert.*; + +public class FilesystemStorageTest { + + private FilesystemStorage fsStorage; + private FilesystemAsset file1Asset; + private FilesystemAsset dir1Asset; + private Path baseDir; + private Path file1; + private Path dir1; + + @Before + public void init() throws IOException { + baseDir = Files.createTempDirectory("FsStorageTest"); + DefaultFileLockManager fl = new DefaultFileLockManager(); + fsStorage = new FilesystemStorage(baseDir,fl); + Files.createDirectories(baseDir.resolve("dir1")); + Files.createDirectories(baseDir.resolve("dir2")); + file1 = Files.createFile(baseDir.resolve("dir1/testfile1.dat")); + dir1 = Files.createDirectories(baseDir.resolve("dir1/testdir")); + file1Asset = new FilesystemAsset(fsStorage, "/dir1/testfile1.dat", file1); + dir1Asset = new FilesystemAsset(fsStorage, "/dir1/testdir", dir1); + } + + private class StringResult { + public String getData() { + return data; + } + + public void setData(String data) { + this.data = data; + } + + String data; + } + + + @After + public void cleanup() { + FileUtils.deleteQuietly(file1.toFile()); + FileUtils.deleteQuietly(dir1.toFile()); + FileUtils.deleteQuietly(baseDir.resolve("dir1").toFile()); + FileUtils.deleteQuietly(baseDir.resolve("dir2").toFile()); + FileUtils.deleteQuietly(baseDir.toFile()); + } + + + + + @Test + public void consumeData() throws IOException { + try(OutputStream os = Files.newOutputStream(file1)) { + IOUtils.write("abcdefghijkl", os, 
"ASCII"); + } + StringResult result = new StringResult(); + fsStorage.consumeData(file1Asset, is -> consume(is, result), false ); + Assert.assertEquals("abcdefghijkl" ,result.getData()); + } + + private void consume(InputStream is, StringResult result) { + try { + result.setData(IOUtils.toString(is, "ASCII")); + } catch (IOException e) { + e.printStackTrace(); + } + } + + + @Test + public void getAsset() { + StorageAsset asset = fsStorage.getAsset("/dir1/testfile1.dat"); + Assert.assertEquals(file1, asset.getFilePath()); + } + + @Test + public void addAsset() { + StorageAsset newAsset = fsStorage.addAsset("dir2/test", false); + Assert.assertNotNull(newAsset); + Assert.assertFalse(newAsset.isContainer()); + Assert.assertFalse(newAsset.exists()); + + StorageAsset newDirAsset = fsStorage.addAsset("/dir2/testdir2", true); + Assert.assertNotNull(newDirAsset); + Assert.assertTrue(newDirAsset.isContainer()); + Assert.assertFalse(newDirAsset.exists()); + } + + @Test + public void removeAsset() throws IOException { + Assert.assertTrue(Files.exists(file1)); + fsStorage.removeAsset(file1Asset); + Assert.assertFalse(Files.exists(file1)); + + Assert.assertTrue(Files.exists(dir1)); + fsStorage.removeAsset(dir1Asset); + Assert.assertFalse(Files.exists(dir1)); + } + + @Test + public void moveAsset() throws IOException { + Path newFile=null; + Path newDir=null; + try { + Assert.assertTrue(Files.exists(file1)); + try (OutputStream os = Files.newOutputStream(file1)) { + IOUtils.write("testakdkkdkdkdk", os, "ASCII"); + } + long fileSize = Files.size(file1); + fsStorage.moveAsset(file1Asset, "/dir2/testfile2.dat"); + Assert.assertFalse(Files.exists(file1)); + newFile = baseDir.resolve("dir2/testfile2.dat"); + Assert.assertTrue(Files.exists(newFile)); + Assert.assertEquals(fileSize, Files.size(newFile)); + + + Assert.assertTrue(Files.exists(dir1)); + newDir = baseDir.resolve("dir2/testdir2"); + fsStorage.moveAsset(dir1Asset, "dir2/testdir2"); + Assert.assertFalse(Files.exists(dir1)); + Assert.assertTrue(Files.exists(newDir)); + } finally { + if (newFile!=null) Files.deleteIfExists(newFile); + if (newDir!=null) Files.deleteIfExists(newDir); + } + } + + @Test + public void copyAsset() throws IOException { + Path newFile=null; + Path newDir=null; + try { + Assert.assertTrue(Files.exists(file1)); + try (OutputStream os = Files.newOutputStream(file1)) { + IOUtils.write("testakdkkdkdkdk", os, "ASCII"); + } + long fileSize = Files.size(file1); + fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat", StandardCopyOption.REPLACE_EXISTING); + Assert.assertTrue(Files.exists(file1)); + Assert.assertEquals(fileSize, Files.size(file1)); + newFile = baseDir.resolve("dir2/testfile2.dat"); + Assert.assertTrue(Files.exists(newFile)); + Assert.assertEquals(fileSize, Files.size(newFile)); + + try { + fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat"); + Assert.assertTrue("IOException should be thrown (File exists)", false); + } catch (IOException ex) { + Assert.assertTrue("Exception must contain 'file exists'", ex.getMessage().contains("file exists")); + } + + Assert.assertTrue(Files.exists(dir1)); + newDir = baseDir.resolve("dir2/testdir2"); + fsStorage.copyAsset(dir1Asset, "dir2/testdir2"); + Assert.assertTrue(Files.exists(dir1)); + Assert.assertTrue(Files.exists(newDir)); + } finally { + if (newFile!=null) Files.deleteIfExists(newFile); + if (newDir!=null) FileUtils.deleteQuietly(newDir.toFile()); + } + } +} \ No newline at end of file diff --git a/archiva-modules/archiva-base/pom.xml b/archiva-modules/archiva-base/pom.xml 
index a687a9564..a580e84a8 100644 --- a/archiva-modules/archiva-base/pom.xml +++ b/archiva-modules/archiva-base/pom.xml @@ -52,5 +52,7 @@ archiva-repository-scanner archiva-repository-admin archiva-security-common + archiva-storage-api + archiva-storage-fs diff --git a/archiva-modules/archiva-maven/archiva-maven-converter/src/main/java/org/apache/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java b/archiva-modules/archiva-maven/archiva-maven-converter/src/main/java/org/apache/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java index 25a3f9d64..93cc6ce81 100644 --- a/archiva-modules/archiva-maven/archiva-maven-converter/src/main/java/org/apache/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java +++ b/archiva-modules/archiva-maven/archiva-maven-converter/src/main/java/org/apache/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java @@ -28,12 +28,11 @@ import org.apache.archiva.consumers.InvalidRepositoryContentConsumer; import org.apache.archiva.consumers.KnownRepositoryContentConsumer; import org.apache.archiva.converter.RepositoryConversionException; import org.apache.archiva.repository.BasicManagedRepository; -import org.apache.archiva.repository.content.FilesystemStorage; +import org.apache.archiva.repository.storage.FilesystemStorage; import org.apache.archiva.repository.content.maven2.ManagedDefaultRepositoryContent; import org.apache.archiva.repository.scanner.RepositoryScanner; import org.apache.archiva.repository.scanner.RepositoryScannerException; import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.repository.ArtifactRepositoryFactory; import org.apache.maven.artifact.repository.MavenArtifactRepository; import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; import org.springframework.stereotype.Service; diff --git a/archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexContext.java b/archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexContext.java index 47de4b72c..7f18ad185 100644 --- a/archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexContext.java +++ b/archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexContext.java @@ -19,14 +19,21 @@ package org.apache.archiva.indexer.maven; * under the License. 
*/ +import org.apache.archiva.common.filelock.DefaultFileLockManager; +import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.repository.Repository; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.maven.index.context.IndexingContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.URI; import java.nio.file.Files; import java.nio.file.NoSuchFileException; +import java.nio.file.Path; import java.sql.Date; import java.time.ZonedDateTime; import java.util.Set; @@ -36,8 +43,11 @@ import java.util.Set; */ public class MavenIndexContext implements ArchivaIndexingContext { + private static final Logger log = LoggerFactory.getLogger(ArchivaIndexingContext.class); + private IndexingContext delegate; private Repository repository; + private StorageAsset dir = null; protected MavenIndexContext(Repository repository, IndexingContext delegate) { this.delegate = delegate; @@ -56,8 +66,23 @@ public class MavenIndexContext implements ArchivaIndexingContext { } @Override - public URI getPath() { - return delegate.getIndexDirectoryFile().toURI(); + public StorageAsset getPath() { + if (dir==null) { + StorageAsset repositoryDirAsset = repository.getAsset(""); + Path repositoryDir = repositoryDirAsset.getFilePath().toAbsolutePath(); + Path indexDir = delegate.getIndexDirectoryFile().toPath(); + if (indexDir.startsWith(repositoryDir)) { + dir = repository.getAsset(repositoryDir.relativize(indexDir).toString()); + } else { + try { + FilesystemStorage storage = new FilesystemStorage(indexDir, new DefaultFileLockManager()); + dir = storage.getAsset(""); + } catch (IOException e) { + log.error("Error occured while creating storage for index dir"); + } + } + } + return dir; } @Override diff --git a/archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexManager.java b/archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexManager.java index d04911206..a621a5bc8 100644 --- a/archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexManager.java +++ b/archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexManager.java @@ -19,7 +19,6 @@ package org.apache.archiva.indexer.maven; * under the License. 
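As an aside (not part of this patch), a sketch of how a caller of ArchivaIndexingContext might look after this change; the class and method names of the sketch are made up, the API calls are those visible in this commit:

import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;

import java.io.IOException;

public class IndexCleanupSketch {
    // Removes the index directory of the given context, whatever storage backs it.
    public static void removeIndex(ArchivaIndexingContext ctx) throws IOException {
        StorageAsset indexDir = ctx.getPath();   // returns a StorageAsset instead of a URI now
        StorageUtil.deleteRecursively(indexDir);
    }
}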
*/ -import org.apache.archiva.admin.model.RepositoryAdminException; import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.configuration.ArchivaConfiguration; @@ -28,8 +27,6 @@ import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.indexer.IndexCreationFailedException; import org.apache.archiva.indexer.IndexUpdateFailedException; import org.apache.archiva.indexer.UnsupportedBaseContextException; -import org.apache.archiva.indexer.merger.IndexMergerException; -import org.apache.archiva.indexer.merger.TemporaryGroupIndex; import org.apache.archiva.proxy.ProxyRegistry; import org.apache.archiva.proxy.maven.WagonFactory; import org.apache.archiva.proxy.maven.WagonFactoryException; @@ -42,10 +39,12 @@ import org.apache.archiva.repository.RemoteRepository; import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.UnsupportedRepositoryTypeException; -import org.apache.archiva.repository.content.FilesystemAsset; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.RepositoryStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.RemoteIndexFeature; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.commons.lang.StringUtils; import org.apache.maven.index.ArtifactContext; import org.apache.maven.index.ArtifactContextProducer; @@ -142,7 +141,7 @@ public class MavenIndexManager implements ArchivaIndexManager { private ProxyRegistry proxyRegistry; - private ConcurrentSkipListSet activeContexts = new ConcurrentSkipListSet<>( ); + private ConcurrentSkipListSet activeContexts = new ConcurrentSkipListSet<>( ); private static final int WAIT_TIME = 100; private static final int MAX_WAIT = 10; @@ -158,9 +157,9 @@ public class MavenIndexManager implements ArchivaIndexManager { return context.getBaseContext( IndexingContext.class ); } - private Path getIndexPath( ArchivaIndexingContext ctx ) + private StorageAsset getIndexPath( ArchivaIndexingContext ctx ) { - return PathUtil.getPathFromUri( ctx.getPath( ) ); + return ctx.getPath( ); } @FunctionalInterface @@ -185,7 +184,7 @@ public class MavenIndexManager implements ArchivaIndexManager { { throw new IndexUpdateFailedException( "Maven index is not supported by this context", e ); } - final Path ctxPath = getIndexPath( context ); + final StorageAsset ctxPath = getIndexPath( context ); int loop = MAX_WAIT; boolean active = false; while ( loop-- > 0 && !active ) @@ -395,9 +394,9 @@ public class MavenIndexManager implements ArchivaIndexManager { @Override public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection artifactReference ) throws IndexUpdateFailedException { - final URI ctxUri = context.getPath(); + final StorageAsset ctxUri = context.getPath(); executeUpdateFunction(context, indexingContext -> { - Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList()); + Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, 
Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList()); try { indexer.addArtifactsToIndex(artifacts, indexingContext); } catch (IOException e) { @@ -411,9 +410,9 @@ public class MavenIndexManager implements ArchivaIndexManager { @Override public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection artifactReference ) throws IndexUpdateFailedException { - final URI ctxUri = context.getPath(); + final StorageAsset ctxUri = context.getPath(); executeUpdateFunction(context, indexingContext -> { - Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList()); + Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList()); try { indexer.deleteArtifactsFromIndex(artifacts, indexingContext); } catch (IOException e) { @@ -457,9 +456,8 @@ public class MavenIndexManager implements ArchivaIndexManager { throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e ); } - MavenIndexContext context = new MavenIndexContext( repository, mvnCtx ); - return context; + return new MavenIndexContext( repository, mvnCtx ); } @Override @@ -472,7 +470,7 @@ public class MavenIndexManager implements ArchivaIndexManager { log.warn("Index close failed"); } try { - FileUtils.deleteDirectory(Paths.get(context.getPath())); + StorageUtil.deleteRecursively(context.getPath()); } catch (IOException e) { throw new IndexUpdateFailedException("Could not delete index files"); } @@ -593,51 +591,57 @@ public class MavenIndexManager implements ArchivaIndexManager { } } - private StorageAsset getIndexPath(URI indexDir, Path repoDir, String defaultDir) throws IOException + private StorageAsset getIndexPath(URI indexDirUri, RepositoryStorage storage, String defaultDir) throws IOException { - String indexPath = indexDir.getPath(); - Path indexDirectory = null; - if ( ! StringUtils.isEmpty(indexDir.toString( ) ) ) + Path indexDirectory; + Path repositoryPath = storage.getAsset("").getFilePath().toAbsolutePath(); + StorageAsset indexDir; + if ( ! 
StringUtils.isEmpty(indexDirUri.toString( ) ) ) { - indexDirectory = PathUtil.getPathFromUri( indexDir ); + indexDirectory = PathUtil.getPathFromUri( indexDirUri ); // not absolute so create it in repository directory - if ( indexDirectory.isAbsolute( ) ) + if ( indexDirectory.isAbsolute( ) && !indexDirectory.startsWith(repositoryPath)) { - indexPath = indexDirectory.getFileName().toString(); + if (storage instanceof FilesystemStorage) { + FilesystemStorage fsStorage = (FilesystemStorage) storage; + FilesystemStorage indexStorage = new FilesystemStorage(indexDirectory.getParent(), fsStorage.getFileLockManager()); + indexDir = indexStorage.getAsset(indexDirectory.getFileName().toString()); + } else { + throw new IOException("The given storage is not file based."); + } + } else if (indexDirectory.isAbsolute()) { + indexDir = storage.getAsset(repositoryPath.relativize(indexDirectory).toString()); } else { - indexDirectory = repoDir.resolve( indexDirectory ); + indexDir = storage.getAsset(indexDirectory.toString()); } } else { - indexDirectory = repoDir.resolve( defaultDir ); - indexPath = defaultDir; + indexDir = storage.getAsset( defaultDir ); } - if ( !Files.exists( indexDirectory ) ) + if ( !indexDir.exists() ) { - Files.createDirectories( indexDirectory ); + indexDir.create(); } - return new FilesystemAsset( indexPath, indexDirectory); + return indexDir; } private StorageAsset getIndexPath( Repository repo) throws IOException { IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get(); - return getIndexPath( icf.getIndexPath(), repo.getAsset( "" ).getFilePath(), DEFAULT_INDEX_PATH); + return getIndexPath( icf.getIndexPath(), repo, DEFAULT_INDEX_PATH); } private StorageAsset getPackedIndexPath(Repository repo) throws IOException { IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get(); - return getIndexPath(icf.getPackedIndexPath(), repo.getAsset( "" ).getFilePath(), DEFAULT_PACKED_INDEX_PATH); + return getIndexPath(icf.getPackedIndexPath(), repo, DEFAULT_PACKED_INDEX_PATH); } private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException { - Path appServerBase = archivaConfiguration.getAppServerBaseDir( ); - String contextKey = "remote-" + remoteRepository.getId( ); @@ -648,7 +652,7 @@ public class MavenIndexManager implements ArchivaIndexManager { Files.createDirectories( repoDir ); } - StorageAsset indexDirectory = null; + StorageAsset indexDirectory; // is there configured indexDirectory ? 
if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) ) @@ -715,7 +719,7 @@ public class MavenIndexManager implements ArchivaIndexManager { } } - StorageAsset indexDirectory = null; + StorageAsset indexDirectory; if ( repository.supportsFeature( IndexCreationFeature.class ) ) { @@ -837,23 +841,18 @@ public class MavenIndexManager implements ArchivaIndexManager { } @Override - public void connect( String id, String url ) - throws IOException - { + public void connect( String id, String url ) { //no op } @Override - public void disconnect( ) - throws IOException - { + public void disconnect( ) { // no op } @Override public InputStream retrieve( String name ) - throws IOException, FileNotFoundException - { + throws IOException { try { log.info( "index update retrieve file, name:{}", name ); diff --git a/archiva-modules/archiva-maven/archiva-maven-indexer/src/test/java/org/apache/archiva/indexer/maven/MavenIndexManagerTest.java b/archiva-modules/archiva-maven/archiva-maven-indexer/src/test/java/org/apache/archiva/indexer/maven/MavenIndexManagerTest.java index 2698575db..5f73f27eb 100644 --- a/archiva-modules/archiva-maven/archiva-maven-indexer/src/test/java/org/apache/archiva/indexer/maven/MavenIndexManagerTest.java +++ b/archiva-modules/archiva-maven/archiva-maven-indexer/src/test/java/org/apache/archiva/indexer/maven/MavenIndexManagerTest.java @@ -208,7 +208,7 @@ public class MavenIndexManagerTest { assertNotNull(ctx); assertEquals(repository, ctx.getRepository()); assertEquals("test-repo", ctx.getId()); - assertEquals(indexPath.toAbsolutePath(), Paths.get(ctx.getPath()).toAbsolutePath()); + assertEquals(indexPath.toAbsolutePath(), ctx.getPath().getFilePath().toAbsolutePath()); assertTrue(Files.exists(indexPath)); List li = Files.list(indexPath).collect(Collectors.toList()); assertTrue(li.size()>0); diff --git a/archiva-modules/archiva-maven/archiva-maven-metadata/pom.xml b/archiva-modules/archiva-maven/archiva-maven-metadata/pom.xml index aca1c7d4b..3448fe001 100644 --- a/archiva-modules/archiva-maven/archiva-maven-metadata/pom.xml +++ b/archiva-modules/archiva-maven/archiva-maven-metadata/pom.xml @@ -39,6 +39,10 @@ org.apache.archiva archiva-model + + org.apache.archiva + archiva-storage-api + org.apache.archiva archiva-xml-tools diff --git a/archiva-modules/archiva-maven/archiva-maven-metadata/src/main/java/org/apache/archiva/maven2/metadata/MavenMetadataReader.java b/archiva-modules/archiva-maven/archiva-maven-metadata/src/main/java/org/apache/archiva/maven2/metadata/MavenMetadataReader.java index 023b8eaa2..2c81bd994 100644 --- a/archiva-modules/archiva-maven/archiva-maven-metadata/src/main/java/org/apache/archiva/maven2/metadata/MavenMetadataReader.java +++ b/archiva-modules/archiva-maven/archiva-maven-metadata/src/main/java/org/apache/archiva/maven2/metadata/MavenMetadataReader.java @@ -21,6 +21,7 @@ package org.apache.archiva.maven2.metadata; import org.apache.archiva.model.ArchivaRepositoryMetadata; import org.apache.archiva.model.Plugin; import org.apache.archiva.model.SnapshotVersion; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.xml.XMLException; import org.apache.archiva.xml.XMLReader; import org.apache.commons.lang.math.NumberUtils; @@ -64,6 +65,14 @@ public class MavenMetadataReader private static final Logger log = LoggerFactory.getLogger( MavenMetadataReader.class ); + public static ArchivaRepositoryMetadata read(StorageAsset metadataFile) throws XMLException, IOException { + if (metadataFile.isFileBased()) { + return 
read(metadataFile.getFilePath()); + } else { + throw new IOException("StorageAsset is not file based"); + } + } + /** * Read and return the {@link org.apache.archiva.model.ArchivaRepositoryMetadata} object from the provided xml file. * @@ -72,8 +81,7 @@ public class MavenMetadataReader * @throws XMLException */ public static ArchivaRepositoryMetadata read( Path metadataFile ) - throws XMLException - { + throws XMLException, IOException { XMLReader xml = new XMLReader( "metadata", metadataFile ); // invoke this to remove namespaces, see MRM-1136 @@ -85,25 +93,9 @@ public class MavenMetadataReader metadata.setArtifactId( xml.getElementText( "//metadata/artifactId" ) ); metadata.setVersion( xml.getElementText( "//metadata/version" ) ); Date modTime; - try - { - modTime = new Date(Files.getLastModifiedTime( metadataFile ).toMillis( )); - } - catch ( IOException e ) - { - modTime = new Date(); - log.error("Could not read modification time of {}", metadataFile); - } + modTime = new Date(Files.getLastModifiedTime(metadataFile).toMillis()); metadata.setFileLastModified( modTime ); - try - { - metadata.setFileSize( Files.size( metadataFile ) ); - } - catch ( IOException e ) - { - metadata.setFileSize( 0 ); - log.error("Could not read file size of {}", metadataFile); - } + metadata.setFileSize( Files.size(metadataFile) ); metadata.setLastUpdated( xml.getElementText( "//metadata/versioning/lastUpdated" ) ); metadata.setLatestVersion( xml.getElementText( "//metadata/versioning/latest" ) ); diff --git a/archiva-modules/archiva-maven/archiva-maven-proxy/src/main/java/org/apache/archiva/proxy/maven/MavenRepositoryProxyHandler.java b/archiva-modules/archiva-maven/archiva-maven-proxy/src/main/java/org/apache/archiva/proxy/maven/MavenRepositoryProxyHandler.java index 9c383c37d..7119c9613 100644 --- a/archiva-modules/archiva-maven/archiva-maven-proxy/src/main/java/org/apache/archiva/proxy/maven/MavenRepositoryProxyHandler.java +++ b/archiva-modules/archiva-maven/archiva-maven-proxy/src/main/java/org/apache/archiva/proxy/maven/MavenRepositoryProxyHandler.java @@ -28,7 +28,7 @@ import org.apache.archiva.proxy.ProxyException; import org.apache.archiva.proxy.model.NetworkProxy; import org.apache.archiva.proxy.model.ProxyConnector; import org.apache.archiva.repository.*; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import org.apache.maven.wagon.ConnectionException; import org.apache.maven.wagon.ResourceDoesNotExistException; @@ -113,7 +113,7 @@ public class MavenRepositoryProxyHandler extends DefaultRepositoryProxyHandler { * @throws NotModifiedException */ protected void transferResources( ProxyConnector connector, RemoteRepositoryContent remoteRepository, - Path tmpResource, Path[] checksumFiles, String url, String remotePath, StorageAsset resource, + StorageAsset tmpResource, StorageAsset[] checksumFiles, String url, String remotePath, StorageAsset resource, Path workingDirectory, ManagedRepositoryContent repository ) throws ProxyException, NotModifiedException { Wagon wagon = null; @@ -153,9 +153,9 @@ public class MavenRepositoryProxyHandler extends DefaultRepositoryProxyHandler { // to // save on connections since md5 is rarely used for (int i=0; i listRootNamespaces(String repoId, Filter filter) throws RepositoryStorageRuntimeException { - Path dir = getRepositoryBasedir(repoId); + StorageAsset dir = getRepositoryBasedir(repoId); return getSortedFiles(dir, filter); } - private static 
Collection getSortedFiles(Path dir, Filter filter) { + private static Collection getSortedFiles(StorageAsset dir, Filter filter) { - try (Stream stream = Files.list(dir)) { - final Predicate dFilter = new DirectoryFilter(filter); - return stream.filter(Files::isDirectory) + final Predicate dFilter = new DirectoryFilter(filter); + return dir.list().stream().filter(f -> f.isContainer()) .filter(dFilter) - .map(path -> path.getFileName().toString()) + .map(path -> path.getName().toString()) .sorted().collect(Collectors.toList()); - } catch (IOException e) { - LOGGER.error("Could not read directory list {}: {}", dir, e.getMessage(), e); - return Collections.emptyList(); - } } - private Path getRepositoryBasedir(String repoId) + private StorageAsset getRepositoryBasedir(String repoId) throws RepositoryStorageRuntimeException { ManagedRepository repositoryConfiguration = repositoryRegistry.getManagedRepository(repoId); - return Paths.get(repositoryConfiguration.getLocation()); + return repositoryConfiguration.getAsset(""); } @Override public Collection listNamespaces(String repoId, String namespace, Filter filter) throws RepositoryStorageRuntimeException { - Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace); - if (!(Files.exists(dir) && Files.isDirectory(dir))) { + StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace); + if (!(dir.exists()) && !dir.isContainer()) { return Collections.emptyList(); } // scan all the directories which are potential namespaces. Any directories known to be projects are excluded - Predicate dFilter = new DirectoryFilter(filter); - try (Stream stream = Files.list(dir)) { - return stream.filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getFileName().toString()) + Predicate dFilter = new DirectoryFilter(filter); + return dir.list().stream().filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getName().toString()) .sorted().collect(Collectors.toList()); - } catch (IOException e) { - LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e); - return Collections.emptyList(); - } } @Override public Collection listProjects(String repoId, String namespace, Filter filter) throws RepositoryStorageRuntimeException { - Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace); - if (!(Files.exists(dir) && Files.isDirectory(dir))) { + StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace); + if (!(dir.exists() && dir.isContainer())) { return Collections.emptyList(); } // scan all directories in the namespace, and only include those that are known to be projects - final Predicate dFilter = new DirectoryFilter(filter); - try (Stream stream = Files.list(dir)) { - return stream.filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getFileName().toString()) + final Predicate dFilter = new DirectoryFilter(filter); + return dir.list().stream().filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getName().toString()) .sorted().collect(Collectors.toList()); - } catch (IOException e) { - LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e); - return Collections.emptyList(); - } } @@ -528,8 +511,8 @@ public class Maven2RepositoryStorage public Collection listProjectVersions(String repoId, String namespace, String projectId, Filter filter) throws RepositoryStorageRuntimeException { - Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace, projectId); - if 
(!(Files.exists(dir) && Files.isDirectory(dir))) { + StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace, projectId); + if (!(dir.exists() && dir.isContainer())) { return Collections.emptyList(); } @@ -540,18 +523,17 @@ public class Maven2RepositoryStorage @Override public Collection readArtifactsMetadata(ReadMetadataRequest readMetadataRequest) throws RepositoryStorageRuntimeException { - Path dir = pathTranslator.toFile(getRepositoryBasedir(readMetadataRequest.getRepositoryId()), + StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(readMetadataRequest.getRepositoryId()), readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion()); - if (!(Files.exists(dir) && Files.isDirectory(dir))) { + if (!(dir.exists() && dir.isContainer())) { return Collections.emptyList(); } // all files that are not metadata and not a checksum / signature are considered artifacts - final Predicate dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter()); - try (Stream stream = Files.list(dir)) { + final Predicate dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter()); // Returns a map TRUE -> (success values), FALSE -> (Exceptions) - Map>> result = stream.filter(dFilter).map(path -> { + Map>> result = dir.list().stream().filter(dFilter).map(path -> { try { return Try.success(getArtifactFromFile(readMetadataRequest.getRepositoryId(), readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion(), @@ -573,10 +555,6 @@ public class Maven2RepositoryStorage } return result.get(Boolean.TRUE).stream().map(tr -> tr.get()).collect(Collectors.toList()); } - } catch (IOException e) { - LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e); - } - return Collections.emptyList(); } @@ -595,9 +573,9 @@ public class Maven2RepositoryStorage } private ArtifactMetadata getArtifactFromFile(String repoId, String namespace, String projectId, - String projectVersion, Path file) throws IOException { + String projectVersion, StorageAsset file) throws IOException { ArtifactMetadata metadata = - pathTranslator.getArtifactFromId(repoId, namespace, projectId, projectVersion, file.getFileName().toString()); + pathTranslator.getArtifactFromId(repoId, namespace, projectId, projectVersion, file.getName()); populateArtifactMetadataFromFile(metadata, file); @@ -629,17 +607,17 @@ public class Maven2RepositoryStorage proxyHandler.fetchFromProxies(managedRepository, pomReference); // Open and read the POM from the managed repo - Path pom = managedRepository.toFile(pomReference); + StorageAsset pom = managedRepository.toFile(pomReference); - if (!Files.exists(pom)) { + if (!pom.exists()) { return; } try { // MavenXpp3Reader leaves the file open, so we need to close it ourselves. 
- Model model = null; - try (Reader reader = Files.newBufferedReader(pom, Charset.defaultCharset())) { + Model model; + try (Reader reader = Channels.newReader(pom.getReadChannel(), Charset.defaultCharset().name())) { model = MAVEN_XPP_3_READER.read(reader); } @@ -708,7 +686,7 @@ public class Maven2RepositoryStorage @Override public String getFilePathWithVersion(final String requestPath, ManagedRepositoryContent managedRepositoryContent) - throws XMLException, RelocationException { + throws RelocationException, XMLException, IOException { if (StringUtils.endsWith(requestPath, METADATA_FILENAME)) { return getFilePath(requestPath, managedRepositoryContent.getRepository()); @@ -725,12 +703,12 @@ public class Maven2RepositoryStorage if (StringUtils.endsWith(artifactReference.getVersion(), VersionUtil.SNAPSHOT)) { // read maven metadata to get last timestamp - Path metadataDir = Paths.get(managedRepositoryContent.getRepoRoot(), filePath).getParent(); - if (!Files.exists(metadataDir)) { + StorageAsset metadataDir = managedRepositoryContent.getRepository().getAsset( filePath).getParent(); + if (!metadataDir.exists()) { return filePath; } - Path metadataFile = metadataDir.resolve(METADATA_FILENAME); - if (!Files.exists(metadataFile)) { + StorageAsset metadataFile = metadataDir.resolve(METADATA_FILENAME); + if (!metadataFile.exists()) { return filePath; } ArchivaRepositoryMetadata archivaRepositoryMetadata = MavenMetadataReader.read(metadataFile); @@ -788,10 +766,10 @@ public class Maven2RepositoryStorage return joinedString; } - private static void populateArtifactMetadataFromFile(ArtifactMetadata metadata, Path file) throws IOException { + private static void populateArtifactMetadataFromFile(ArtifactMetadata metadata, StorageAsset file) throws IOException { metadata.setWhenGathered(new Date()); - metadata.setFileLastModified(Files.getLastModifiedTime(file).toMillis()); - ChecksummedFile checksummedFile = new ChecksummedFile(file); + metadata.setFileLastModified(file.getModificationTime().toEpochMilli()); + ChecksummedFile checksummedFile = new ChecksummedFile(file.getFilePath()); try { metadata.setMd5(checksummedFile.calculateChecksum(ChecksumAlgorithm.MD5)); } catch (IOException e) { @@ -802,52 +780,43 @@ public class Maven2RepositoryStorage } catch (IOException e) { LOGGER.error("Unable to checksum file {}: {},SHA1", file, e.getMessage()); } - metadata.setSize(Files.size(file)); + metadata.setSize(file.getSize()); } - private boolean isProject(Path dir, Filter filter) { + private boolean isProject(StorageAsset dir, Filter filter) { // scan directories for a valid project version subdirectory, meaning this must be a project directory - final Predicate dFilter = new DirectoryFilter(filter); - try (Stream stream = Files.list(dir)) { - boolean projFound = stream.filter(dFilter) + final Predicate dFilter = new DirectoryFilter(filter); + boolean projFound = dir.list().stream().filter(dFilter) .anyMatch(path -> isProjectVersion(path)); if (projFound) { return true; } - } catch (IOException e) { - LOGGER.error("Could not read directory list {}: {}", dir, e.getMessage(), e); - } // if a metadata file is present, check if this is the "artifactId" directory, marking it as a project ArchivaRepositoryMetadata metadata = readMetadata(dir); - if (metadata != null && dir.getFileName().toString().equals(metadata.getArtifactId())) { + if (metadata != null && dir.getName().toString().equals(metadata.getArtifactId())) { return true; } return false; } - private boolean isProjectVersion(Path dir) { - final String 
artifactId = dir.getParent().getFileName().toString(); - final String projectVersion = dir.getFileName().toString(); + private boolean isProjectVersion(StorageAsset dir) { + final String artifactId = dir.getParent().getName(); + final String projectVersion = dir.getName(); // check if there is a POM artifact file to ensure it is a version directory - Predicate filter; + Predicate filter; if (VersionUtil.isSnapshot(projectVersion)) { filter = new PomFilenameFilter(artifactId, projectVersion); } else { final String pomFile = artifactId + "-" + projectVersion + ".pom"; filter = new PomFileFilter(pomFile); } - try (Stream stream = Files.list(dir)) { - if (stream.filter(Files::isRegularFile).anyMatch(filter)) { + if (dir.list().stream().filter(f -> !f.isContainer()).anyMatch(filter)) { return true; } - } catch (IOException e) { - LOGGER.error("Could not list directory {}: {}", dir, e.getMessage(), e); - } - // if a metadata file is present, check if this is the "version" directory, marking it as a project version ArchivaRepositoryMetadata metadata = readMetadata(dir); if (metadata != null && projectVersion.equals(metadata.getVersion())) { @@ -857,13 +826,13 @@ public class Maven2RepositoryStorage return false; } - private ArchivaRepositoryMetadata readMetadata(Path directory) { + private ArchivaRepositoryMetadata readMetadata(StorageAsset directory) { ArchivaRepositoryMetadata metadata = null; - Path metadataFile = directory.resolve(METADATA_FILENAME); - if (Files.exists(metadataFile)) { + StorageAsset metadataFile = directory.resolve(METADATA_FILENAME); + if (metadataFile.exists()) { try { metadata = MavenMetadataReader.read(metadataFile); - } catch (XMLException e) { + } catch (XMLException | IOException e) { // ignore missing or invalid metadata } } @@ -871,7 +840,7 @@ public class Maven2RepositoryStorage } private static class DirectoryFilter - implements Predicate { + implements Predicate { private final Filter filter; public DirectoryFilter(Filter filter) { @@ -879,13 +848,13 @@ public class Maven2RepositoryStorage } @Override - public boolean test(Path dir) { - final String name = dir.getFileName().toString(); + public boolean test(StorageAsset dir) { + final String name = dir.getName(); if (!filter.accept(name)) { return false; } else if (name.startsWith(".")) { return false; - } else if (!Files.isDirectory(dir)) { + } else if (!dir.isContainer()) { return false; } return true; @@ -893,7 +862,7 @@ public class Maven2RepositoryStorage } private static class ArtifactDirectoryFilter - implements Predicate { + implements Predicate { private final Filter filter; private ArtifactDirectoryFilter(Filter filter) { @@ -901,8 +870,8 @@ public class Maven2RepositoryStorage } @Override - public boolean test(Path dir) { - final String name = dir.getFileName().toString(); + public boolean test(StorageAsset dir) { + final String name = dir.getName().toString(); // TODO compare to logic in maven-repository-layer if (!filter.accept(name)) { return false; @@ -912,7 +881,7 @@ public class Maven2RepositoryStorage return false; } else if (Arrays.binarySearch(IGNORED_FILES, name) >= 0) { return false; - } else if (Files.isDirectory(dir)) { + } else if (dir.isContainer()) { return false; } // some files from remote repositories can have name like maven-metadata-archiva-vm-all-public.xml @@ -927,7 +896,7 @@ public class Maven2RepositoryStorage private static final class PomFilenameFilter - implements Predicate { + implements Predicate { private final String artifactId, projectVersion; @@ -937,8 +906,8 @@ public 
class Maven2RepositoryStorage } @Override - public boolean test(Path dir) { - final String name = dir.getFileName().toString(); + public boolean test(StorageAsset dir) { + final String name = dir.getName(); if (name.startsWith(artifactId + "-") && name.endsWith(".pom")) { String v = name.substring(artifactId.length() + 1, name.length() - 4); v = VersionUtil.getBaseVersion(v); @@ -952,7 +921,7 @@ public class Maven2RepositoryStorage } private static class PomFileFilter - implements Predicate { + implements Predicate { private final String pomFile; private PomFileFilter(String pomFile) { @@ -960,8 +929,8 @@ public class Maven2RepositoryStorage } @Override - public boolean test(Path dir) { - return pomFile.equals(dir.getFileName().toString()); + public boolean test(StorageAsset dir) { + return pomFile.equals(dir.getName()); } } diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/RepositoryModelResolver.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/RepositoryModelResolver.java index f978d554f..b03beed9f 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/RepositoryModelResolver.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/RepositoryModelResolver.java @@ -32,6 +32,7 @@ import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.RemoteRepository; import org.apache.archiva.repository.RepositoryCredentials; import org.apache.archiva.repository.maven2.MavenSystemManager; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.xml.XMLException; import org.apache.commons.lang.StringUtils; import org.apache.http.auth.UsernamePasswordCredentials; @@ -78,7 +79,7 @@ public class RepositoryModelResolver private RepositorySystemSession session; private VersionRangeResolver versionRangeResolver; - private Path basedir; + private StorageAsset basedir; private RepositoryPathTranslator pathTranslator; @@ -98,7 +99,7 @@ public class RepositoryModelResolver private ManagedRepository managedRepository; - public RepositoryModelResolver( Path basedir, RepositoryPathTranslator pathTranslator ) + public RepositoryModelResolver(StorageAsset basedir, RepositoryPathTranslator pathTranslator ) { this.basedir = basedir; @@ -110,7 +111,7 @@ public class RepositoryModelResolver Map networkProxiesMap, ManagedRepository targetRepository, MavenSystemManager mavenSystemManager) { - this( Paths.get( managedRepository.getLocation() ), pathTranslator ); + this( managedRepository.getAsset(""), pathTranslator ); this.managedRepository = managedRepository; @@ -138,9 +139,9 @@ public class RepositoryModelResolver String filename = artifactId + "-" + version + ".pom"; // TODO: we need to convert 1.0-20091120.112233-1 type paths to baseVersion for the below call - add a test - Path model = pathTranslator.toFile( basedir, groupId, artifactId, version, filename ); + StorageAsset model = pathTranslator.toFile( basedir, groupId, artifactId, version, filename ); - if ( !Files.exists(model) ) + if ( !model.exists() ) { /** * @@ -161,10 +162,10 @@ public class RepositoryModelResolver try { boolean success = getModelFromProxy( remoteRepository, groupId, artifactId, version, filename ); - if ( success && Files.exists(model) ) + if ( success && 
model.exists() ) { log.info( "Model '{}' successfully retrieved from remote repository '{}'", - model.toAbsolutePath(), remoteRepository.getId() ); + model.getPath(), remoteRepository.getId() ); break; } } @@ -172,20 +173,20 @@ public class RepositoryModelResolver { log.info( "An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}", - model.toAbsolutePath(), remoteRepository.getId(), e.getMessage() ); + model.getPath(), remoteRepository.getId(), e.getMessage() ); } catch ( Exception e ) { log.warn( "An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}", - model.toAbsolutePath(), remoteRepository.getId(), e.getMessage() ); + model.getPath(), remoteRepository.getId(), e.getMessage() ); continue; } } } - return new FileModelSource( model.toFile() ); + return new FileModelSource( model.getFilePath().toFile() ); } public ModelSource resolveModel(Parent parent) throws UnresolvableModelException { @@ -249,15 +250,15 @@ public class RepositoryModelResolver log.debug( "use snapshot path {} for maven coordinate {}:{}:{}", snapshotPath, groupId, artifactId, version ); - Path model = basedir.resolve( snapshotPath ); + StorageAsset model = basedir.resolve( snapshotPath ); //model = pathTranslator.toFile( basedir, groupId, artifactId, lastVersion, filename ); - if ( Files.exists(model) ) + if ( model.exists() ) { - return model; + return model.getFilePath(); } } } - catch ( XMLException e ) + catch (XMLException | IOException e ) { log.warn( "fail to read {}, {}", mavenMetadata.toAbsolutePath(), e.getCause() ); } diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java index 9c36647d0..266a84105 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java @@ -34,7 +34,7 @@ import org.apache.archiva.repository.LayoutException; import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.RepositoryException; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; import java.io.IOException; diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenManagedRepository.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenManagedRepository.java index 026990e87..f8e171dac 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenManagedRepository.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenManagedRepository.java @@ -23,8 +23,7 @@ import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.utils.PathUtil; import 
org.apache.archiva.repository.*; -import org.apache.archiva.repository.content.FilesystemStorage; -import org.apache.archiva.repository.content.RepositoryStorage; +import org.apache.archiva.repository.storage.FilesystemStorage; import org.apache.archiva.repository.content.maven2.MavenRepositoryRequestInfo; import org.apache.archiva.repository.features.ArtifactCleanupFeature; import org.apache.archiva.repository.features.IndexCreationFeature; @@ -38,7 +37,6 @@ import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.util.Locale; -import java.util.function.Function; /** * Maven2 managed repository implementation. diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRemoteRepository.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRemoteRepository.java index f3380c0ec..e89321704 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRemoteRepository.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRemoteRepository.java @@ -9,7 +9,7 @@ import org.apache.archiva.repository.RepositoryCapabilities; import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.StandardCapabilities; import org.apache.archiva.repository.UnsupportedFeatureException; -import org.apache.archiva.repository.content.FilesystemStorage; +import org.apache.archiva.repository.storage.FilesystemStorage; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.archiva.repository.features.RepositoryFeature; diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryGroup.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryGroup.java index 56e65a2bd..60c91ac71 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryGroup.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryGroup.java @@ -22,13 +22,12 @@ package org.apache.archiva.repository.maven2; import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.repository.*; -import org.apache.archiva.repository.content.FilesystemStorage; +import org.apache.archiva.repository.storage.FilesystemStorage; import org.apache.archiva.repository.features.IndexCreationFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; -import java.nio.file.Files; import java.nio.file.Path; import java.util.Locale; diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryProvider.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryProvider.java index bbcb6585d..75b2117a2 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryProvider.java +++ 
b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryProvider.java @@ -22,8 +22,7 @@ package org.apache.archiva.repository.maven2; import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.configuration.*; import org.apache.archiva.repository.*; -import org.apache.archiva.repository.content.FilesystemAsset; -import org.apache.archiva.repository.content.FilesystemStorage; +import org.apache.archiva.repository.storage.FilesystemStorage; import org.apache.archiva.repository.features.ArtifactCleanupFeature; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.RemoteIndexFeature; @@ -298,11 +297,19 @@ public class MavenRepositoryProvider implements RepositoryProvider { IndexCreationFeature indexCreationFeature = repositoryGroup.getFeature( IndexCreationFeature.class ).get(); indexCreationFeature.setIndexPath( getURIFromString(configuration.getMergedIndexPath()) ); Path localPath = Paths.get(configuration.getMergedIndexPath()); - if (localPath.isAbsolute()) { - indexCreationFeature.setLocalIndexPath( new FilesystemAsset(localPath.getFileName().toString(), localPath) ); + Path repoGroupPath = repositoryGroup.getAsset("").getFilePath().toAbsolutePath(); + if (localPath.isAbsolute() && !localPath.startsWith(repoGroupPath)) { + try { + FilesystemStorage storage = new FilesystemStorage(localPath.getParent(), fileLockManager); + indexCreationFeature.setLocalIndexPath(storage.getAsset(localPath.getFileName().toString())); + } catch (IOException e) { + throw new RepositoryException("Could not initialize storage for index path "+localPath); + } + } else if (localPath.isAbsolute()) { + indexCreationFeature.setLocalIndexPath(repositoryGroup.getAsset(repoGroupPath.relativize(localPath).toString())); } else { - indexCreationFeature.setLocalIndexPath( new FilesystemAsset(localPath.toString(), archivaConfiguration.getRepositoryGroupBaseDir( ).resolve( localPath ))); + indexCreationFeature.setLocalIndexPath(repositoryGroup.getAsset(localPath.toString())); } } // References to other repositories are set filled by the registry diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/MavenRepositoryMetadataReaderTest.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/MavenRepositoryMetadataReaderTest.java index e2fec5010..769be5ef9 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/MavenRepositoryMetadataReaderTest.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/MavenRepositoryMetadataReaderTest.java @@ -29,6 +29,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; @@ -46,8 +47,7 @@ public class MavenRepositoryMetadataReaderTest @Test public void testGroupMetadata() - throws XMLException - { + throws XMLException, IOException { Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/plugins/maven-metadata.xml" ); ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile ); @@ -81,8 +81,7 @@ public class MavenRepositoryMetadataReaderTest @Test public void 
testProjectMetadata() - throws XMLException - { + throws XMLException, IOException { Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" ); ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile); @@ -99,8 +98,7 @@ public class MavenRepositoryMetadataReaderTest @Test public void testProjectVersionMetadata() - throws XMLException - { + throws XMLException, IOException { Path metadataFile = defaultRepoDir.resolve( "org/apache/apache/5-SNAPSHOT/maven-metadata.xml" ); ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile ); diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/ArchivaIndexManagerMock.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/ArchivaIndexManagerMock.java index 171563add..53bb59308 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/ArchivaIndexManagerMock.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/ArchivaIndexManagerMock.java @@ -19,7 +19,7 @@ package org.apache.archiva.repository.index.mock; * under the License. */ -import org.apache.archiva.admin.model.RepositoryAdminException; +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.configuration.ArchivaConfiguration; @@ -40,8 +40,9 @@ import org.apache.archiva.repository.RemoteRepository; import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.UnsupportedRepositoryTypeException; -import org.apache.archiva.repository.content.FilesystemAsset; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.commons.lang.StringUtils; @@ -145,7 +146,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { private Path getIndexPath( ArchivaIndexingContext ctx ) { - return PathUtil.getPathFromUri( ctx.getPath( ) ); + return ctx.getPath( ).getFilePath(); } @FunctionalInterface @@ -380,9 +381,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { @Override public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection artifactReference ) throws IndexUpdateFailedException { - final URI ctxUri = context.getPath(); + final StorageAsset ctxUri = context.getPath(); executeUpdateFunction(context, indexingContext -> { - Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList()); + Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList()); try { indexer.addArtifactsToIndex(artifacts, indexingContext); } catch (IOException e) { @@ -396,9 +397,9 @@ public class 
ArchivaIndexManagerMock implements ArchivaIndexManager { @Override public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection artifactReference ) throws IndexUpdateFailedException { - final URI ctxUri = context.getPath(); + final StorageAsset ctxUri = context.getPath(); executeUpdateFunction(context, indexingContext -> { - Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList()); + Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList()); try { indexer.deleteArtifactsFromIndex(artifacts, indexingContext); } catch (IOException e) { @@ -442,7 +443,12 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e ); } - MavenIndexContextMock context = new MavenIndexContextMock( repository, mvnCtx ); + MavenIndexContextMock context = null; + try { + context = new MavenIndexContextMock( repository, mvnCtx ); + } catch (IOException e) { + throw new IndexCreationFailedException(e); + } return context; } @@ -457,7 +463,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { log.warn("Index close failed"); } try { - FileUtils.deleteDirectory(Paths.get(context.getPath())); + FileUtils.deleteDirectory(context.getPath().getFilePath()); } catch (IOException e) { throw new IndexUpdateFailedException("Could not delete index files"); } @@ -527,12 +533,14 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { } + private StorageAsset getIndexPath( Repository repo) throws IOException { IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get(); Path repoDir = repo.getAsset( "" ).getFilePath(); URI indexDir = icf.getIndexPath(); String indexPath = indexDir.getPath(); Path indexDirectory = null; + FilesystemStorage fsStorage = (FilesystemStorage) repo.getAsset("").getStorage(); if ( ! 
StringUtils.isEmpty(indexDir.toString( ) ) ) { @@ -541,6 +549,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { if ( indexDirectory.isAbsolute( ) ) { indexPath = indexDirectory.getFileName().toString(); + fsStorage = new FilesystemStorage(indexDirectory.getParent(), new DefaultFileLockManager()); } else { @@ -557,7 +566,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { { Files.createDirectories( indexDirectory ); } - return new FilesystemAsset( indexPath, indexDirectory ); + return new FilesystemAsset( fsStorage, indexPath, indexDirectory ); } private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/MavenIndexContextMock.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/MavenIndexContextMock.java index e0db09597..a6dddae4e 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/MavenIndexContextMock.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/MavenIndexContextMock.java @@ -19,12 +19,14 @@ package org.apache.archiva.repository.index.mock; * under the License. */ +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.repository.Repository; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.maven.index.context.IndexingContext; import java.io.IOException; -import java.net.URI; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.sql.Date; @@ -38,10 +40,12 @@ public class MavenIndexContextMock implements ArchivaIndexingContext { private IndexingContext delegate; private Repository repository; + private FilesystemStorage indexStorage; - MavenIndexContextMock(Repository repository, IndexingContext delegate) { + MavenIndexContextMock(Repository repository, IndexingContext delegate) throws IOException { this.delegate = delegate; this.repository = repository; + indexStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath(), new DefaultFileLockManager()); } @@ -56,8 +60,8 @@ public class MavenIndexContextMock implements ArchivaIndexingContext { } @Override - public URI getPath() { - return delegate.getIndexDirectoryFile().toURI(); + public StorageAsset getPath() { + return indexStorage.getAsset(""); } @Override diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/metadata/RepositoryMetadataReaderTest.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/metadata/RepositoryMetadataReaderTest.java index 7c7da49cf..a02814042 100644 --- a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/metadata/RepositoryMetadataReaderTest.java +++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/metadata/RepositoryMetadataReaderTest.java @@ -27,6 +27,7 @@ import org.apache.archiva.xml.XMLException; import org.junit.Test; import org.junit.runner.RunWith; +import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; @@ -41,8 +42,7 @@ public 
class RepositoryMetadataReaderTest { @Test public void testLoadSimple() - throws XMLException - { + throws XMLException, IOException { Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" ); Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" ); @@ -59,8 +59,7 @@ public class RepositoryMetadataReaderTest @Test public void testLoadComplex() - throws XMLException - { + throws XMLException, IOException { Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" ); Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/samplejar/maven-metadata.xml" ); diff --git a/archiva-modules/archiva-maven/archiva-maven-scheduler/src/test/java/org/apache/archiva/scheduler/indexing/maven/ArchivaIndexingTaskExecutorTest.java b/archiva-modules/archiva-maven/archiva-maven-scheduler/src/test/java/org/apache/archiva/scheduler/indexing/maven/ArchivaIndexingTaskExecutorTest.java index a890ca4f6..b4a8e05ce 100644 --- a/archiva-modules/archiva-maven/archiva-maven-scheduler/src/test/java/org/apache/archiva/scheduler/indexing/maven/ArchivaIndexingTaskExecutorTest.java +++ b/archiva-modules/archiva-maven/archiva-maven-scheduler/src/test/java/org/apache/archiva/scheduler/indexing/maven/ArchivaIndexingTaskExecutorTest.java @@ -26,7 +26,7 @@ import org.apache.archiva.repository.BasicManagedRepository; import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ReleaseScheme; import org.apache.archiva.repository.RepositoryRegistry; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; diff --git a/archiva-modules/archiva-scheduler/archiva-scheduler-repository-api/src/main/java/org/apache/archiva/scheduler/repository/model/RepositoryTask.java b/archiva-modules/archiva-scheduler/archiva-scheduler-repository-api/src/main/java/org/apache/archiva/scheduler/repository/model/RepositoryTask.java index a858c1a84..511ce6149 100644 --- a/archiva-modules/archiva-scheduler/archiva-scheduler-repository-api/src/main/java/org/apache/archiva/scheduler/repository/model/RepositoryTask.java +++ b/archiva-modules/archiva-scheduler/archiva-scheduler-repository-api/src/main/java/org/apache/archiva/scheduler/repository/model/RepositoryTask.java @@ -1,9 +1,7 @@ package org.apache.archiva.scheduler.repository.model; import org.apache.archiva.redback.components.taskqueue.Task; -import org.apache.archiva.repository.content.StorageAsset; - -import java.nio.file.Path; +import org.apache.archiva.repository.storage.StorageAsset; /* diff --git a/archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/ArchivaIndexManagerMock.java b/archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/ArchivaIndexManagerMock.java index 60f0a7b75..90d742475 100644 --- a/archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/ArchivaIndexManagerMock.java +++ b/archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/ArchivaIndexManagerMock.java @@ -19,6 +19,7 @@ package org.apache.archiva.mock; * under the License. 
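
In the mock index-manager hunks above and below, the indexing context now exposes a StorageAsset, and each artifact reference is resolved against the context's backing directory via getFilePath().toUri().resolve(reference). The following plain-JDK sketch isolates that resolution idiom; the directory and reference values are invented for the demo, and the trailing-slash check matters because Path.toUri() only appends the slash when the directory actually exists.

    import java.net.URI;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class ContextResolveSketch {
        public static void main(String[] args) {
            // Hypothetical directory backing the indexing context.
            Path contextDir = Paths.get("/tmp/archiva/managed-repo");

            // Relative artifact reference, as passed to addArtifactsToIndex (made-up value).
            String reference = "org/example/demo/1.0/demo-1.0.jar";

            URI base = contextDir.toUri();
            // Path.toUri() appends the trailing '/' only when the directory exists;
            // without it, URI.resolve() would drop the last path segment.
            if (!base.toString().endsWith("/")) {
                base = URI.create(base + "/");
            }

            Path artifactFile = Paths.get(base.resolve(reference));
            System.out.println(artifactFile);
            // e.g. /tmp/archiva/managed-repo/org/example/demo/1.0/demo-1.0.jar on a Unix-like system
        }
    }
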
*/ +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.configuration.ArchivaConfiguration; @@ -27,7 +28,6 @@ import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.indexer.IndexCreationFailedException; import org.apache.archiva.indexer.IndexUpdateFailedException; import org.apache.archiva.indexer.UnsupportedBaseContextException; -import org.apache.archiva.proxy.ProxyRegistry; import org.apache.archiva.proxy.maven.WagonFactory; import org.apache.archiva.proxy.maven.WagonFactoryException; import org.apache.archiva.proxy.maven.WagonFactoryRequest; @@ -39,10 +39,12 @@ import org.apache.archiva.repository.RemoteRepository; import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.UnsupportedRepositoryTypeException; -import org.apache.archiva.repository.content.FilesystemAsset; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.RemoteIndexFeature; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.commons.lang.StringUtils; import org.apache.maven.index.ArtifactContext; import org.apache.maven.index.ArtifactContextProducer; @@ -140,7 +142,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { private Path getIndexPath( ArchivaIndexingContext ctx ) { - return PathUtil.getPathFromUri( ctx.getPath( ) ); + return ctx.getPath().getFilePath(); } @FunctionalInterface @@ -365,9 +367,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { @Override public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection artifactReference ) throws IndexUpdateFailedException { - final URI ctxUri = context.getPath(); + StorageAsset ctxUri = context.getPath(); executeUpdateFunction(context, indexingContext -> { - Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList()); + Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList()); try { indexer.addArtifactsToIndex(artifacts, indexingContext); } catch (IOException e) { @@ -381,9 +383,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { @Override public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection artifactReference ) throws IndexUpdateFailedException { - final URI ctxUri = context.getPath(); + final StorageAsset ctxUri = context.getPath(); executeUpdateFunction(context, indexingContext -> { - Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList()); + Collection artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList()); try { 
indexer.deleteArtifactsFromIndex(artifacts, indexingContext); } catch (IOException e) { @@ -442,7 +444,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { log.warn("Index close failed"); } try { - FileUtils.deleteDirectory(Paths.get(context.getPath())); + StorageUtil.deleteRecursively(context.getPath()); } catch (IOException e) { throw new IndexUpdateFailedException("Could not delete index files"); } @@ -517,6 +519,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { URI indexDir = icf.getIndexPath(); String indexPath = indexDir.getPath(); Path indexDirectory = null; + FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getAsset("").getStorage(); if ( ! StringUtils.isEmpty(indexDir.toString( ) ) ) { @@ -525,6 +528,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { if ( indexDirectory.isAbsolute( ) ) { indexPath = indexDirectory.getFileName().toString(); + filesystemStorage = new FilesystemStorage(indexDirectory, new DefaultFileLockManager()); } else { @@ -541,7 +545,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager { { Files.createDirectories( indexDirectory ); } - return new FilesystemAsset( indexPath, indexDirectory); + return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory); } private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException diff --git a/archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MavenIndexContextMock.java b/archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MavenIndexContextMock.java index b3ae99c7b..fa070a7d9 100644 --- a/archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MavenIndexContextMock.java +++ b/archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MavenIndexContextMock.java @@ -19,8 +19,12 @@ package org.apache.archiva.mock; * under the License. 
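
Just above, FileUtils.deleteDirectory(Paths.get(context.getPath())) becomes StorageUtil.deleteRecursively(context.getPath()). StorageUtil.deleteRecursively is Archiva's own helper, so the sketch below is not its implementation; it is only the equivalent bottom-up delete written with plain java.nio.file, roughly what a filesystem-backed storage has to do for the same call.

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Comparator;
    import java.util.stream.Stream;

    public class RecursiveDeleteSketch {

        // Delete a directory tree bottom-up: files and subdirectories before their parents.
        static void deleteRecursively(Path root) throws IOException {
            if (!Files.exists(root)) {
                return;
            }
            try (Stream<Path> tree = Files.walk(root)) {
                tree.sorted(Comparator.reverseOrder()).forEach(p -> {
                    try {
                        Files.deleteIfExists(p);
                    } catch (IOException e) {
                        throw new UncheckedIOException(e);
                    }
                });
            }
        }

        public static void main(String[] args) throws IOException {
            Path ctxDir = Files.createTempDirectory("index-context");
            Files.createDirectories(ctxDir.resolve(".indexer"));
            Files.writeString(ctxDir.resolve(".indexer").resolve("segments"), "demo");
            deleteRecursively(ctxDir);
            System.out.println("still exists: " + Files.exists(ctxDir)); // still exists: false
        }
    }
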
*/ +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.repository.Repository; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.maven.index.context.IndexingContext; import java.io.IOException; @@ -38,10 +42,16 @@ public class MavenIndexContextMock implements ArchivaIndexingContext { private IndexingContext delegate; private Repository repository; + private FilesystemStorage filesystemStorage; MavenIndexContextMock( Repository repository, IndexingContext delegate) { this.delegate = delegate; this.repository = repository; + try { + filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath().getParent(), new DefaultFileLockManager()); + } catch (IOException e) { + e.printStackTrace(); + } } @@ -56,8 +66,9 @@ public class MavenIndexContextMock implements ArchivaIndexingContext { } @Override - public URI getPath() { - return delegate.getIndexDirectoryFile().toURI(); + public StorageAsset getPath() { + return new FilesystemAsset(filesystemStorage, delegate.getIndexDirectoryFile().toPath().getFileName().toString(), delegate.getIndexDirectoryFile().toPath()); + } @Override diff --git a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/pom.xml b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/pom.xml index 1067fc24a..403377b42 100644 --- a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/pom.xml +++ b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/pom.xml @@ -44,6 +44,10 @@ org.apache.archiva archiva-security + + org.apache.archiva + archiva-storage-api + org.apache.archiva archiva-repository-admin-api diff --git a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultBrowseService.java b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultBrowseService.java index 9488261b4..56117836d 100644 --- a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultBrowseService.java +++ b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultBrowseService.java @@ -43,6 +43,8 @@ import org.apache.archiva.repository.ReleaseScheme; import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryNotFoundException; import org.apache.archiva.repository.metadata.MetadataTools; +import org.apache.archiva.repository.storage.StorageAsset; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.archiva.rest.api.model.*; import org.apache.archiva.rest.api.services.ArchivaRestServiceException; import org.apache.archiva.rest.api.services.BrowseService; @@ -62,6 +64,8 @@ import java.io.InputStream; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.nio.file.StandardOpenOption; import java.util.*; import java.util.jar.JarEntry; import java.util.jar.JarFile; @@ -696,8 +700,8 @@ public class DefaultBrowseService ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier, StringUtils.isEmpty( type ) ? 
"jar" : type, repoId ); - Path file = managedRepositoryContent.toFile( archivaArtifact ); - if ( Files.exists(file) ) + StorageAsset file = managedRepositoryContent.toFile( archivaArtifact ); + if ( file.exists() ) { return readFileEntries( file, path, repoId ); } @@ -781,8 +785,8 @@ public class DefaultBrowseService ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier, StringUtils.isEmpty( type ) ? "jar" : type, repoId ); - Path file = managedRepositoryContent.toFile( archivaArtifact ); - if ( !Files.exists(file) ) + StorageAsset file = managedRepositoryContent.toFile( archivaArtifact ); + if ( !file.exists() ) { log.debug( "file: {} not exists for repository: {} try next repository", file, repoId ); continue; @@ -790,7 +794,8 @@ public class DefaultBrowseService if ( StringUtils.isNotBlank( path ) ) { // zip entry of the path -> path must a real file entry of the archive - JarFile jarFile = new JarFile( file.toFile() ); + StorageUtil.PathInformation pathInfo = StorageUtil.getAssetDataAsPath(file); + JarFile jarFile = new JarFile( pathInfo.getPath().toFile()); ZipEntry zipEntry = jarFile.getEntry( path ); try (InputStream inputStream = jarFile.getInputStream( zipEntry )) { @@ -799,9 +804,14 @@ public class DefaultBrowseService finally { closeQuietly( jarFile ); + if (pathInfo.isTmpFile()) { + Files.deleteIfExists(pathInfo.getPath()); + } } } - return new ArtifactContent( new String(Files.readAllBytes( file ), ARTIFACT_CONTENT_ENCODING), repoId ); + try(InputStream readStream = file.getReadStream()) { + return new ArtifactContent(IOUtils.toString(readStream, ARTIFACT_CONTENT_ENCODING), repoId); + } } } catch ( IOException e ) @@ -846,9 +856,9 @@ public class DefaultBrowseService StringUtils.isEmpty( classifier ) ? "" : classifier, "jar", repoId ); - Path file = managedRepositoryContent.toFile( archivaArtifact ); + StorageAsset file = managedRepositoryContent.toFile( archivaArtifact ); - if ( file != null && Files.exists(file) ) + if ( file != null && file.exists() ) { return true; } @@ -856,8 +866,8 @@ public class DefaultBrowseService // in case of SNAPSHOT we can have timestamped version locally ! if ( StringUtils.endsWith( version, VersionUtil.SNAPSHOT ) ) { - Path metadataFile = file.getParent().resolve(MetadataTools.MAVEN_METADATA ); - if ( Files.exists(metadataFile) ) + StorageAsset metadataFile = file.getStorage().getAsset(file.getParent().getPath()+"/"+MetadataTools.MAVEN_METADATA ); + if ( metadataFile.exists() ) { try { @@ -873,14 +883,14 @@ public class DefaultBrowseService .append( ( StringUtils.isEmpty( classifier ) ? 
"" : "-" + classifier ) ) // .append( ".jar" ).toString(); - Path timeStampFile = file.getParent().resolve( timeStampFileName ); - log.debug( "try to find timestamped snapshot version file: {}", timeStampFile.toAbsolutePath() ); - if ( Files.exists(timeStampFile) ) + StorageAsset timeStampFile = file.getStorage().getAsset(file.getParent().getPath() + "/" + timeStampFileName ); + log.debug( "try to find timestamped snapshot version file: {}", timeStampFile.getPath() ); + if ( timeStampFile.exists() ) { return true; } } - catch ( XMLException e ) + catch (XMLException | IOException e ) { log.warn( "skip fail to find timestamped snapshot file: {}", e.getMessage() ); } @@ -891,7 +901,7 @@ public class DefaultBrowseService file = proxyHandler.fetchFromProxies( managedRepositoryContent, path ); - if ( file != null && Files.exists(file) ) + if ( file != null && file.exists() ) { // download pom now String pomPath = StringUtils.substringBeforeLast( path, ".jar" ) + ".pom"; @@ -1075,7 +1085,7 @@ public class DefaultBrowseService } } - protected List readFileEntries(final Path file, final String filterPath, final String repoId ) + protected List readFileEntries(final StorageAsset file, final String filterPath, final String repoId ) throws IOException { String cleanedfilterPath = filterPath==null ? "" : (StringUtils.startsWith(filterPath, "/") ? @@ -1085,7 +1095,9 @@ public class DefaultBrowseService if (!StringUtils.endsWith(cleanedfilterPath,"/") && !StringUtils.isEmpty(cleanedfilterPath)) { filterDepth++; } - JarFile jarFile = new JarFile( file.toFile() ); + + StorageUtil.PathInformation pathInfo = StorageUtil.getAssetDataAsPath(file); + JarFile jarFile = new JarFile(pathInfo.getPath().toFile()); try { Enumeration jarEntryEnumeration = jarFile.entries(); @@ -1141,6 +1153,9 @@ public class DefaultBrowseService { jarFile.close(); } + if (pathInfo.isTmpFile()) { + Files.deleteIfExists(pathInfo.getPath()); + } } List sorted = new ArrayList<>( artifactContentEntryMap.values() ); Collections.sort( sorted, ArtifactContentEntryComparator.INSTANCE ); diff --git a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultRepositoriesService.java b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultRepositoriesService.java index f3dd25569..b2d23dde8 100644 --- a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultRepositoriesService.java +++ b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultRepositoriesService.java @@ -54,9 +54,9 @@ import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryNotFoundException; import org.apache.archiva.repository.RepositoryRegistry; -import org.apache.archiva.repository.content.RepositoryStorage; -import org.apache.archiva.repository.content.StorageAsset; -import org.apache.archiva.repository.content.StorageUtil; +import org.apache.archiva.repository.storage.RepositoryStorage; +import org.apache.archiva.repository.storage.StorageAsset; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.archiva.repository.events.RepositoryListener; import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.RepositoryMetadataException; @@ -89,11 +89,10 @@ 
import javax.inject.Inject; import javax.inject.Named; import javax.ws.rs.core.Response; import java.io.IOException; -import java.nio.file.FileSystems; +import java.io.OutputStream; +import java.io.OutputStreamWriter; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -522,7 +521,7 @@ public class DefaultRepositoriesService { metadata = MavenMetadataReader.read( metadataFile.getFilePath() ); } - catch ( XMLException e ) + catch (XMLException | IOException e ) { throw new RepositoryMetadataException( e.getMessage(), e ); } @@ -543,7 +542,7 @@ public class DefaultRepositoriesService throws IOException { - StorageUtil.copyAsset( sourceStorage, sourceFile, targetStorage, targetPath, true ); + StorageUtil.copyAsset( sourceFile, targetPath, true ); if ( fixChecksums ) { fixChecksums( targetPath ); @@ -612,7 +611,11 @@ public class DefaultRepositoriesService projectMetadata.setReleasedVersion( latestVersion ); } - RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile.getFilePath()); + try(OutputStreamWriter writer = new OutputStreamWriter(projectMetadataFile.getWriteStream(true))) { + RepositoryMetadataWriter.write(projectMetadata, writer); + } catch (IOException e) { + throw new RepositoryMetadataException(e); + } if ( fixChecksums ) { @@ -1177,7 +1180,11 @@ public class DefaultRepositoriesService metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp ); metadata.setAvailableVersions( availableVersions ); - RepositoryMetadataWriter.write( metadata, metadataFile.getFilePath()); + try (OutputStreamWriter writer = new OutputStreamWriter(metadataFile.getWriteStream(true))) { + RepositoryMetadataWriter.write(metadata, writer); + } catch (IOException e) { + throw new RepositoryMetadataException(e); + } ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() ); checksum.fixChecksums( algorithms ); } diff --git a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/utils/ArtifactBuilder.java b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/utils/ArtifactBuilder.java index 7544dd355..2c6db17bf 100644 --- a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/utils/ArtifactBuilder.java +++ b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/utils/ArtifactBuilder.java @@ -23,6 +23,8 @@ import org.apache.archiva.metadata.model.ArtifactMetadata; import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet; import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.repository.ManagedRepositoryContent; +import org.apache.archiva.repository.storage.StorageAsset; +import org.apache.archiva.repository.storage.StorageUtil; import org.apache.commons.io.FilenameUtils; import java.nio.file.Path; @@ -79,7 +81,7 @@ public class ArtifactBuilder ref.setClassifier( classifier ); ref.setType( type ); - Path file = managedRepositoryContent.toFile( ref ); + StorageAsset file = managedRepositoryContent.toFile( ref ); String extension = getExtensionFromFile(file); @@ -124,10 +126,10 @@ public class ArtifactBuilder /** * Extract file extension */ - String getExtensionFromFile( Path file ) + String getExtensionFromFile( StorageAsset file ) { // we are just interested in 
the section after the last - - String[] parts = file.getFileName().toString().split( "-" ); + String[] parts = file.getName().split( "-" ); if ( parts.length > 0 ) { // get anything after a dot followed by a letter a-z, including other dots @@ -139,7 +141,7 @@ public class ArtifactBuilder } } // just in case - return FilenameUtils.getExtension( file.toFile().getName() ); + return StorageUtil.getExtension( file ); } } diff --git a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/ArtifactContentEntriesTests.java b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/ArtifactContentEntriesTests.java index 1472de5d9..4cd630e3c 100644 --- a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/ArtifactContentEntriesTests.java +++ b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/ArtifactContentEntriesTests.java @@ -19,6 +19,9 @@ package org.apache.archiva.rest.services; */ import junit.framework.TestCase; +import org.apache.archiva.common.filelock.DefaultFileLockManager; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; import org.apache.archiva.rest.api.model.ArtifactContentEntry; import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner; import org.junit.Test; @@ -56,10 +59,11 @@ public class ArtifactContentEntriesTests throws Exception { + FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager()); Path file = Paths.get( getBasedir(), "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" ); - List artifactContentEntries = browseService.readFileEntries( file, null, "foo" ); + List artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), null, "foo" ); log.info( "artifactContentEntries: {}", artifactContentEntries ); @@ -74,10 +78,12 @@ public class ArtifactContentEntriesTests throws Exception { + FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager()); Path file = Paths.get( getBasedir(), "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" ); - List artifactContentEntries = browseService.readFileEntries( file, "", "foo" ); + List artifactContentEntries = browseService.readFileEntries( + new FilesystemAsset(filesystemStorage, file.toString(), file), "", "foo" ); log.info( "artifactContentEntries: {}", artifactContentEntries ); @@ -92,10 +98,12 @@ public class ArtifactContentEntriesTests throws Exception { + FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager()); + Path file = Paths.get( getBasedir(), "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" ); - List artifactContentEntries = browseService.readFileEntries( file, "/", "foo" ); + List artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(),file), "/", "foo" ); log.info( "artifactContentEntries: {}", artifactContentEntries ); @@ -110,10 +118,12 @@ public class ArtifactContentEntriesTests throws Exception { + FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager()); + Path file = 
Paths.get( getBasedir(), "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" ); - List artifactContentEntries = browseService.readFileEntries( file, "org", "foo" ); + List artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org", "foo" ); log.info( "artifactContentEntries: {}", artifactContentEntries ); @@ -127,11 +137,13 @@ public class ArtifactContentEntriesTests throws Exception { + FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager()); + Path file = Paths.get( getBasedir(), "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" ); List artifactContentEntries = - browseService.readFileEntries( file, "org/apache/commons/logging/impl/", "foo" ); + browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org/apache/commons/logging/impl/", "foo" ); log.info( "artifactContentEntries: {}", artifactContentEntries ); @@ -145,11 +157,13 @@ public class ArtifactContentEntriesTests throws Exception { + FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager()); + Path file = Paths.get( getBasedir(), "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" ); List artifactContentEntries = - browseService.readFileEntries( file, "org/apache/commons/logging/", "foo" ); + browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org/apache/commons/logging/", "foo" ); log.info( "artifactContentEntries: {}", artifactContentEntries ); diff --git a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/utils/ArtifactBuilderTest.java b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/utils/ArtifactBuilderTest.java index f0e825b79..f95099252 100644 --- a/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/utils/ArtifactBuilderTest.java +++ b/archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/utils/ArtifactBuilderTest.java @@ -18,9 +18,15 @@ package org.apache.archiva.rest.services.utils; * under the License. 
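
The ArtifactBuilder hunk above keeps the existing extension heuristic (split the file name on '-', then take whatever follows a dot that is followed by a letter) but reads the name from the StorageAsset and falls back to StorageUtil.getExtension instead of FilenameUtils. The sketch below approximates that heuristic in plain Java so the expectations exercised by the ArtifactBuilderTest changes that follow (jar, tar.gz, pom.zip, r00) can be checked in isolation; the regex and the method name are the sketch's own assumptions, not Archiva's code.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ExtensionSketch {

        // Extension part: a dot followed by a letter, then letters/digits,
        // possibly repeated ("tar.gz", "pom.zip").
        private static final Pattern EXTENSION =
                Pattern.compile("\\.([a-z][a-z0-9]*(?:\\.[a-z][a-z0-9]*)*)$");

        static String extensionOf(String fileName) {
            String[] parts = fileName.split("-");
            String tail = parts[parts.length - 1];   // section after the last '-'
            Matcher m = EXTENSION.matcher(tail);
            if (m.find()) {
                return m.group(1);
            }
            int dot = fileName.lastIndexOf('.');     // plain fallback, similar to FilenameUtils.getExtension
            return dot < 0 ? "" : fileName.substring(dot + 1);
        }

        public static void main(String[] args) {
            System.out.println(extensionOf("foo-2.3-20141119.064321-40.jar")); // jar
            System.out.println(extensionOf("foo-1.0.tar.gz"));                 // tar.gz
            System.out.println(extensionOf("foo-1.0.pom.zip"));                // pom.zip
            System.out.println(extensionOf("foo-1.0.r00"));                    // r00
        }
    }
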
*/ +import org.apache.archiva.common.filelock.DefaultFileLockManager; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.easymock.TestSubject; import org.junit.Test; +import java.io.IOException; +import java.nio.file.Path; import java.nio.file.Paths; import static org.assertj.core.api.Assertions.assertThat; @@ -30,39 +36,39 @@ public class ArtifactBuilderTest @TestSubject private ArtifactBuilder builder = new ArtifactBuilder(); + StorageAsset getFile(String path) throws IOException { + Path filePath = Paths.get(path); + FilesystemStorage filesystemStorage = new FilesystemStorage(filePath.getParent(), new DefaultFileLockManager()); + return new FilesystemAsset(filesystemStorage, filePath.getFileName().toString(), filePath); + } + @Test - public void testBuildSnapshot() - { - assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-2.3-20141119.064321-40.jar" ) ) ).isEqualTo( "jar" ); + public void testBuildSnapshot() throws IOException { + assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-2.3-20141119.064321-40.jar" ) ) ).isEqualTo( "jar" ); } @Test - public void testBuildPom() - { - assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0.pom" ) ) ).isEqualTo( "pom" ); + public void testBuildPom() throws IOException { + assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0.pom" ) ) ).isEqualTo( "pom" ); } @Test - public void testBuildJar() - { - assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0-sources.jar" ) ) ).isEqualTo( "jar" ); + public void testBuildJar() throws IOException { + assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0-sources.jar" ) ) ).isEqualTo( "jar" ); } @Test - public void testBuildTarGz() - { - assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0.tar.gz" ) ) ).isEqualTo( "tar.gz" ); + public void testBuildTarGz() throws IOException { + assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0.tar.gz" ) ) ).isEqualTo( "tar.gz" ); } @Test - public void testBuildPomZip() - { - assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0.pom.zip" ) ) ).isEqualTo( "pom.zip" ); + public void testBuildPomZip() throws IOException { + assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0.pom.zip" ) ) ).isEqualTo( "pom.zip" ); } @Test - public void testBuildR00() - { - assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0.r00" ) ) ).isEqualTo( "r00" ); + public void testBuildR00() throws IOException { + assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0.r00" ) ) ).isEqualTo( "r00" ); } } diff --git a/archiva-modules/archiva-web/archiva-security/src/test/java/org/apache/archiva/security/mock/MockBeanServices.java b/archiva-modules/archiva-web/archiva-security/src/test/java/org/apache/archiva/security/mock/MockBeanServices.java index 5d61568e8..929921aee 100644 --- a/archiva-modules/archiva-web/archiva-security/src/test/java/org/apache/archiva/security/mock/MockBeanServices.java +++ b/archiva-modules/archiva-web/archiva-security/src/test/java/org/apache/archiva/security/mock/MockBeanServices.java @@ -24,12 +24,7 @@ import org.apache.archiva.metadata.model.ProjectVersionMetadata; import org.apache.archiva.metadata.repository.MetadataRepository; import org.apache.archiva.metadata.repository.RepositorySession; import org.apache.archiva.filter.Filter; -import 
org.apache.archiva.metadata.repository.storage.ReadMetadataRequest; -import org.apache.archiva.metadata.repository.storage.RepositoryStorage; -import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataException; -import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException; -import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException; -import org.apache.archiva.metadata.repository.storage.RepositoryStorageRuntimeException; +import org.apache.archiva.metadata.repository.storage.*; import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.policies.ProxyDownloadException; import org.apache.archiva.redback.components.taskqueue.TaskQueueException; @@ -37,7 +32,9 @@ import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.events.RepositoryListener; import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler; import org.apache.archiva.scheduler.repository.model.RepositoryTask; +import org.apache.archiva.xml.XMLException; +import java.io.IOException; import java.util.Collection; /** @@ -166,7 +163,7 @@ public class MockBeanServices } @Override - public String getFilePathWithVersion( String requestPath, ManagedRepositoryContent managedRepositoryContent ) + public String getFilePathWithVersion( String requestPath, ManagedRepositoryContent managedRepositoryContent ) throws RelocationException, XMLException, IOException { return null; } diff --git a/archiva-modules/archiva-web/archiva-web-common/src/main/java/org/apache/archiva/web/api/DefaultFileUploadService.java b/archiva-modules/archiva-web/archiva-web-common/src/main/java/org/apache/archiva/web/api/DefaultFileUploadService.java index afe40d285..33ca26512 100644 --- a/archiva-modules/archiva-web/archiva-web-common/src/main/java/org/apache/archiva/web/api/DefaultFileUploadService.java +++ b/archiva-modules/archiva-web/archiva-web-common/src/main/java/org/apache/archiva/web/api/DefaultFileUploadService.java @@ -42,6 +42,7 @@ import org.apache.archiva.repository.content.ArtifactUtil; import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.RepositoryMetadataException; import org.apache.archiva.repository.metadata.RepositoryMetadataWriter; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.rest.api.services.ArchivaRestServiceException; import org.apache.archiva.rest.services.AbstractRestService; import org.apache.archiva.scheduler.ArchivaTaskScheduler; @@ -68,9 +69,7 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; -import java.io.FileOutputStream; -import java.io.FileWriter; -import java.io.IOException; +import java.io.*; import java.net.URLDecoder; import java.nio.file.*; import java.text.DateFormat; @@ -368,10 +367,10 @@ public class DefaultFileUploadService ArtifactReference artifactReference = createArtifactRef(fileMetadata, groupId, artifactId, version); artifactReference.setType(packaging); - Path pomPath = artifactUtil.getArtifactPath(repoConfig, artifactReference); - Path targetPath = pomPath.getParent(); + StorageAsset pomPath = artifactUtil.getArtifactAsset(repoConfig, artifactReference); + StorageAsset targetPath = pomPath.getParent(); - String pomFilename = pomPath.getFileName().toString(); + String pomFilename = pomPath.getName(); if 
(StringUtils.isNotEmpty(fileMetadata.getClassifier())) { pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier()); } @@ -408,8 +407,8 @@ public class DefaultFileUploadService artifactReference.setType( StringUtils.isEmpty(fileMetadata.getPackaging()) ? packaging : fileMetadata.getPackaging()); - Path artifactPath = artifactUtil.getArtifactPath(repoConfig, artifactReference); - Path targetPath = artifactPath.getParent(); + StorageAsset artifactPath = artifactUtil.getArtifactAsset(repoConfig, artifactReference); + StorageAsset targetPath = artifactPath.getParent(); log.debug("artifactPath: {} found targetPath: {}", artifactPath, targetPath); @@ -417,7 +416,7 @@ public class DefaultFileUploadService int newBuildNumber = -1; String timestamp = null; - Path versionMetadataFile = targetPath.resolve(MetadataTools.MAVEN_METADATA); + StorageAsset versionMetadataFile = targetPath.resolve(MetadataTools.MAVEN_METADATA); ArchivaRepositoryMetadata versionMetadata = getMetadata(versionMetadataFile); if (VersionUtil.isSnapshot(version)) { @@ -432,11 +431,11 @@ public class DefaultFileUploadService } } - if (!Files.exists(targetPath)) { - Files.createDirectories(targetPath); + if (!targetPath.exists()) { + targetPath.create(); } - String filename = artifactPath.getFileName().toString(); + String filename = artifactPath.getName().toString(); if (VersionUtil.isSnapshot(version)) { filename = filename.replaceAll(VersionUtil.SNAPSHOT, timestamp + "-" + newBuildNumber); } @@ -446,8 +445,8 @@ public class DefaultFileUploadService // !(archivaAdministration.getKnownContentConsumers().contains("create-missing-checksums")); try { - Path targetFile = targetPath.resolve(filename); - if (Files.exists(targetFile) && !VersionUtil.isSnapshot(version) && repoConfig.blocksRedeployments()) { + StorageAsset targetFile = targetPath.resolve(filename); + if (targetFile.exists() && !VersionUtil.isSnapshot(version) && repoConfig.blocksRedeployments()) { throw new ArchivaRestServiceException( "Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.", Response.Status.BAD_REQUEST.getStatusCode(), null); @@ -471,7 +470,7 @@ public class DefaultFileUploadService pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom"; try { - Path generatedPomFile = + StorageAsset generatedPomFile = createPom(targetPath, pomFilename, fileMetadata, groupId, artifactId, version, packaging); triggerAuditEvent(repoConfig.getId(), targetPath.resolve(pomFilename).toString(), AuditEvent.UPLOAD_FILE); if (fixChecksums) { @@ -487,7 +486,7 @@ public class DefaultFileUploadService // explicitly update only if metadata-updater consumer is not enabled! 
if (!archivaAdministration.getKnownContentConsumers().contains("metadata-updater")) { - updateProjectMetadata(targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber, + updateProjectMetadata(targetPath, lastUpdatedTimestamp, timestamp, newBuildNumber, fixChecksums, fileMetadata, groupId, artifactId, version, packaging); if (VersionUtil.isSnapshot(version)) { @@ -525,20 +524,20 @@ public class DefaultFileUploadService return artifactReference; } - private ArchivaRepositoryMetadata getMetadata(Path metadataFile) + private ArchivaRepositoryMetadata getMetadata(StorageAsset metadataFile) throws RepositoryMetadataException { ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata(); - if (Files.exists(metadataFile)) { + if (metadataFile.exists()) { try { metadata = MavenMetadataReader.read(metadataFile); - } catch (XMLException e) { + } catch (XMLException | IOException e) { throw new RepositoryMetadataException(e.getMessage(), e); } } return metadata; } - private Path createPom(Path targetPath, String filename, FileMetadata fileMetadata, String groupId, + private StorageAsset createPom(StorageAsset targetPath, String filename, FileMetadata fileMetadata, String groupId, String artifactId, String version, String packaging) throws IOException { Model projectModel = new Model(); @@ -548,22 +547,22 @@ public class DefaultFileUploadService projectModel.setVersion(version); projectModel.setPackaging(packaging); - Path pomFile = targetPath.resolve(filename); + StorageAsset pomFile = targetPath.resolve(filename); MavenXpp3Writer writer = new MavenXpp3Writer(); - try (FileWriter w = new FileWriter(pomFile.toFile())) { + try (Writer w = new OutputStreamWriter(pomFile.getWriteStream(true))) { writer.write(w, projectModel); } return pomFile; } - private void fixChecksums(Path file) { - ChecksummedFile checksum = new ChecksummedFile(file); + private void fixChecksums(StorageAsset file) { + ChecksummedFile checksum = new ChecksummedFile(file.getFilePath()); checksum.fixChecksums(algorithms); } - private void queueRepositoryTask(String repositoryId, Path localFile) { + private void queueRepositoryTask(String repositoryId, StorageAsset localFile) { RepositoryTask task = new RepositoryTask(); task.setRepositoryId(repositoryId); task.setResourceFile(localFile); @@ -574,15 +573,14 @@ public class DefaultFileUploadService scheduler.queueTask(task); } catch (TaskQueueException e) { log.error("Unable to queue repository task to execute consumers on resource file ['{}" - + "'].", localFile.getFileName()); + + "'].", localFile.getName()); } } - private void copyFile(Path sourceFile, Path targetPath, String targetFilename, boolean fixChecksums) + private void copyFile(Path sourceFile, StorageAsset targetPath, String targetFilename, boolean fixChecksums) throws IOException { - Files.copy(sourceFile, targetPath.resolve(targetFilename), StandardCopyOption.REPLACE_EXISTING, - StandardCopyOption.COPY_ATTRIBUTES); + targetPath.resolve(targetFilename).replaceDataFromFile(sourceFile); if (fixChecksums) { fixChecksums(targetPath.resolve(targetFilename)); @@ -592,19 +590,19 @@ public class DefaultFileUploadService /** * Update artifact level metadata. If it does not exist, create the metadata and fix checksums if necessary. 
*/ - private void updateProjectMetadata(String targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber, + private void updateProjectMetadata(StorageAsset targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber, boolean fixChecksums, FileMetadata fileMetadata, String groupId, String artifactId, String version, String packaging) throws RepositoryMetadataException { List availableVersions = new ArrayList<>(); String latestVersion = version; - Path projectDir = Paths.get(targetPath).getParent(); - Path projectMetadataFile = projectDir.resolve(MetadataTools.MAVEN_METADATA); + StorageAsset projectDir = targetPath.getParent(); + StorageAsset projectMetadataFile = projectDir.resolve(MetadataTools.MAVEN_METADATA); ArchivaRepositoryMetadata projectMetadata = getMetadata(projectMetadataFile); - if (Files.exists(projectMetadataFile)) { + if (projectMetadataFile.exists()) { availableVersions = projectMetadata.getAvailableVersions(); Collections.sort(availableVersions, VersionComparator.getInstance()); @@ -648,12 +646,12 @@ public class DefaultFileUploadService * Update version level metadata for snapshot artifacts. If it does not exist, create the metadata and fix checksums * if necessary. */ - private void updateVersionMetadata(ArchivaRepositoryMetadata metadata, Path metadataFile, + private void updateVersionMetadata(ArchivaRepositoryMetadata metadata, StorageAsset metadataFile, Date lastUpdatedTimestamp, String timestamp, int buildNumber, boolean fixChecksums, FileMetadata fileMetadata, String groupId, String artifactId, String version, String packaging) throws RepositoryMetadataException { - if (!Files.exists(metadataFile)) { + if (!metadataFile.exists()) { metadata.setGroupId(groupId); metadata.setArtifactId(artifactId); metadata.setVersion(version); diff --git a/archiva-modules/archiva-web/archiva-webdav/pom.xml b/archiva-modules/archiva-web/archiva-webdav/pom.xml index 73dcd4a7f..79a8e9def 100644 --- a/archiva-modules/archiva-web/archiva-webdav/pom.xml +++ b/archiva-modules/archiva-web/archiva-webdav/pom.xml @@ -59,6 +59,10 @@ runtime --> + + org.apache.archiva + archiva-storage-api + org.apache.archiva.maven archiva-maven-repository diff --git a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java index b3843e513..26ef91881 100644 --- a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java +++ b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java @@ -22,8 +22,8 @@ package org.apache.archiva.webdav; import edu.emory.mathcs.backport.java.util.Collections; import org.apache.archiva.metadata.model.facets.AuditEvent; import org.apache.archiva.repository.LayoutException; -import org.apache.archiva.repository.content.RepositoryStorage; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.RepositoryStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.repository.events.AuditListener; import org.apache.archiva.scheduler.ArchivaTaskScheduler; import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler; diff --git a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResourceFactory.java 
b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResourceFactory.java index 30e14433d..71868d10c 100644 --- a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResourceFactory.java +++ b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResourceFactory.java @@ -25,6 +25,7 @@ import org.apache.archiva.audit.Auditable; import org.apache.archiva.checksum.ChecksumAlgorithm; import org.apache.archiva.checksum.ChecksumUtil; import org.apache.archiva.checksum.StreamingChecksum; +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.plexusbridge.PlexusSisuBridgeException; import org.apache.archiva.common.utils.PathUtil; @@ -65,8 +66,8 @@ import org.apache.archiva.repository.ReleaseScheme; import org.apache.archiva.repository.RepositoryGroup; import org.apache.archiva.repository.RepositoryRegistry; import org.apache.archiva.repository.RepositoryRequestInfo; -import org.apache.archiva.repository.content.FilesystemAsset; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.repository.events.AuditListener; import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.metadata.MetadataTools; @@ -343,7 +344,7 @@ public class ArchivaDavResourceFactory ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read( metadataFile ); mergedMetadata = RepositoryMetadataMerge.merge( mergedMetadata, repoMetadata ); } - catch ( XMLException e ) + catch (XMLException | IOException e ) { throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while reading metadata file." 
); @@ -427,11 +428,10 @@ public class ArchivaDavResourceFactory { // we are in the case of index file request String requestedFileName = StringUtils.substringAfterLast( pathInfo, "/" ); - Path temporaryIndexDirectory = + StorageAsset temporaryIndexDirectory = buildMergedIndexDirectory( activePrincipal, request, repoGroup ); - FilesystemAsset asset = new FilesystemAsset( pathInfo, temporaryIndexDirectory.resolve(requestedFileName) ); + StorageAsset asset = temporaryIndexDirectory.getStorage().getAsset(requestedFileName); - Path resourceFile = temporaryIndexDirectory.resolve( requestedFileName ); try { resource = new ArchivaDavResource( asset, requestedFileName, repoGroup, request.getRemoteAddr(), activePrincipal, request.getDavSession(), @@ -543,7 +543,7 @@ public class ArchivaDavResourceFactory throw new BrowserRedirectException( addHrefPrefix( contextPath, path ), e.getRelocationType() ); } - catch ( XMLException e ) + catch (XMLException | IOException e ) { log.error( e.getMessage(), e ); throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e ); @@ -765,7 +765,7 @@ public class ArchivaDavResourceFactory RepositoryProxyHandler proxyHandler = proxyRegistry.getHandler(managedRepository.getRepository().getType()).get(0); if ( repositoryRequestInfo.isSupportFile( path ) ) { - Path proxiedFile = proxyHandler.fetchFromProxies( managedRepository, path ); + StorageAsset proxiedFile = proxyHandler.fetchFromProxies( managedRepository, path ); return ( proxiedFile != null ); } @@ -780,7 +780,7 @@ public class ArchivaDavResourceFactory if ( repositoryRequestInfo.isArchetypeCatalog( path ) ) { // FIXME we must implement a merge of remote archetype catalog from remote servers. - Path proxiedFile = proxyHandler.fetchFromProxies( managedRepository, path ); + StorageAsset proxiedFile = proxyHandler.fetchFromProxies( managedRepository, path ); return ( proxiedFile != null ); } @@ -799,7 +799,7 @@ public class ArchivaDavResourceFactory this.applicationContext.getBean( "repositoryStorage#" + repositoryLayout, RepositoryStorage.class ); repositoryStorage.applyServerSideRelocation( managedRepository, artifact ); - Path proxiedFile = proxyHandler.fetchFromProxies( managedRepository, artifact ); + StorageAsset proxiedFile = proxyHandler.fetchFromProxies( managedRepository, artifact ); resource.setPath( managedRepository.toPath( artifact ) ); @@ -1058,10 +1058,9 @@ public class ArchivaDavResourceFactory if ( StringUtils.endsWith( pathInfo, mergedIndexPath ) ) { - Path mergedRepoDirPath = + StorageAsset mergedRepoDirPath = buildMergedIndexDirectory( activePrincipal, request, repositoryGroup ); - FilesystemAsset mergedRepoDir = new FilesystemAsset(pathInfo, mergedRepoDirPath); - mergedRepositoryContents.add( mergedRepoDir ); + mergedRepositoryContents.add( mergedRepoDirPath ); } else { @@ -1087,8 +1086,12 @@ public class ArchivaDavResourceFactory } } } - FilesystemAsset parentDir = new FilesystemAsset(pathInfo, tmpDirectory.getParent()); - mergedRepositoryContents.add( parentDir ); + try { + FilesystemStorage storage = new FilesystemStorage(tmpDirectory.getParent(), new DefaultFileLockManager()); + mergedRepositoryContents.add( storage.getAsset("") ); + } catch (IOException e) { + throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not create storage for " + tmpDirectory); + } } for ( ManagedRepository repo : repositories ) { @@ -1298,7 +1301,7 @@ public class ArchivaDavResourceFactory } } - protected Path buildMergedIndexDirectory( String activePrincipal, + protected 
StorageAsset buildMergedIndexDirectory( String activePrincipal, DavServletRequest request, RepositoryGroup repositoryGroup ) throws DavException @@ -1320,7 +1323,7 @@ public class ArchivaDavResourceFactory final String id = repositoryGroup.getId(); TemporaryGroupIndex tmp = temporaryGroupIndexMap.get(id); - if ( tmp != null && tmp.getDirectory() != null && Files.exists(tmp.getDirectory())) + if ( tmp != null && tmp.getDirectory() != null && tmp.getDirectory().exists()) { if ( System.currentTimeMillis() - tmp.getCreationTime() > ( repositoryGroup.getMergedIndexTTL() * 60 * 1000 ) ) @@ -1370,12 +1373,14 @@ public class ArchivaDavResourceFactory { Path tempRepoFile = Files.createTempDirectory( "temp" ); tempRepoFile.toFile( ).deleteOnExit( ); + FilesystemStorage storage = new FilesystemStorage(tempRepoFile, new DefaultFileLockManager()); + StorageAsset tmpAsset = storage.getAsset(""); IndexMergerRequest indexMergerRequest = new IndexMergerRequest( authzRepos, true, id, indexPath.toString( ), repositoryGroup.getMergedIndexTTL( ) ).mergedIndexDirectory( - tempRepoFile ).temporary( true ); + tmpAsset ).temporary( true ); MergedRemoteIndexesTaskRequest taskRequest = new MergedRemoteIndexesTaskRequest( indexMergerRequest, indexMerger ); @@ -1384,7 +1389,7 @@ public class ArchivaDavResourceFactory ArchivaIndexingContext indexingContext = job.execute( ).getIndexingContext( ); - Path mergedRepoDir = Paths.get( indexingContext.getPath( ) ); + StorageAsset mergedRepoDir = indexingContext.getPath( ); TemporaryGroupIndex temporaryGroupIndex = new TemporaryGroupIndex( mergedRepoDir, indexingContext.getId( ), id, repositoryGroup.getMergedIndexTTL( ) ) // diff --git a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaVirtualDavResource.java b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaVirtualDavResource.java index 586eebc88..ca996f526 100644 --- a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaVirtualDavResource.java +++ b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaVirtualDavResource.java @@ -19,8 +19,7 @@ package org.apache.archiva.webdav; * under the License. 
*/ -import org.apache.archiva.repository.ManagedRepositoryContent; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.webdav.util.IndexWriter; import org.apache.archiva.webdav.util.MimeTypes; import org.apache.jackrabbit.util.Text; @@ -48,9 +47,6 @@ import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; import java.util.*; import java.util.stream.Collectors; diff --git a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/RepositoryServlet.java b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/RepositoryServlet.java index 8e09cd08c..800b81b26 100644 --- a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/RepositoryServlet.java +++ b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/RepositoryServlet.java @@ -26,7 +26,7 @@ import org.apache.archiva.configuration.ConfigurationListener; import org.apache.archiva.redback.integration.filter.authentication.HttpAuthenticator; import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.RepositoryRegistry; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.security.ServletAuthenticator; import org.apache.jackrabbit.webdav.DavException; import org.apache.jackrabbit.webdav.DavLocatorFactory; @@ -51,9 +51,6 @@ import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.locks.ReentrantReadWriteLock; diff --git a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java index 3aa9b387b..2763d4d4c 100644 --- a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java +++ b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java @@ -19,10 +19,8 @@ package org.apache.archiva.webdav.util; * under the License. 
*/ -import org.apache.archiva.repository.ManagedRepositoryContent; -import org.apache.archiva.repository.content.StorageAsset; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.lang.StringUtils; -import org.apache.jackrabbit.webdav.DavResource; import org.apache.jackrabbit.webdav.io.OutputContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/TemporaryGroupIndexSessionCleaner.java b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/TemporaryGroupIndexSessionCleaner.java index 02c4125a4..1cfc42fb5 100644 --- a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/TemporaryGroupIndexSessionCleaner.java +++ b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/TemporaryGroupIndexSessionCleaner.java @@ -75,7 +75,7 @@ public class TemporaryGroupIndexSessionCleaner for ( TemporaryGroupIndex temporaryGroupIndex : tempFilesPerKey.values() ) { log.info( "cleanup temporaryGroupIndex {} directory {}", temporaryGroupIndex.getIndexId(), - temporaryGroupIndex.getDirectory().toAbsolutePath() ); + temporaryGroupIndex.getDirectory().getPath() ); getIndexMerger( httpSessionEvent ).cleanTemporaryGroupIndex( temporaryGroupIndex ); } } diff --git a/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/DavResourceTest.java b/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/DavResourceTest.java index 6eb286729..715057212 100644 --- a/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/DavResourceTest.java +++ b/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/DavResourceTest.java @@ -24,7 +24,7 @@ import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.repository.LayoutException; import org.apache.archiva.repository.RepositoryRegistry; -import org.apache.archiva.repository.content.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemAsset; import org.apache.archiva.repository.events.AuditListener; import org.apache.archiva.repository.maven2.MavenManagedRepository; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; @@ -127,7 +127,7 @@ public class DavResourceTest private DavResource getDavResource( String logicalPath, Path file ) throws LayoutException { - return new ArchivaDavResource( new FilesystemAsset( logicalPath, file.toAbsolutePath()) , logicalPath, repository, session, resourceLocator, + return new ArchivaDavResource( new FilesystemAsset( repository, logicalPath, file.toAbsolutePath()) , logicalPath, repository, session, resourceLocator, resourceFactory, mimeTypes, Collections. emptyList(), null); } @@ -349,7 +349,7 @@ public class DavResourceTest { try { - return new ArchivaDavResource( new FilesystemAsset( "/" , baseDir.toAbsolutePath()), "/", repository, session, resourceLocator, + return new ArchivaDavResource( new FilesystemAsset(repository, "/" , baseDir.toAbsolutePath()), "/", repository, session, resourceLocator, resourceFactory, mimeTypes, Collections. 
emptyList(), null ); } diff --git a/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/OverridingRepositoryProxyHandler.java b/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/OverridingRepositoryProxyHandler.java index 75f6726b7..1343c9452 100644 --- a/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/OverridingRepositoryProxyHandler.java +++ b/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/OverridingRepositoryProxyHandler.java @@ -22,6 +22,7 @@ package org.apache.archiva.webdav; import org.apache.archiva.proxy.maven.MavenRepositoryProxyHandler; import org.apache.archiva.proxy.model.ProxyFetchResult; import org.apache.archiva.repository.ManagedRepositoryContent; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.commons.io.FileUtils; import java.io.IOException; @@ -40,10 +41,10 @@ class OverridingRepositoryProxyHandler @Override public ProxyFetchResult fetchMetadataFromProxies(ManagedRepositoryContent repository, String logicalPath ) { - Path target = Paths.get(repository.getRepoRoot(), logicalPath ); + StorageAsset target = repository.getRepository().getAsset( logicalPath ); try { - FileUtils.copyFile( archivaDavResourceFactoryTest.getProjectBase().resolve( "target/test-classes/maven-metadata.xml" ).toFile(), target.toFile() ); + FileUtils.copyFile( archivaDavResourceFactoryTest.getProjectBase().resolve( "target/test-classes/maven-metadata.xml" ).toFile(), target.getFilePath().toFile() ); } catch ( IOException e ) { diff --git a/archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryPathTranslator.java b/archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryPathTranslator.java index 77bcb9ec0..50071593a 100644 --- a/archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryPathTranslator.java +++ b/archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryPathTranslator.java @@ -20,6 +20,7 @@ package org.apache.archiva.metadata.repository.storage; */ import org.apache.archiva.metadata.model.ArtifactMetadata; +import org.apache.archiva.repository.storage.StorageAsset; import java.nio.file.Path; @@ -31,13 +32,13 @@ public interface RepositoryPathTranslator String toPath( String namespace, String projectId ); - Path toFile( Path basedir, String namespace, String projectId, String projectVersion, String filename ); + StorageAsset toFile(StorageAsset basedir, String namespace, String projectId, String projectVersion, String filename ); - Path toFile( Path basedir, String namespace, String projectId ); + StorageAsset toFile(StorageAsset basedir, String namespace, String projectId ); - Path toFile(Path basedir, String namespace ); + StorageAsset toFile(StorageAsset basedir, String namespace ); - Path toFile( Path basedir, String namespace, String projectId, String projectVersion ); + StorageAsset toFile( StorageAsset basedir, String namespace, String projectId, String projectVersion ); ArtifactMetadata getArtifactForPath( String repoId, String relativePath ); diff --git a/archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryStorage.java 
b/archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryStorage.java index cb2051011..6fb46d676 100644 --- a/archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryStorage.java +++ b/archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryStorage.java @@ -28,6 +28,7 @@ import org.apache.archiva.policies.ProxyDownloadException; import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.xml.XMLException; +import java.io.IOException; import java.util.Collection; // FIXME: we should drop the repositoryId parameters and attach this to an instance of a repository storage @@ -83,7 +84,7 @@ public interface RepositoryStorage String getFilePath( String requestPath, org.apache.archiva.repository.ManagedRepository managedRepository ); String getFilePathWithVersion( final String requestPath, ManagedRepositoryContent managedRepositoryContent ) - throws RelocationException, XMLException; + throws RelocationException, XMLException, IOException; } diff --git a/archiva-modules/plugins/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/MockRepositoryStorage.java b/archiva-modules/plugins/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/MockRepositoryStorage.java index 2cd68f41b..3ebed1a49 100644 --- a/archiva-modules/plugins/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/MockRepositoryStorage.java +++ b/archiva-modules/plugins/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/MockRepositoryStorage.java @@ -38,6 +38,7 @@ import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.events.RepositoryListener; import org.apache.archiva.xml.XMLException; +import java.io.IOException; import java.util.Collection; /** @@ -118,7 +119,7 @@ public class MockRepositoryStorage @Override public String getFilePathWithVersion( String requestPath, ManagedRepositoryContent managedRepositoryContent ) - throws RelocationException, XMLException + throws RelocationException, XMLException, IOException { return null; } diff --git a/archiva-modules/plugins/stage-repository-merge/src/main/java/org/apache/archiva/stagerepository/merge/Maven2RepositoryMerger.java b/archiva-modules/plugins/stage-repository-merge/src/main/java/org/apache/archiva/stagerepository/merge/Maven2RepositoryMerger.java index 049f3a719..7b8c64230 100644 --- a/archiva-modules/plugins/stage-repository-merge/src/main/java/org/apache/archiva/stagerepository/merge/Maven2RepositoryMerger.java +++ b/archiva-modules/plugins/stage-repository-merge/src/main/java/org/apache/archiva/stagerepository/merge/Maven2RepositoryMerger.java @@ -19,6 +19,7 @@ package org.apache.archiva.stagerepository.merge; * under the License. 
*/ +import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.utils.VersionComparator; import org.apache.archiva.common.utils.VersionUtil; import org.apache.archiva.configuration.ArchivaConfiguration; @@ -34,6 +35,9 @@ import org.apache.archiva.model.ArchivaRepositoryMetadata; import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.metadata.RepositoryMetadataException; import org.apache.archiva.repository.metadata.RepositoryMetadataWriter; +import org.apache.archiva.repository.storage.FilesystemAsset; +import org.apache.archiva.repository.storage.FilesystemStorage; +import org.apache.archiva.repository.storage.StorageAsset; import org.apache.archiva.xml.XMLException; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; @@ -42,7 +46,10 @@ import org.springframework.stereotype.Service; import javax.inject.Inject; import javax.inject.Named; +import java.io.BufferedWriter; import java.io.IOException; +import java.io.OutputStreamWriter; +import java.nio.Buffer; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -227,20 +234,22 @@ public class Maven2RepositoryMerger { // updating version metadata files - Path versionMetaDataFileInSourceRepo = - pathTranslator.toFile( Paths.get( sourceRepoPath ), artifactMetadata.getNamespace(), + FilesystemStorage fsStorage = new FilesystemStorage(Paths.get(sourceRepoPath), new DefaultFileLockManager()); + + StorageAsset versionMetaDataFileInSourceRepo = + pathTranslator.toFile( new FilesystemAsset(fsStorage, "", Paths.get(sourceRepoPath)), artifactMetadata.getNamespace(), artifactMetadata.getProject(), artifactMetadata.getVersion(), METADATA_FILENAME ); - if ( Files.exists(versionMetaDataFileInSourceRepo) ) + if ( versionMetaDataFileInSourceRepo.exists() ) {//Pattern quote for windows path String relativePathToVersionMetadataFile = - versionMetaDataFileInSourceRepo.toAbsolutePath().toString().split( Pattern.quote( sourceRepoPath ) )[1]; + versionMetaDataFileInSourceRepo.getPath().toString().split( Pattern.quote( sourceRepoPath ) )[1]; Path versionMetaDataFileInTargetRepo = Paths.get( targetRepoPath, relativePathToVersionMetadataFile ); if ( !Files.exists(versionMetaDataFileInTargetRepo) ) { - copyFile( versionMetaDataFileInSourceRepo, versionMetaDataFileInTargetRepo ); + copyFile( versionMetaDataFileInSourceRepo.getFilePath(), versionMetaDataFileInTargetRepo ); } else { @@ -250,19 +259,19 @@ public class Maven2RepositoryMerger } // updating project meta data file - Path projectDirectoryInSourceRepo = versionMetaDataFileInSourceRepo.getParent().getParent(); - Path projectMetadataFileInSourceRepo = projectDirectoryInSourceRepo.resolve(METADATA_FILENAME ); + StorageAsset projectDirectoryInSourceRepo = versionMetaDataFileInSourceRepo.getParent().getParent(); + StorageAsset projectMetadataFileInSourceRepo = projectDirectoryInSourceRepo.resolve(METADATA_FILENAME ); - if ( Files.exists(projectMetadataFileInSourceRepo) ) + if ( projectMetadataFileInSourceRepo.exists() ) { String relativePathToProjectMetadataFile = - projectMetadataFileInSourceRepo.toAbsolutePath().toString().split( Pattern.quote( sourceRepoPath ) )[1]; + projectMetadataFileInSourceRepo.getPath().split( Pattern.quote( sourceRepoPath ) )[1]; Path projectMetadataFileInTargetRepo = Paths.get( targetRepoPath, relativePathToProjectMetadataFile ); if ( !Files.exists(projectMetadataFileInTargetRepo) ) { - copyFile( projectMetadataFileInSourceRepo, projectMetadataFileInTargetRepo ); + 
copyFile( projectMetadataFileInSourceRepo.getFilePath(), projectMetadataFileInTargetRepo ); } else { @@ -331,7 +340,11 @@ public class Maven2RepositoryMerger projectMetadata.setReleasedVersion( latestVersion ); } - RepositoryMetadataWriter.write( projectMetadata, projectMetaDataFileIntargetRepo ); + try(BufferedWriter writer = Files.newBufferedWriter(projectMetaDataFileIntargetRepo)) { + RepositoryMetadataWriter.write( projectMetadata, writer ); + } catch (IOException e) { + throw new RepositoryMetadataException(e); + } } @@ -348,7 +361,11 @@ public class Maven2RepositoryMerger } versionMetadata.setLastUpdatedTimestamp( lastUpdatedTimestamp ); - RepositoryMetadataWriter.write( versionMetadata, versionMetaDataFileInTargetRepo ); + try(BufferedWriter writer = Files.newBufferedWriter(versionMetaDataFileInTargetRepo) ) { + RepositoryMetadataWriter.write( versionMetadata, writer); + } catch (IOException e) { + throw new RepositoryMetadataException(e); + } } private ArchivaRepositoryMetadata getMetadata( Path metadataFile ) @@ -361,7 +378,7 @@ public class Maven2RepositoryMerger { metadata = MavenMetadataReader.read( metadataFile ); } - catch ( XMLException e ) + catch (XMLException | IOException e ) { throw new RepositoryMetadataException( e.getMessage(), e ); } diff --git a/pom.xml b/pom.xml index 8a1ae2a87..f4dd5589f 100644 --- a/pom.xml +++ b/pom.xml @@ -295,6 +295,16 @@ archiva-metadata-consumer ${project.version} + + org.apache.archiva + archiva-storage-api + ${project.version} + + + org.apache.archiva + archiva-storage-fs + ${project.version} + org.apache.archiva test-repository
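Note on the recurring pattern in this commit: calls against java.nio.file.Path and java.nio.file.Files are replaced by the StorageAsset abstraction, so purge and upload code no longer assumes a local filesystem. The following is a minimal sketch of the correspondence, built only from the StorageAsset methods that appear in the hunks above (exists(), create(), resolve(), getName(), getParent(), getPath(), getStorage().removeAsset()); the exact checked exceptions are an assumption based on the surrounding catch blocks.

import java.io.IOException;

import org.apache.archiva.repository.storage.StorageAsset;

public class StorageAssetMigrationSketch
{
    // Mirrors the Path -> StorageAsset substitutions seen in AbstractRepositoryPurge and
    // DefaultFileUploadService: directory creation, child resolution, name access and deletion
    // all go through the asset and its owning storage instead of Files.
    String storeAndDelete( StorageAsset targetPath, String filename ) throws IOException
    {
        if ( !targetPath.exists() )                                // was !Files.exists( targetPath )
        {
            targetPath.create();                                   // was Files.createDirectories( targetPath )
        }
        StorageAsset targetFile = targetPath.resolve( filename );  // was Path.resolve( filename )
        StorageAsset parentDir = targetFile.getParent();           // was Path.getParent()
        String name = targetFile.getName();                        // was getFileName().toString()
        if ( targetFile.exists() )
        {
            targetFile.getStorage().removeAsset( targetFile );     // was Files.delete( targetFile )
        }
        return parentDir.getPath() + "/" + name;                   // getPath() returns the asset path as String
    }
}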
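The reworked tests (ArtifactContentEntriesTests, ArtifactBuilderTest) show how a plain file is handed to an API that now expects a StorageAsset: the base directory is wrapped in a FilesystemStorage and the file becomes a FilesystemAsset inside it. A sketch under the constructor signatures used above, FilesystemStorage(Path, FileLockManager) and FilesystemAsset(storage, path, filePath); the sample path is illustrative only.

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;

public class FilesystemAssetWrapSketch
{
    // Wraps an existing file so it can be passed to methods that take a StorageAsset,
    // following the getFile() helper added to ArtifactBuilderTest.
    static StorageAsset wrap( Path file ) throws IOException
    {
        FilesystemStorage storage = new FilesystemStorage( file.getParent(), new DefaultFileLockManager() );
        return new FilesystemAsset( storage, file.getFileName().toString(), file );
    }

    public static void main( String[] args ) throws IOException
    {
        StorageAsset asset = wrap( Paths.get( "/tmp/foo-1.0.pom" ) );   // illustrative path
        System.out.println( asset.getName() );                          // prints foo-1.0.pom
    }
}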
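Writes follow the same pattern: createPom in DefaultFileUploadService now wraps getWriteStream(true) in an OutputStreamWriter instead of opening a FileWriter, and copyFile delegates to replaceDataFromFile on the target asset. A condensed sketch of those two calls; the assumption here is that the boolean passed to getWriteStream requests overwriting existing content and that both calls signal failures as IOException, as the surrounding code suggests.

import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;

import org.apache.archiva.repository.storage.StorageAsset;

public class StorageAssetWriteSketch
{
    // Was: try ( FileWriter w = new FileWriter( pomFile.toFile() ) ) { ... }
    void writeText( StorageAsset pomFile, String content ) throws IOException
    {
        try ( Writer w = new OutputStreamWriter( pomFile.getWriteStream( true ), StandardCharsets.UTF_8 ) )
        {
            w.write( content );
        }
    }

    // Was: Files.copy( sourceFile, targetPath.resolve( targetFilename ),
    //                  StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES )
    void copy( Path sourceFile, StorageAsset targetPath, String targetFilename ) throws IOException
    {
        targetPath.resolve( targetFilename ).replaceDataFromFile( sourceFile );
    }
}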