Browse Source

Refactoring to StorageAsset access

pull/51/head
Martin Stockhammer 4 years ago
parent
commit
bb3b074aaf
100 changed files with 1515 additions and 1087 deletions
  1. 4
    0
      archiva-modules/archiva-base/archiva-configuration/pom.xml
  2. 17
    14
      archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/AbstractRepositoryPurge.java
  3. 4
    3
      archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/DaysOldRepositoryPurge.java
  4. 9
    0
      archiva-modules/archiva-base/archiva-policies/pom.xml
  5. 1
    4
      archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/AbstractUpdatePolicy.java
  6. 1
    1
      archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/CachedFailuresPolicy.java
  7. 1
    1
      archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/ChecksumPolicy.java
  8. 1
    1
      archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadErrorPolicy.java
  9. 1
    1
      archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadPolicy.java
  10. 1
    1
      archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsDownloadPolicy.java
  11. 1
    2
      archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsOnUpdateDownloadPolicy.java
  12. 13
    5
      archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/CachedFailuresPolicyTest.java
  13. 31
    21
      archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ChecksumPolicyTest.java
  14. 8
    7
      archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ReleasePolicyTest.java
  15. 11
    7
      archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/SnapshotsPolicyTest.java
  16. 1
    3
      archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/ProxyFetchResult.java
  17. 1
    1
      archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/RepositoryProxyHandler.java
  18. 3
    7
      archiva-modules/archiva-base/archiva-proxy/src/main/java/org/apache/archiva/proxy/DefaultRepositoryProxyHandler.java
  19. 2
    2
      archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-api/src/main/java/org/apache/archiva/admin/model/group/RepositoryGroupAdmin.java
  20. 8
    6
      archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/main/java/org/apache/archiva/admin/repository/group/DefaultRepositoryGroupAdmin.java
  21. 14
    10
      archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/ArchivaIndexManagerMock.java
  22. 13
    2
      archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MavenIndexContextMock.java
  23. 2
    3
      archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MockMergedRemoteIndexesScheduler.java
  24. 4
    0
      archiva-modules/archiva-base/archiva-repository-api/pom.xml
  25. 0
    2
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexManager.java
  26. 2
    1
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java
  27. 6
    4
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/IndexMergerRequest.java
  28. 3
    3
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/MergedRemoteIndexesScheduler.java
  29. 6
    4
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/TemporaryGroupIndex.java
  30. 0
    2
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepository.java
  31. 1
    1
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java
  32. 1
    1
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/Repository.java
  33. 1
    2
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RepositoryGroup.java
  34. 1
    2
      archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/features/IndexCreationFeature.java
  35. 4
    0
      archiva-modules/archiva-base/archiva-repository-layer/pom.xml
  36. 6
    4
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultIndexMerger.java
  37. 2
    1
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultMergedRemoteIndexesScheduler.java
  38. 1
    6
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractManagedRepository.java
  39. 1
    1
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRemoteRepository.java
  40. 2
    3
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepository.java
  41. 1
    6
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepositoryGroup.java
  42. 2
    5
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicManagedRepository.java
  43. 2
    2
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicRemoteRepository.java
  44. 0
    4
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/RepositoryRegistry.java
  45. 16
    0
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/ArtifactUtil.java
  46. 0
    192
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/StorageUtil.java
  47. 40
    46
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/MetadataTools.java
  48. 14
    4
      archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/RepositoryMetadataWriter.java
  49. 0
    202
      archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemAssetTest.java
  50. 1
    1
      archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java
  51. 1
    1
      archiva-modules/archiva-base/archiva-repository-scanner/src/main/java/org/apache/archiva/repository/scanner/DefaultRepositoryScanner.java
  52. 1
    1
      archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/RepositoryScannerTest.java
  53. 18
    3
      archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/mock/ManagedRepositoryContentMock.java
  54. 35
    0
      archiva-modules/archiva-base/archiva-storage-api/pom.xml
  55. 24
    13
      archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/RepositoryStorage.java
  56. 9
    3
      archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/StorageAsset.java
  57. 58
    0
      archiva-modules/archiva-base/archiva-storage-fs/pom.xml
  58. 18
    5
      archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemAsset.java
  59. 14
    2
      archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemStorage.java
  60. 346
    0
      archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/StorageUtil.java
  61. 203
    0
      archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemAssetTest.java
  62. 50
    58
      archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemStorageTest.java
  63. 2
    0
      archiva-modules/archiva-base/pom.xml
  64. 1
    2
      archiva-modules/archiva-maven/archiva-maven-converter/src/main/java/org/apache/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java
  65. 27
    2
      archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexContext.java
  66. 42
    43
      archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexManager.java
  67. 1
    1
      archiva-modules/archiva-maven/archiva-maven-indexer/src/test/java/org/apache/archiva/indexer/maven/MavenIndexManagerTest.java
  68. 4
    0
      archiva-modules/archiva-maven/archiva-maven-metadata/pom.xml
  69. 12
    20
      archiva-modules/archiva-maven/archiva-maven-metadata/src/main/java/org/apache/archiva/maven2/metadata/MavenMetadataReader.java
  70. 6
    6
      archiva-modules/archiva-maven/archiva-maven-proxy/src/main/java/org/apache/archiva/proxy/maven/MavenRepositoryProxyHandler.java
  71. 7
    6
      archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/CacheFailuresTransferTest.java
  72. 38
    37
      archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ChecksumTransferTest.java
  73. 9
    8
      archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ErrorHandlingTest.java
  74. 6
    5
      archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/HttpProxyTransferTest.java
  75. 29
    28
      archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ManagedDefaultTransferTest.java
  76. 11
    6
      archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/MetadataTransferTest.java
  77. 19
    18
      archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/SnapshotTransferTest.java
  78. 18
    3
      archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java
  79. 10
    10
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/dependency/tree/maven2/Maven3DependencyTreeBuilder.java
  80. 5
    4
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/Maven2RepositoryPathTranslator.java
  81. 72
    103
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/Maven2RepositoryStorage.java
  82. 15
    14
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/RepositoryModelResolver.java
  83. 1
    1
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java
  84. 1
    3
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenManagedRepository.java
  85. 1
    1
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRemoteRepository.java
  86. 1
    2
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryGroup.java
  87. 12
    5
      archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryProvider.java
  88. 4
    6
      archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/MavenRepositoryMetadataReaderTest.java
  89. 20
    11
      archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/ArchivaIndexManagerMock.java
  90. 8
    4
      archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/MavenIndexContextMock.java
  91. 3
    4
      archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/metadata/RepositoryMetadataReaderTest.java
  92. 1
    1
      archiva-modules/archiva-maven/archiva-maven-scheduler/src/test/java/org/apache/archiva/scheduler/indexing/maven/ArchivaIndexingTaskExecutorTest.java
  93. 1
    3
      archiva-modules/archiva-scheduler/archiva-scheduler-repository-api/src/main/java/org/apache/archiva/scheduler/repository/model/RepositoryTask.java
  94. 14
    10
      archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/ArchivaIndexManagerMock.java
  95. 13
    2
      archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MavenIndexContextMock.java
  96. 4
    0
      archiva-modules/archiva-web/archiva-rest/archiva-rest-services/pom.xml
  97. 32
    17
      archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultBrowseService.java
  98. 17
    10
      archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultRepositoriesService.java
  99. 6
    4
      archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/utils/ArtifactBuilder.java
  100. 0
    0
      archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/ArtifactContentEntriesTests.java

+ 4
- 0
archiva-modules/archiva-base/archiva-configuration/pom.xml View File

</properties> </properties>


<dependencies> <dependencies>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-policies</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.archiva.redback.components.registry</groupId> <groupId>org.apache.archiva.redback.components.registry</groupId>
<artifactId>spring-registry-api</artifactId> <artifactId>spring-registry-api</artifactId>

+ 17
- 14
archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/AbstractRepositoryPurge.java View File

import org.apache.archiva.repository.ContentNotFoundException; import org.apache.archiva.repository.ContentNotFoundException;
import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.events.RepositoryListener; import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
log.error( "Error during metadata retrieval {}: {}", metaBaseId, e.getMessage( ) ); log.error( "Error during metadata retrieval {}: {}", metaBaseId, e.getMessage( ) );
} }
} }
Path artifactFile = repository.toFile( reference );
StorageAsset artifactFile = repository.toFile( reference );


for ( RepositoryListener listener : listeners ) for ( RepositoryListener listener : listeners )
{ {
listener.deleteArtifact( metadataRepository, repository.getId( ), reference.getGroupId( ), listener.deleteArtifact( metadataRepository, repository.getId( ), reference.getGroupId( ),
reference.getArtifactId( ), reference.getVersion( ), reference.getArtifactId( ), reference.getVersion( ),
artifactFile.getFileName( ).toString( ) );
artifactFile.getName( ));
} }
try try
{ {
Files.delete( artifactFile );
log.debug( "File deleted: {}", artifactFile.toAbsolutePath( ) );
artifactFile.getStorage().removeAsset(artifactFile);
log.debug( "File deleted: {}", artifactFile );
} }
catch ( IOException e ) catch ( IOException e )
{ {
log.error( "Could not delete file {}: {}", artifactFile.toAbsolutePath( ), e.getMessage( ), e );
log.error( "Could not delete file {}: {}", artifactFile.toString(), e.getMessage( ), e );
continue; continue;
} }
try try
} }
} }


private void deleteSilently( Path path )
private void deleteSilently( StorageAsset path )
{ {
try try
{ {
Files.deleteIfExists( path );
path.getStorage().removeAsset(path);
triggerAuditEvent( repository.getRepository( ).getId( ), path.toString( ), AuditEvent.PURGE_FILE ); triggerAuditEvent( repository.getRepository( ).getId( ), path.toString( ), AuditEvent.PURGE_FILE );
} }
catch ( IOException e ) catch ( IOException e )
* *
* @param artifactFile the file to base off of. * @param artifactFile the file to base off of.
*/ */
private void purgeSupportFiles( Path artifactFile )
private void purgeSupportFiles( StorageAsset artifactFile )
{ {
Path parentDir = artifactFile.getParent( );
StorageAsset parentDir = artifactFile.getParent( );


if ( !Files.exists( parentDir ) )
if ( !parentDir.exists() )
{ {
return; return;
} }


final String artifactName = artifactFile.getFileName( ).toString( );
final String artifactName = artifactFile.getName( );


try try
{ {
Files.find( parentDir, 3,
( path, basicFileAttributes ) -> path.getFileName( ).toString( ).startsWith( artifactName )
&& Files.isRegularFile( path ) ).forEach( this::deleteSilently );

StorageUtil.recurse(parentDir, a -> {
if (!artifactFile.isContainer() && artifactFile.getName().startsWith(artifactName)) deleteSilently(a);
}, true, 3 );
} }
catch ( IOException e ) catch ( IOException e )
{ {

+ 4
- 3
archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/DaysOldRepositoryPurge.java View File

import org.apache.archiva.repository.LayoutException; import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.events.RepositoryListener; import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.time.DateUtils; import org.apache.commons.lang.time.DateUtils;


import java.io.IOException; import java.io.IOException;
artifactFile.toAbsolutePath( ).toString() ); artifactFile.toAbsolutePath( ).toString() );
newArtifactReference.setVersion( version ); newArtifactReference.setVersion( version );


Path newArtifactFile = repository.toFile( newArtifactReference );
StorageAsset newArtifactFile = repository.toFile( newArtifactReference );


// Is this a generic snapshot "1.0-SNAPSHOT" ? // Is this a generic snapshot "1.0-SNAPSHOT" ?
if ( VersionUtil.isGenericSnapshot( newArtifactReference.getVersion( ) ) ) if ( VersionUtil.isGenericSnapshot( newArtifactReference.getVersion( ) ) )
{ {
if ( Files.getLastModifiedTime( newArtifactFile ).toMillis() < olderThanThisDate.getTimeInMillis( ) )
if ( newArtifactFile.getModificationTime().toEpochMilli() < olderThanThisDate.getTimeInMillis( ) )
{ {
artifactsToDelete.addAll( repository.getRelatedArtifacts( newArtifactReference ) ); artifactsToDelete.addAll( repository.getRelatedArtifacts( newArtifactReference ) );
} }
} }
purge( artifactsToDelete ); purge( artifactsToDelete );
} }
catch ( ContentNotFoundException | IOException e )
catch ( ContentNotFoundException e )
{ {
throw new RepositoryPurgeException( e.getMessage( ), e ); throw new RepositoryPurgeException( e.getMessage( ), e );
} }

+ 9
- 0
archiva-modules/archiva-base/archiva-policies/pom.xml View File

<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-common</artifactId> <artifactId>archiva-common</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId> <artifactId>archiva-checksum</artifactId>
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-fs</artifactId>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-test-utils</artifactId> <artifactId>archiva-test-utils</artifactId>

+ 1
- 4
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/AbstractUpdatePolicy.java View File

*/ */


import org.apache.archiva.common.utils.VersionUtil; import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;


import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Calendar; import java.util.Calendar;
import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;



+ 1
- 1
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/CachedFailuresPolicy.java View File

*/ */


import org.apache.archiva.policies.urlcache.UrlFailureCache; import org.apache.archiva.policies.urlcache.UrlFailureCache;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

+ 1
- 1
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/ChecksumPolicy.java View File

import org.apache.archiva.checksum.ChecksumAlgorithm; import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile; import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.archiva.checksum.UpdateStatus; import org.apache.archiva.checksum.UpdateStatus;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

+ 1
- 1
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadErrorPolicy.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;


import java.util.Map; import java.util.Map;
import java.util.Properties; import java.util.Properties;

+ 1
- 1
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadPolicy.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;


import java.util.Properties; import java.util.Properties;



+ 1
- 1
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsDownloadPolicy.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

+ 1
- 2
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsOnUpdateDownloadPolicy.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;


import java.nio.file.Files;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;

+ 13
- 5
archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/CachedFailuresPolicyTest.java View File

*/ */


import junit.framework.TestCase; import junit.framework.TestCase;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.policies.urlcache.UrlFailureCache; import org.apache.archiva.policies.urlcache.UrlFailureCache;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;


import javax.inject.Inject; import javax.inject.Inject;
import javax.inject.Named; import javax.inject.Named;
import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.Properties; import java.util.Properties;
@Inject @Inject
private UrlFailureCache urlFailureCache; private UrlFailureCache urlFailureCache;


private FilesystemStorage filesystemStorage;

@Inject @Inject
@Named( value = "preDownloadPolicy#cache-failures" ) @Named( value = "preDownloadPolicy#cache-failures" )
DownloadPolicy downloadPolicy; DownloadPolicy downloadPolicy;
return downloadPolicy; return downloadPolicy;
} }


private Path getFile()
{
return Paths.get( "target/cache-failures/" + getName() + ".txt" );
private StorageAsset getFile() throws IOException {
if (filesystemStorage==null) {
filesystemStorage = new FilesystemStorage(Paths.get("target/cache-failures"), new DefaultFileLockManager());
}
return filesystemStorage.getAsset( getName() + ".txt" );
} }


private Properties createRequest() private Properties createRequest()
throws Exception throws Exception
{ {
DownloadPolicy policy = lookupPolicy(); DownloadPolicy policy = lookupPolicy();
Path localFile = getFile();
StorageAsset localFile = getFile();
Properties request = createRequest(); Properties request = createRequest();


request.setProperty( "url", "http://a.bad.hostname.maven.org/path/to/resource.txt" ); request.setProperty( "url", "http://a.bad.hostname.maven.org/path/to/resource.txt" );
{ {


DownloadPolicy policy = lookupPolicy(); DownloadPolicy policy = lookupPolicy();
Path localFile = getFile();
StorageAsset localFile = getFile();
Properties request = createRequest(); Properties request = createRequest();
// make unique name // make unique name
String url = "http://a.bad.hostname.maven.org/path/to/resource"+ System.currentTimeMillis() +".txt"; String url = "http://a.bad.hostname.maven.org/path/to/resource"+ System.currentTimeMillis() +".txt";

+ 31
- 21
archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ChecksumPolicyTest.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.junit.Rule; import org.junit.Rule;
import javax.inject.Named; import javax.inject.Named;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.FileReader; import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;


private static final String BAD = "bad"; private static final String BAD = "bad";


private static FilesystemStorage filesystemStorage;

@Inject @Inject
@Named( value = "postDownloadPolicy#checksum" ) @Named( value = "postDownloadPolicy#checksum" )
PostDownloadPolicy downloadPolicy; PostDownloadPolicy downloadPolicy;
throws Exception throws Exception
{ {
PostDownloadPolicy policy = lookupPolicy(); PostDownloadPolicy policy = lookupPolicy();
Path localFile = createTestableFiles( null, null );
StorageAsset localFile = createTestableFiles( null, null );
Properties request = createRequest(); Properties request = createRequest();


policy.applyPolicy( ChecksumPolicy.IGNORE, request, localFile ); policy.applyPolicy( ChecksumPolicy.IGNORE, request, localFile );
throws Exception throws Exception
{ {
PostDownloadPolicy policy = lookupPolicy(); PostDownloadPolicy policy = lookupPolicy();
Path localFile = createTestableFiles( md5State, sha1State );
StorageAsset localFile = createTestableFiles( md5State, sha1State );
Properties request = createRequest(); Properties request = createRequest();


boolean actualResult; boolean actualResult;
actualResult = false; actualResult = false;
String msg = createMessage( ChecksumPolicy.FAIL, md5State, sha1State ); String msg = createMessage( ChecksumPolicy.FAIL, md5State, sha1State );


assertFalse( msg + " local file should not exist:", Files.exists(localFile) );
Path md5File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".sha1" );
Path sha1File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".md5" );
assertFalse( msg + " local file should not exist:", localFile.exists() );
Path md5File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".sha1" );
Path sha1File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".md5" );
assertFalse( msg + " local md5 file should not exist:", Files.exists(md5File) ); assertFalse( msg + " local md5 file should not exist:", Files.exists(md5File) );
assertFalse( msg + " local sha1 file should not exist:", Files.exists(sha1File) ); assertFalse( msg + " local sha1 file should not exist:", Files.exists(sha1File) );
} }
throws Exception throws Exception
{ {
PostDownloadPolicy policy = lookupPolicy(); PostDownloadPolicy policy = lookupPolicy();
Path localFile = createTestableFiles( md5State, sha1State );
StorageAsset localFile = createTestableFiles( md5State, sha1State );
Properties request = createRequest(); Properties request = createRequest();


boolean actualResult; boolean actualResult;
assertEquals( createMessage( ChecksumPolicy.FIX, md5State, sha1State ), expectedResult, actualResult ); assertEquals( createMessage( ChecksumPolicy.FIX, md5State, sha1State ), expectedResult, actualResult );


// End result should be legitimate SHA1 and MD5 files. // End result should be legitimate SHA1 and MD5 files.
Path md5File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".md5" );
Path sha1File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".sha1" );
Path md5File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".md5" );
Path sha1File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".sha1" );


assertTrue( "ChecksumPolicy.apply(FIX) md5 should exist.", Files.exists(md5File) && Files.isRegularFile(md5File) ); assertTrue( "ChecksumPolicy.apply(FIX) md5 should exist.", Files.exists(md5File) && Files.isRegularFile(md5File) );
assertTrue( "ChecksumPolicy.apply(FIX) sha1 should exist.", Files.exists(sha1File) && Files.isRegularFile(sha1File) ); assertTrue( "ChecksumPolicy.apply(FIX) sha1 should exist.", Files.exists(sha1File) && Files.isRegularFile(sha1File) );
return request; return request;
} }


private Path createTestableFiles( String md5State, String sha1State )
private StorageAsset createTestableFiles(String md5State, String sha1State )
throws Exception throws Exception
{ {
Path sourceDir = getTestFile( "src/test/resources/checksums/" );
Path destDir = getTestFile( "target/checksum-tests/" + name.getMethodName() + "/" );
FilesystemStorage fs = new FilesystemStorage(Paths.get("target/checksum-tests"), new DefaultFileLockManager());
StorageAsset sourceDir = getTestFile( "src/test/resources/checksums/" );
StorageAsset destDir = getTestFile( "target/checksum-tests/" + name.getMethodName() + "/" );


FileUtils.copyFileToDirectory( sourceDir.resolve("artifact.jar" ).toFile(), destDir.toFile() );
FileUtils.copyFileToDirectory( sourceDir.getFilePath().resolve("artifact.jar" ).toFile(), destDir.getFilePath().toFile() );


if ( md5State != null ) if ( md5State != null )
{ {
Path md5File = sourceDir.resolve("artifact.jar.md5-" + md5State );
Path md5File = sourceDir.getFilePath().resolve("artifact.jar.md5-" + md5State );
assertTrue( "Testable file exists: " + md5File.getFileName() + ":", Files.exists(md5File) && Files.isRegularFile(md5File) ); assertTrue( "Testable file exists: " + md5File.getFileName() + ":", Files.exists(md5File) && Files.isRegularFile(md5File) );
Path destFile = destDir.resolve("artifact.jar.md5" );
Path destFile = destDir.getFilePath().resolve("artifact.jar.md5" );
FileUtils.copyFile( md5File.toFile(), destFile.toFile() ); FileUtils.copyFile( md5File.toFile(), destFile.toFile() );
} }


if ( sha1State != null ) if ( sha1State != null )
{ {
Path sha1File = sourceDir.resolve("artifact.jar.sha1-" + sha1State );
Path sha1File = sourceDir.getFilePath().resolve("artifact.jar.sha1-" + sha1State );
assertTrue( "Testable file exists: " + sha1File.getFileName() + ":", Files.exists(sha1File) && Files.isRegularFile(sha1File) ); assertTrue( "Testable file exists: " + sha1File.getFileName() + ":", Files.exists(sha1File) && Files.isRegularFile(sha1File) );
Path destFile = destDir.resolve("artifact.jar.sha1" );
Path destFile = destDir.getFilePath().resolve("artifact.jar.sha1" );
FileUtils.copyFile( sha1File.toFile(), destFile.toFile() ); FileUtils.copyFile( sha1File.toFile(), destFile.toFile() );
} }


Path localFile = destDir.resolve("artifact.jar" );
return localFile;

StorageAsset localAsset = fs.getAsset("artifact.jar");
return localAsset;
} }


public static Path getTestFile( String path )
{
return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
public static StorageAsset getTestFile( String path ) throws IOException {
if (filesystemStorage==null) {
filesystemStorage = new FilesystemStorage(Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()), new DefaultFileLockManager());
}
return filesystemStorage.getAsset( path );
} }


} }

+ 8
- 7
archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ReleasePolicyTest.java View File

*/ */


import junit.framework.TestCase; import junit.framework.TestCase;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
request.setProperty( "version", "2.0" ); request.setProperty( "version", "2.0" );
} }


Path targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
Path localFile = targetDir.resolve( path );
StorageAsset targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
StorageAsset localFile = targetDir.resolve( path );


Files.deleteIfExists( localFile );
Files.deleteIfExists( localFile.getFilePath() );


if ( createLocalFile ) if ( createLocalFile )
{ {
Files.createDirectories( localFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile, FILE_ENCODING, "random-junk" );
Files.setLastModifiedTime( localFile,
FileTime.fromMillis(Files.getLastModifiedTime(localFile).toMillis() - generatedLocalFileUpdateDelta));
Files.createDirectories( localFile.getParent().getFilePath());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile.getFilePath(), FILE_ENCODING, "random-junk" );
Files.setLastModifiedTime( localFile.getFilePath(),
FileTime.fromMillis(Files.getLastModifiedTime(localFile.getFilePath()).toMillis() - generatedLocalFileUpdateDelta));
} }


policy.applyPolicy( setting, request, localFile ); policy.applyPolicy( setting, request, localFile );

+ 11
- 7
archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/SnapshotsPolicyTest.java View File

*/ */


import junit.framework.TestCase; import junit.framework.TestCase;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@Inject @Named(value="preDownloadPolicy#snapshots") @Inject @Named(value="preDownloadPolicy#snapshots")
PreDownloadPolicy policy; PreDownloadPolicy policy;


private FilesystemStorage filesystemStorage;

private PreDownloadPolicy lookupPolicy() private PreDownloadPolicy lookupPolicy()
throws Exception throws Exception
{ {
request.setProperty( "version", "2.0" ); request.setProperty( "version", "2.0" );
} }


Path targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
Path localFile = targetDir.resolve( path );
StorageAsset targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
StorageAsset localFile = targetDir.resolve( path );


Files.deleteIfExists( localFile );
Files.deleteIfExists( localFile.getFilePath() );


if ( createLocalFile ) if ( createLocalFile )
{ {
Files.createDirectories( localFile.getParent());
org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile, FILE_ENCODING, "random-junk" );
Files.setLastModifiedTime( localFile,
FileTime.fromMillis( Files.getLastModifiedTime( localFile ).toMillis() - generatedLocalFileUpdateDelta ));
Files.createDirectories( localFile.getParent().getFilePath() );
org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile.getFilePath(), FILE_ENCODING, "random-junk" );
Files.setLastModifiedTime( localFile.getFilePath(),
FileTime.fromMillis( Files.getLastModifiedTime( localFile.getFilePath() ).toMillis() - generatedLocalFileUpdateDelta ));
} }


policy.applyPolicy( setting, request, localFile ); policy.applyPolicy( setting, request, localFile );

+ 1
- 3
archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/ProxyFetchResult.java View File

*/ */




import org.apache.archiva.repository.content.StorageAsset;

import java.nio.file.Path;
import org.apache.archiva.repository.storage.StorageAsset;


/** /**
* A result from a proxy fetch operation. * A result from a proxy fetch operation.

+ 1
- 1
archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/RepositoryProxyHandler.java View File

import org.apache.archiva.policies.ProxyDownloadException; import org.apache.archiva.policies.ProxyDownloadException;
import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;


import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;

+ 3
- 7
archiva-modules/archiva-base/archiva-proxy/src/main/java/org/apache/archiva/proxy/DefaultRepositoryProxyHandler.java View File

import org.apache.archiva.checksum.ChecksumAlgorithm; import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksumUtil; import org.apache.archiva.checksum.ChecksumUtil;
import org.apache.archiva.proxy.model.ProxyConnectorRuleType; import org.apache.archiva.proxy.model.ProxyConnectorRuleType;
import org.apache.archiva.common.filelock.FileLockException;
import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;
import org.apache.archiva.configuration.*; import org.apache.archiva.configuration.*;
import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.Keys; import org.apache.archiva.model.Keys;
import org.apache.archiva.redback.components.registry.RegistryListener; import org.apache.archiva.redback.components.registry.RegistryListener;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException; import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.content.StorageUtil;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.metadata.RepositoryMetadataException; import org.apache.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.archiva.scheduler.ArchivaTaskScheduler; import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*; import java.util.*;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentMap;

+ 2
- 2
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-api/src/main/java/org/apache/archiva/admin/model/group/RepositoryGroupAdmin.java View File

import org.apache.archiva.admin.model.AuditInformation; import org.apache.archiva.admin.model.AuditInformation;
import org.apache.archiva.admin.model.RepositoryAdminException; import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.admin.model.beans.RepositoryGroup; import org.apache.archiva.admin.model.beans.RepositoryGroup;
import org.apache.archiva.repository.storage.StorageAsset;


import java.nio.file.Path;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;


Map<String, List<String>> getRepositoryToGroupMap() Map<String, List<String>> getRepositoryToGroupMap()
throws RepositoryAdminException; throws RepositoryAdminException;


Path getMergedIndexDirectory(String repositoryGroupId );
StorageAsset getMergedIndexDirectory(String repositoryGroupId );
} }

+ 8
- 6
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/main/java/org/apache/archiva/admin/repository/group/DefaultRepositoryGroupAdmin.java View File

import org.apache.archiva.configuration.RepositoryGroupConfiguration; import org.apache.archiva.configuration.RepositoryGroupConfiguration;
import org.apache.archiva.metadata.model.facets.AuditEvent; import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler; import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler;
import org.apache.archiva.repository.EditableRepository;
import org.apache.archiva.repository.EditableRepositoryGroup; import org.apache.archiva.repository.EditableRepositoryGroup;
import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryRegistry; import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;




@Override @Override
public Path getMergedIndexDirectory( String repositoryGroupId )
public StorageAsset getMergedIndexDirectory(String repositoryGroupId )
{ {
return groupsDirectory.resolve( repositoryGroupId );
org.apache.archiva.repository.RepositoryGroup group = repositoryRegistry.getRepositoryGroup(repositoryGroupId);
if (group!=null) {
return group.getFeature(IndexCreationFeature.class).get().getLocalIndexPath();
} else {
return null;
}
} }


@Override @Override

+ 14
- 10
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/ArchivaIndexManagerMock.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration; import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException; import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.index.ArtifactContext; import org.apache.maven.index.ArtifactContext;
import org.apache.maven.index.ArtifactContextProducer; import org.apache.maven.index.ArtifactContextProducer;


private Path getIndexPath( ArchivaIndexingContext ctx ) private Path getIndexPath( ArchivaIndexingContext ctx )
{ {
return PathUtil.getPathFromUri( ctx.getPath( ) );
return ctx.getPath( ).getFilePath();
} }


@FunctionalInterface @FunctionalInterface
@Override @Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{ {
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> { executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try { try {
indexer.addArtifactsToIndex(artifacts, indexingContext); indexer.addArtifactsToIndex(artifacts, indexingContext);
} catch (IOException e) { } catch (IOException e) {
@Override @Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{ {
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> { executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try { try {
indexer.deleteArtifactsFromIndex(artifacts, indexingContext); indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
} catch (IOException e) { } catch (IOException e) {
log.warn("Index close failed"); log.warn("Index close failed");
} }
try { try {
FileUtils.deleteDirectory(Paths.get(context.getPath()));
StorageUtil.deleteRecursively(context.getPath());
} catch (IOException e) { } catch (IOException e) {
throw new IndexUpdateFailedException("Could not delete index files"); throw new IndexUpdateFailedException("Could not delete index files");
} }
URI indexDir = icf.getIndexPath(); URI indexDir = icf.getIndexPath();
String indexPath = indexDir.getPath(); String indexPath = indexDir.getPath();
Path indexDirectory = null; Path indexDirectory = null;
FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getAsset("").getStorage();
if ( ! StringUtils.isEmpty(indexDir.toString( ) ) ) if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
{ {


if ( indexDirectory.isAbsolute( ) ) if ( indexDirectory.isAbsolute( ) )
{ {
indexPath = indexDirectory.getFileName().toString(); indexPath = indexDirectory.getFileName().toString();
filesystemStorage = new FilesystemStorage(indexDirectory.getParent(), new DefaultFileLockManager());
} }
else else
{ {
{ {
Files.createDirectories( indexDirectory ); Files.createDirectories( indexDirectory );
} }
return new FilesystemAsset( indexPath, indexDirectory);
return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory);
} }


private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException

+ 13
- 2
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MavenIndexContextMock.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.index.context.IndexingContext; import org.apache.maven.index.context.IndexingContext;


import java.io.IOException; import java.io.IOException;


private IndexingContext delegate; private IndexingContext delegate;
private Repository repository; private Repository repository;
private FilesystemStorage filesystemStorage;


MavenIndexContextMock(Repository repository, IndexingContext delegate) { MavenIndexContextMock(Repository repository, IndexingContext delegate) {
this.delegate = delegate; this.delegate = delegate;
this.repository = repository; this.repository = repository;
try {
this.filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath(), new DefaultFileLockManager());
} catch (IOException e) {
e.printStackTrace();
}


} }


} }


@Override @Override
public URI getPath() {
return delegate.getIndexDirectoryFile().toURI();
public StorageAsset getPath() {
return
new FilesystemAsset(filesystemStorage, "", delegate.getIndexDirectoryFile().toPath());
} }


@Override @Override

+ 2
- 3
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MockMergedRemoteIndexesScheduler.java View File



import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler; import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler;
import org.apache.archiva.repository.RepositoryGroup; import org.apache.archiva.repository.RepositoryGroup;
import org.apache.archiva.repository.storage.StorageAsset;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;


import java.nio.file.Path;

/** /**
* @author Olivier Lamy * @author Olivier Lamy
*/ */
{ {


@Override @Override
public void schedule( RepositoryGroup repositoryGroup, Path directory )
public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory )
{ {
// no op // no op
} }

+ 4
- 0
archiva-modules/archiva-base/archiva-repository-api/pom.xml View File

<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-common</artifactId> <artifactId>archiva-common</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>


<dependency> <dependency>
<groupId>commons-lang</groupId> <groupId>commons-lang</groupId>

+ 0
- 2
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexManager.java View File



import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.content.StorageAsset;


import java.net.URI; import java.net.URI;
import java.nio.file.Path;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;



+ 2
- 1
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java View File

*/ */


import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.StorageAsset;


import java.io.IOException; import java.io.IOException;
import java.net.URI; import java.net.URI;
* The path where the index is stored. * The path where the index is stored.
* @return * @return
*/ */
URI getPath();
StorageAsset getPath();


/** /**
* Returns true, if the index has no entries or is not initialized. * Returns true, if the index has no entries or is not initialized.

+ 6
- 4
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/IndexMergerRequest.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.repository.storage.StorageAsset;

import java.nio.file.Path; import java.nio.file.Path;
import java.util.Collection; import java.util.Collection;




private int mergedIndexTtl; private int mergedIndexTtl;


private Path mergedIndexDirectory;
private StorageAsset mergedIndexDirectory;


private boolean temporary; private boolean temporary;


this.mergedIndexTtl = mergedIndexTtl; this.mergedIndexTtl = mergedIndexTtl;
} }


public Path getMergedIndexDirectory()
public StorageAsset getMergedIndexDirectory()
{ {
return mergedIndexDirectory; return mergedIndexDirectory;
} }


public void setMergedIndexDirectory( Path mergedIndexDirectory )
public void setMergedIndexDirectory( StorageAsset mergedIndexDirectory )
{ {
this.mergedIndexDirectory = mergedIndexDirectory; this.mergedIndexDirectory = mergedIndexDirectory;
} }


public IndexMergerRequest mergedIndexDirectory( Path mergedIndexDirectory )
public IndexMergerRequest mergedIndexDirectory( StorageAsset mergedIndexDirectory )
{ {
this.mergedIndexDirectory = mergedIndexDirectory; this.mergedIndexDirectory = mergedIndexDirectory;
return this; return this;

+ 3
- 3
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/MergedRemoteIndexesScheduler.java View File





import org.apache.archiva.repository.RepositoryGroup; import org.apache.archiva.repository.RepositoryGroup;

import java.nio.file.Path;
import org.apache.archiva.repository.storage.StorageAsset;


/** /**
* @author Olivier Lamy * @author Olivier Lamy
* will check if this repository group need to a schedule a cron to download/merge * will check if this repository group need to a schedule a cron to download/merge
* remote indexes * remote indexes
* @param repositoryGroup * @param repositoryGroup
* @param directory
*/ */
void schedule(RepositoryGroup repositoryGroup, Path directory );
void schedule(RepositoryGroup repositoryGroup, StorageAsset directory );


void unschedule( RepositoryGroup repositoryGroup ); void unschedule( RepositoryGroup repositoryGroup );



+ 6
- 4
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/TemporaryGroupIndex.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.repository.storage.StorageAsset;

import java.io.Serializable; import java.io.Serializable;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Date; import java.util.Date;
{ {
private long creationTime = new Date().getTime(); private long creationTime = new Date().getTime();


private Path directory;
private StorageAsset directory;


private String indexId; private String indexId;




private int mergedIndexTtl; private int mergedIndexTtl;


public TemporaryGroupIndex(Path directory, String indexId, String groupId, int mergedIndexTtl)
public TemporaryGroupIndex(StorageAsset directory, String indexId, String groupId, int mergedIndexTtl)
{ {
this.directory = directory; this.directory = directory;
this.indexId = indexId; this.indexId = indexId;
return this; return this;
} }


public Path getDirectory()
public StorageAsset getDirectory()
{ {
return directory; return directory;
} }


public TemporaryGroupIndex setDirectory( Path directory )
public TemporaryGroupIndex setDirectory( StorageAsset directory )
{ {
this.directory = directory; this.directory = directory;
return this; return this;

+ 0
- 2
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepository.java View File

*/ */




import org.apache.archiva.repository.content.RepositoryStorage;

import java.util.Set; import java.util.Set;


/** /**

+ 1
- 1
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java View File

import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.ProjectReference; import org.apache.archiva.model.ProjectReference;
import org.apache.archiva.model.VersionedReference; import org.apache.archiva.model.VersionedReference;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;


import java.util.Set; import java.util.Set;



+ 1
- 1
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/Repository.java View File

*/ */


import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.features.RepositoryFeature; import org.apache.archiva.repository.features.RepositoryFeature;


import java.net.URI; import java.net.URI;

+ 1
- 2
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RepositoryGroup.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.RepositoryStorage;


import java.util.List; import java.util.List;



+ 1
- 2
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/features/IndexCreationFeature.java View File



import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryEventListener; import org.apache.archiva.repository.RepositoryEventListener;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;


import java.net.URI; import java.net.URI;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import java.nio.file.Path;


import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH; import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_PACKED_INDEX_PATH; import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_PACKED_INDEX_PATH;

+ 4
- 0
archiva-modules/archiva-base/archiva-repository-layer/pom.xml View File

<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId> <artifactId>archiva-model</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-fs</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId> <artifactId>archiva-checksum</artifactId>

+ 6
- 4
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultIndexMerger.java View File

import org.apache.archiva.indexer.merger.TemporaryGroupIndex; import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryRegistry; import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.time.StopWatch; import org.apache.commons.lang.time.StopWatch;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
stopWatch.reset(); stopWatch.reset();
stopWatch.start(); stopWatch.start();


Path mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
StorageAsset mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
Repository destinationRepository = repositoryRegistry.getRepository(indexMergerRequest.getGroupId()); Repository destinationRepository = repositoryRegistry.getRepository(indexMergerRequest.getGroupId());


ArchivaIndexManager idxManager = repositoryRegistry.getIndexManager(destinationRepository.getType()); ArchivaIndexManager idxManager = repositoryRegistry.getIndexManager(destinationRepository.getType());
ctx.close(true); ctx.close(true);
temporaryGroupIndexes.remove( temporaryGroupIndex ); temporaryGroupIndexes.remove( temporaryGroupIndex );
temporaryContextes.remove( ctx ); temporaryContextes.remove( ctx );
Path directory = temporaryGroupIndex.getDirectory();
if ( directory != null && Files.exists(directory) )
StorageAsset directory = temporaryGroupIndex.getDirectory();
if ( directory != null && directory.exists() )
{ {
FileUtils.deleteDirectory( directory );
StorageUtil.deleteRecursively( directory );
} }
} }
} }

+ 2
- 1
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultMergedRemoteIndexesScheduler.java View File

import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.RepositoryGroup; import org.apache.archiva.repository.RepositoryGroup;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
private Map<String, ScheduledFuture> scheduledFutureMap = new ConcurrentHashMap<>(); private Map<String, ScheduledFuture> scheduledFutureMap = new ConcurrentHashMap<>();


@Override @Override
public void schedule(RepositoryGroup repositoryGroup, Path directory )
public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory )
{ {
if ( StringUtils.isEmpty( repositoryGroup.getSchedulingDefinition() ) ) if ( StringUtils.isEmpty( repositoryGroup.getSchedulingDefinition() ) )
{ {

+ 1
- 6
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractManagedRepository.java View File

*/ */




import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.RepositoryStorage;


import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.Locale; import java.util.Locale;
import java.util.Set; import java.util.Set;
import java.util.function.Consumer;


/** /**
* Simple implementation of a managed repository. * Simple implementation of a managed repository.

+ 1
- 1
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRemoteRepository.java View File

*/ */




import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;


import java.nio.file.Path; import java.nio.file.Path;
import java.time.Duration; import java.time.Duration;

+ 2
- 3
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepository.java View File

import com.cronutils.model.definition.CronDefinition; import com.cronutils.model.definition.CronDefinition;
import com.cronutils.model.definition.CronDefinitionBuilder; import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.parser.CronParser; import com.cronutils.parser.CronParser;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.RepositoryFeature; import org.apache.archiva.repository.features.RepositoryFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature; import org.apache.archiva.repository.features.StagingRepositoryFeature;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;

+ 1
- 6
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepositoryGroup.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.commons.collections4.map.ListOrderedMap; import org.apache.commons.collections4.map.ListOrderedMap;


import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Consumer;


/** /**
* Abstract repository group implementation. * Abstract repository group implementation.

+ 2
- 5
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicManagedRepository.java View File



import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.features.ArtifactCleanupFeature; import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature; import org.apache.archiva.repository.features.StagingRepositoryFeature;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;


import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Locale; import java.util.Locale;
import java.util.function.Consumer;


/** /**
* *

+ 2
- 2
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicRemoteRepository.java View File



import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.slf4j.Logger; import org.slf4j.Logger;

+ 0
- 4
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/RepositoryRegistry.java View File

import org.apache.archiva.indexer.IndexManagerFactory; import org.apache.archiva.indexer.IndexManagerFactory;
import org.apache.archiva.indexer.IndexUpdateFailedException; import org.apache.archiva.indexer.IndexUpdateFailedException;
import org.apache.archiva.redback.components.registry.RegistryException; import org.apache.archiva.redback.components.registry.RegistryException;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationEvent; import org.apache.archiva.repository.features.IndexCreationEvent;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.StagingRepositoryFeature; import org.apache.archiva.repository.features.StagingRepositoryFeature;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;

+ 16
- 0
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/ArtifactUtil.java View File

import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryContentFactory; import org.apache.archiva.repository.RepositoryContentFactory;
import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.storage.StorageAsset;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;


import javax.inject.Inject; import javax.inject.Inject;
return Paths.get(repository.getLocation()).resolve(artifactPath); return Paths.get(repository.getLocation()).resolve(artifactPath);
} }


/**
 * Returns the asset representation of the given artifact inside the repository.
 * The returned asset is not checked for existence.
 *
 * @param repository The repository, where the artifact is stored.
 * @param artifactReference The artifact reference.
 * @return The asset representation of the artifact.
 * @throws RepositoryException if the managed repository content cannot be resolved
 */
public StorageAsset getArtifactAsset(ManagedRepository repository, ArtifactReference artifactReference) throws RepositoryException {
    ManagedRepositoryContent repositoryContent = repositoryContentFactory.getManagedRepositoryContent(repository);
    String assetPath = repositoryContent.toPath(artifactReference);
    return repository.getAsset(assetPath);
}

} }

+ 0
- 192
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/StorageUtil.java View File

package org.apache.archiva.repository.content;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.archiva.common.filelock.FileLockException;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.Path;

/**
* @author Martin Stockhammer <martin_s@apache.org>
*/
public class StorageUtil
{
private static final int DEFAULT_BUFFER_SIZE = 4096;

/**
* Copies the source asset to the target. The assets may be from different RepositoryStorage instances.
*
* @param source The source asset
* @param target The target asset
* @param locked If true, a readlock is set on the source and a write lock is set on the target.
* @param copyOptions Copy options
* @throws IOException
*/
public static final void copyAsset( final StorageAsset source,
final StorageAsset target,
boolean locked,
final CopyOption... copyOptions ) throws IOException
{
if (source.isFileBased() && target.isFileBased()) {
// Short cut for FS operations
final Path sourcePath = source.getFilePath();
final Path targetPath = target.getFilePath( );
if (locked) {
final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager();
final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager();
try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) )
{
Files.copy( sourcePath, targetPath, copyOptions );
}
catch ( FileLockException e )
{
throw new IOException( e );
}
catch ( FileLockTimeoutException e )
{
throw new IOException( e );
}
} else
{
Files.copy( sourcePath, targetPath, copyOptions );
}
} else {
try {
final RepositoryStorage sourceStorage = source.getStorage();
final RepositoryStorage targetStorage = target.getStorage();
sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
} catch (IOException e) {
throw e;
} catch (Throwable e) {
Throwable cause = e.getCause();
if (cause instanceof IOException) {
throw (IOException)cause;
} else
{
throw new IOException( e );
}
}
}
}

/**
*
* @param source
* @param target
* @param locked
* @param copyOptions
* @throws IOException
*/
public static void moveAsset(StorageAsset source, StorageAsset target, boolean locked, CopyOption... copyOptions) throws IOException
{
if (source.isFileBased() && target.isFileBased()) {
// Short cut for FS operations
// Move is atomic operation
Files.move( source.getFilePath(), target.getFilePath(), copyOptions );
} else {
try {
final RepositoryStorage sourceStorage = source.getStorage();
final RepositoryStorage targetStorage = target.getStorage();
sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
sourceStorage.removeAsset( source );
} catch (IOException e) {
throw e;
} catch (Throwable e) {
Throwable cause = e.getCause();
if (cause instanceof IOException) {
throw (IOException)cause;
} else
{
throw new IOException( e );
}
}
}

}

private static void wrapWriteFunction(ReadableByteChannel is, RepositoryStorage targetStorage, StorageAsset target, boolean locked) {
try {
targetStorage.writeDataToChannel( target, os -> copy(is, os), locked );
} catch (Exception e) {
throw new RuntimeException( e );
}
}


private static void copy( final ReadableByteChannel is, final WritableByteChannel os ) {
if (is instanceof FileChannel) {
copy( (FileChannel) is, os );
} else if (os instanceof FileChannel) {
copy(is, (FileChannel)os);
} else
{
try
{
ByteBuffer buffer = ByteBuffer.allocate( DEFAULT_BUFFER_SIZE );
while ( is.read( buffer ) != -1 )
{
buffer.flip( );
while ( buffer.hasRemaining( ) )
{
os.write( buffer );
}
buffer.clear( );
}
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
}
}

private static void copy( final FileChannel is, final WritableByteChannel os ) {
try
{
is.transferTo( 0, is.size( ), os );
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
}

private static void copy( final ReadableByteChannel is, final FileChannel os ) {
try
{
os.transferFrom( is, 0, Long.MAX_VALUE );
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
}

}

+ 40
- 46
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/MetadataTools.java View File

import org.apache.archiva.repository.LayoutException; import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RemoteRepositoryContent; import org.apache.archiva.repository.RemoteRepositoryContent;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException; import org.apache.archiva.xml.XMLException;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
ProjectReference reference, String proxyId ) ProjectReference reference, String proxyId )
{ {
String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) ); String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );


if ( !Files.exists(metadataFile) || !Files.isRegularFile( metadataFile ))
if ( !metadataFile.exists() || metadataFile.isContainer())
{ {
// Nothing to do. return null. // Nothing to do. return null.
return null; return null;
{ {
return MavenMetadataReader.read( metadataFile ); return MavenMetadataReader.read( metadataFile );
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
// TODO: [monitor] consider a monitor for this event. // TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code? // TODO: consider a read-redo on monitor return code?
log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
return null; return null;
} }
} }
String logicalResource, String proxyId ) String logicalResource, String proxyId )
{ {
String metadataPath = getRepositorySpecificName( proxyId, logicalResource ); String metadataPath = getRepositorySpecificName( proxyId, logicalResource );
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );


if ( !Files.exists(metadataFile) || !Files.isRegularFile( metadataFile))
if ( !metadataFile.exists() || metadataFile.isContainer())
{ {
// Nothing to do. return null. // Nothing to do. return null.
return null; return null;
{ {
return MavenMetadataReader.read( metadataFile ); return MavenMetadataReader.read( metadataFile );
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
// TODO: [monitor] consider a monitor for this event. // TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code? // TODO: consider a read-redo on monitor return code?
log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
return null; return null;
} }
} }
VersionedReference reference, String proxyId ) VersionedReference reference, String proxyId )
{ {
String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) ); String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );


if ( !Files.exists(metadataFile) || !Files.isRegularFile(metadataFile))
if ( !metadataFile.exists() || metadataFile.isContainer())
{ {
// Nothing to do. return null. // Nothing to do. return null.
return null; return null;
{ {
return MavenMetadataReader.read( metadataFile ); return MavenMetadataReader.read( metadataFile );
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
// TODO: [monitor] consider a monitor for this event. // TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code? // TODO: consider a read-redo on monitor return code?
log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
return null; return null;
} }
} }
public void updateMetadata( ManagedRepositoryContent managedRepository, String logicalResource ) public void updateMetadata( ManagedRepositoryContent managedRepository, String logicalResource )
throws RepositoryMetadataException throws RepositoryMetadataException
{ {
final Path metadataFile = Paths.get( managedRepository.getRepoRoot(), logicalResource );
final StorageAsset metadataFile = managedRepository.getRepository().getAsset( logicalResource );
ArchivaRepositoryMetadata metadata = null; ArchivaRepositoryMetadata metadata = null;


//Gather and merge all metadata available //Gather and merge all metadata available


RepositoryMetadataWriter.write( metadata, metadataFile ); RepositoryMetadataWriter.write( metadata, metadataFile );


ChecksummedFile checksum = new ChecksummedFile( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
checksum.fixChecksums( algorithms ); checksum.fixChecksums( algorithms );
} }


* @param metadataParentDirectory * @param metadataParentDirectory
* @return original set plus newly found versions * @return original set plus newly found versions
*/ */
private Set<String> findPossibleVersions( Set<String> versions, Path metadataParentDirectory )
private Set<String> findPossibleVersions( Set<String> versions, StorageAsset metadataParentDirectory )
{ {


Set<String> result = new HashSet<String>( versions ); Set<String> result = new HashSet<String>( versions );


try (Stream<Path> stream = Files.list( metadataParentDirectory )) {
stream.filter( Files::isDirectory ).filter(
p ->
{
try(Stream<Path> substream = Files.list(p))
{
return substream.anyMatch( f -> Files.isRegularFile( f ) && f.toString().endsWith( ".pom" ));
}
catch ( IOException e )
{
return false;
}
metadataParentDirectory.list().stream().filter(asset ->
asset.isContainer()).filter(asset -> {
return asset.list().stream().anyMatch(f -> !f.isContainer() && f.getName().endsWith(".pom"));
} }
).forEach(
p -> result.add(p.getFileName().toString())
);
} catch (IOException e) {
//
}
).forEach( p -> result.add(p.getName()));

return result; return result;
} }


ManagedRepositoryContent managedRepository, String logicalResource ) ManagedRepositoryContent managedRepository, String logicalResource )
{ {
List<ArchivaRepositoryMetadata> metadatas = new ArrayList<>(); List<ArchivaRepositoryMetadata> metadatas = new ArrayList<>();
Path file = Paths.get( managedRepository.getRepoRoot(), logicalResource );
if ( Files.exists(file) )
StorageAsset file = managedRepository.getRepository().getAsset( logicalResource );

if ( file.exists() )
{ {
try try
{ {
metadatas.add( existingMetadata ); metadatas.add( existingMetadata );
} }
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
log.debug( "Could not read metadata at {}. Metadata will be removed.", file.toAbsolutePath() );
FileUtils.deleteQuietly( file );
log.debug( "Could not read metadata at {}. Metadata will be removed.", file.getPath() );
try {
file.getStorage().removeAsset(file);
} catch (IOException ex) {
log.error("Could not remove asset {}", file.getPath());
}
} }
} }


public void updateMetadata( ManagedRepositoryContent managedRepository, ProjectReference reference ) public void updateMetadata( ManagedRepositoryContent managedRepository, ProjectReference reference )
throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
{ {
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), toPath( reference ) );

StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );


long lastUpdated = getExistingLastUpdated( metadataFile ); long lastUpdated = getExistingLastUpdated( metadataFile );


// TODO: do we know this information instead? // TODO: do we know this information instead?
// Set<Plugin> allPlugins = managedRepository.getPlugins( reference ); // Set<Plugin> allPlugins = managedRepository.getPlugins( reference );
Set<Plugin> allPlugins; Set<Plugin> allPlugins;
if ( Files.exists(metadataFile))
if ( metadataFile.exists())
{ {
try try
{ {


// Save the metadata model to disk. // Save the metadata model to disk.
RepositoryMetadataWriter.write( metadata, metadataFile ); RepositoryMetadataWriter.write( metadata, metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
checksum.fixChecksums( algorithms ); checksum.fixChecksums( algorithms );
} }


} }
} }


private long getExistingLastUpdated( Path metadataFile )
private long getExistingLastUpdated( StorageAsset metadataFile )
{ {
if ( !Files.exists(metadataFile) )
if ( !metadataFile.exists() )
{ {
// Doesn't exist. // Doesn't exist.
return 0; return 0;


return getLastUpdated( metadata ); return getLastUpdated( metadata );
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
// Error. // Error.
return 0; return 0;
public void updateMetadata( ManagedRepositoryContent managedRepository, VersionedReference reference ) public void updateMetadata( ManagedRepositoryContent managedRepository, VersionedReference reference )
throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
{ {
Path metadataFile = Paths.get( managedRepository.getRepoRoot(), toPath( reference ) );
StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );


long lastUpdated = getExistingLastUpdated( metadataFile ); long lastUpdated = getExistingLastUpdated( metadataFile );




// Save the metadata model to disk. // Save the metadata model to disk.
RepositoryMetadataWriter.write( metadata, metadataFile ); RepositoryMetadataWriter.write( metadata, metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
checksum.fixChecksums( algorithms ); checksum.fixChecksums( algorithms );
} }



+ 14
- 4
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/RepositoryMetadataWriter.java View File

import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.model.ArchivaRepositoryMetadata; import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.model.Plugin; import org.apache.archiva.model.Plugin;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException; import org.apache.archiva.xml.XMLException;
import org.apache.archiva.xml.XMLWriter; import org.apache.archiva.xml.XMLWriter;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.dom4j.Document; import org.dom4j.Document;
import org.dom4j.DocumentHelper; import org.dom4j.DocumentHelper;
import org.dom4j.Element; import org.dom4j.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


import java.io.FileWriter; import java.io.FileWriter;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer; import java.io.Writer;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Collections; import java.util.Collections;
*/ */
public class RepositoryMetadataWriter public class RepositoryMetadataWriter
{ {
public static void write( ArchivaRepositoryMetadata metadata, Path outputFile )
private static final Logger log = LoggerFactory.getLogger(RepositoryMetadataWriter.class);

public static void write( ArchivaRepositoryMetadata metadata, StorageAsset outputFile )
throws RepositoryMetadataException throws RepositoryMetadataException
{ {
boolean thrown = false; boolean thrown = false;
try (FileWriter writer = new FileWriter( outputFile.toFile() ))
try (OutputStreamWriter writer = new OutputStreamWriter( outputFile.getWriteStream(true)))
{ {
write( metadata, writer ); write( metadata, writer );
writer.flush(); writer.flush();
{ {
thrown = true; thrown = true;
throw new RepositoryMetadataException( throw new RepositoryMetadataException(
"Unable to write metadata file: " + outputFile.toAbsolutePath() + " - " + e.getMessage(), e );
"Unable to write metadata file: " + outputFile.getPath() + " - " + e.getMessage(), e );
} }
finally finally
{ {
if ( thrown ) if ( thrown )
{ {
FileUtils.deleteQuietly( outputFile );
try {
outputFile.getStorage().removeAsset(outputFile);
} catch (IOException e) {
log.error("Could not remove asset {}", outputFile);
}
} }
} }
} }

+ 0
- 202
archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemAssetTest.java View File

package org.apache.archiva.repository.content;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;

import static org.junit.Assert.*;

/**
 * Unit tests for {@code FilesystemAsset}, exercising path/name resolution, container
 * detection, listing, read/write streams and file replacement against real temp files.
 */
public class FilesystemAssetTest {

    Path assetPathFile;
    Path assetPathDir;

    @Before
    public void init() throws IOException {
        assetPathFile = Files.createTempFile("assetFile", "dat");
        assetPathDir = Files.createTempDirectory("assetDir");
    }

    @After
    public void cleanup() {
        deleteQuietly(assetPathFile);
        deleteQuietly(assetPathDir);
    }

    // Best-effort delete for cleanup: a failing delete must not mask the test result.
    private static void deleteQuietly(Path path) {
        try {
            Files.deleteIfExists(path);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Test
    public void getPath() {
        FilesystemAsset asset = new FilesystemAsset("/" + assetPathFile.getFileName().toString(), assetPathFile);
        assertEquals("/" + assetPathFile.getFileName().toString(), asset.getPath());
    }

    @Test
    public void getName() {
        FilesystemAsset asset = new FilesystemAsset("/" + assetPathFile.getFileName().toString(), assetPathFile);
        assertEquals(assetPathFile.getFileName().toString(), asset.getName());
    }

    @Test
    public void getModificationTime() throws IOException {
        Instant modTime = Files.getLastModifiedTime(assetPathFile).toInstant();
        FilesystemAsset asset = new FilesystemAsset("/test123", assetPathFile);
        // assertEquals gives a useful failure message, unlike assertTrue(a.equals(b))
        assertEquals(modTime, asset.getModificationTime());
    }

    @Test
    public void isContainer() {
        FilesystemAsset asset = new FilesystemAsset("/test1323", assetPathFile);
        assertFalse(asset.isContainer());
        FilesystemAsset asset2 = new FilesystemAsset("/test1234", assetPathDir);
        assertTrue(asset2.isContainer());
    }

    @Test
    public void list() throws IOException {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
        assertEquals(0, asset.list().size());

        FilesystemAsset asset2 = new FilesystemAsset("/test1235", assetPathDir);
        assertEquals(0, asset2.list().size());
        Path f1 = Files.createTempFile(assetPathDir, "testfile", "dat");
        Path f2 = Files.createTempFile(assetPathDir, "testfile", "dat");
        Path d1 = Files.createTempDirectory(assetPathDir, "testdir");
        try {
            assertEquals(3, asset2.list().size());
            assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f1.getFileName().toString())));
            assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f2.getFileName().toString())));
            assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(d1.getFileName().toString())));
        } finally {
            // Always remove the children, otherwise cleanup() cannot delete assetPathDir
            Files.deleteIfExists(f1);
            Files.deleteIfExists(f2);
            Files.deleteIfExists(d1);
        }
    }

    @Test
    public void getSize() throws IOException {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
        assertEquals(0, asset.getSize());

        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        assertTrue(asset.getSize() >= 6);
    }

    @Test
    public void getData() throws IOException {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        try (InputStream is = asset.getReadStream()) {
            assertEquals("abcdef", IOUtils.toString(is, "ASCII"));
        }
    }

    @Test
    public void getDataExceptionOnDir() throws IOException {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathDir);
        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        // try-with-resources: if no exception is thrown, the stream is still closed
        try (InputStream is = asset.getReadStream()) {
            fail("Exception expected for data on dir");
        } catch (IOException e) {
            // expected
        }
    }

    @Test
    public void writeData() throws IOException {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        try (OutputStream os = asset.getWriteStream(true)) {
            IOUtils.write("test12345", os, "ASCII");
        }
        assertEquals("test12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
    }

    @Test
    public void writeDataAppend() throws IOException {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        try (OutputStream os = asset.getWriteStream(false)) {
            IOUtils.write("test12345", os, "ASCII");
        }
        assertEquals("abcdeftest12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
    }

    @Test
    public void writeDataExceptionOnDir() throws IOException {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathDir);
        // try-with-resources: no stream leak if getWriteStream unexpectedly succeeds
        try (OutputStream os = asset.getWriteStream(true)) {
            fail("Writing to a directory should throw a IOException");
        } catch (IOException e) {
            // expected
        }
    }

    @Test
    public void storeDataFile() throws IOException {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
        Path dataFile = Files.createTempFile("testdata", "dat");
        try {
            try (OutputStream os = Files.newOutputStream(dataFile)) {
                IOUtils.write("testkdkdkd", os, "ASCII");
            }
            asset.replaceDataFromFile(dataFile);
            assertEquals("testkdkdkd", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
        } finally {
            // The original test leaked this temp file
            Files.deleteIfExists(dataFile);
        }
    }

    @Test
    public void exists() {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
        assertTrue(asset.exists());
        FilesystemAsset asset2 = new FilesystemAsset("/test1234", Paths.get("abcdefgkdkdk"));
        assertFalse(asset2.exists());
    }

    @Test
    public void getFilePath() {
        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
        assertEquals(assetPathFile, asset.getFilePath());
    }
}

+ 1
- 1
archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java View File

import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;


import java.util.Set; import java.util.Set;

+ 1
- 1
archiva-modules/archiva-base/archiva-repository-scanner/src/main/java/org/apache/archiva/repository/scanner/DefaultRepositoryScanner.java View File

import org.apache.archiva.consumers.KnownRepositoryContentConsumer; import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.consumers.RepositoryContentConsumer; import org.apache.archiva.consumers.RepositoryContentConsumer;
import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

+ 1
- 1
archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/RepositoryScannerTest.java View File

import org.apache.archiva.repository.EditableManagedRepository; import org.apache.archiva.repository.EditableManagedRepository;
import org.apache.archiva.repository.EditableRemoteRepository; import org.apache.archiva.repository.EditableRemoteRepository;
import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.scanner.mock.ManagedRepositoryContentMock; import org.apache.archiva.repository.scanner.mock.ManagedRepositoryContentMock;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;

+ 18
- 3
archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/mock/ManagedRepositoryContentMock.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.VersionUtil; import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.metadata.model.ArtifactMetadata; import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet; import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.model.ProjectReference; import org.apache.archiva.model.ProjectReference;
import org.apache.archiva.model.VersionedReference; import org.apache.archiva.model.VersionedReference;
import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;


import java.io.IOException;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;




private ManagedRepository repository; private ManagedRepository repository;
private FilesystemStorage fsStorage;


public ManagedRepositoryContentMock(ManagedRepository repo) { public ManagedRepositoryContentMock(ManagedRepository repo) {
this.repository = repo; this.repository = repo;
@Override @Override
public String getRepoRoot( ) public String getRepoRoot( )
{ {
return Paths.get("", "target", "test-repository", "managed").toString();
return getRepoRootAsset().getFilePath().toString();
}

private StorageAsset getRepoRootAsset() {
if (fsStorage==null) {
try {
fsStorage = new FilesystemStorage(Paths.get("", "target", "test-repository", "managed"), new DefaultFileLockManager());
} catch (IOException e) {
e.printStackTrace();
}
}
return fsStorage.getAsset("");
} }


@Override @Override
@Override @Override
public StorageAsset toFile( ArtifactReference reference ) public StorageAsset toFile( ArtifactReference reference )
{ {
return Paths.get(getRepoRoot(), refs.get(reference));
return getRepoRootAsset().resolve(refs.get(reference));
} }


@Override @Override

+ 35
- 0
archiva-modules/archiva-base/archiva-storage-api/pom.xml View File

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>archiva-base</artifactId>
<groupId>org.apache.archiva</groupId>
<version>3.0.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

<artifactId>archiva-storage-api</artifactId>

<name>Archiva Base :: Storage API</name>

<properties>
<site.staging.base>${project.parent.parent.basedir}</site.staging.base>
</properties>


<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<basedir>${basedir}</basedir>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>

</project>

archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/RepositoryStorage.java → archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/RepositoryStorage.java View File

package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;


/* /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
import java.util.function.Consumer; import java.util.function.Consumer;


/** /**
* Repository storage gives access to the files and directories on the storage.
* The storage may be on a filesystem but can be any other storage system.
* *
* This API is low level repository access. If you use this API you must
* either have knowledge about the specific repository layout or use the structure
* This is the low level API to access artifacts in a repository. Each artifact is represented
* by one storage asset. Each asset can be accessed by a path that is independent on the underlying storage
* implementation. Paths always use '/' as path separator. The path is local to the repository and
* is unique for each asset.
* The storage API knows nothing about the repository layout or repository specific metadata.
* If you use this API you must either have knowledge about the specific repository layout or use the structure
* as it is, e.g. for browsing. * as it is, e.g. for browsing.
* *
* It is the decision of the implementation, if this API provides access to all elements, or
* just a selected view.
* The base implementation for the storage uses a directory structure on the local filesystem.
*
*
* It is the decision of the repository type specific implementation, if this API provides access to all elements, that
* is really stored or just a selected view.
* *
* Checking access is not part of this API. * Checking access is not part of this API.
*/ */
void removeAsset(StorageAsset asset) throws IOException; void removeAsset(StorageAsset asset) throws IOException;


/** /**
* Moves the asset to the given location and returns the asset object for the destination.
* Moves the asset to the given location and returns the asset object for the destination. Moves only assets that
* belong to the same storage instance. It will throw a IOException if the assets are from differents storage
* instances.
* *
* @param origin The original asset * @param origin The original asset
* @param destination The destination path pointing to the new asset. * @param destination The destination path pointing to the new asset.
StorageAsset moveAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException; StorageAsset moveAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException;


/** /**
* Moves the asset to the new path.
*
* Moves the asset to the given location and returns the asset object for the destination. Moves only assets that
* belong to the same storage instance. It will throw a IOException if the assets are from differents storage
* instances.
* *
* @param origin The original asset * @param origin The original asset
* @param destination The destination asset.
* @param destination The destination path.
* @param copyOptions The copy options (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING} * @param copyOptions The copy options (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING}
* @throws IOException If it was not possible to copy the asset. * @throws IOException If it was not possible to copy the asset.
*/ */
void moveAsset(StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException; void moveAsset(StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException;


/** /**
* Copies the given asset to the new destination.
* Copies the given asset to the new destination. Copies only assets that belong to the same storage instance.
* It will throw a IOException if the assets are from differents storage instances.
* *
* @param origin The original asset * @param origin The original asset
* @param destination The path to the new asset * @param destination The path to the new asset
StorageAsset copyAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException; StorageAsset copyAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException;


/** /**
* Copies the given asset to the new destination.
* Copies the given asset to the new destination. Copies only assets that belong to the same storage instance.
* It will throw a IOException if the assets are from differents storage instances.
* *
* @param origin The original asset * @param origin The original asset
* @param destination The path to the new asset * @param destination The path to the new asset

archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java → archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/StorageAsset.java View File

package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;


/* /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
import java.nio.file.Path; import java.nio.file.Path;
import java.time.Instant; import java.time.Instant;
import java.util.List; import java.util.List;
import java.util.function.Consumer;


/** /**
* A instance of this interface represents information about an specific asset in a repository.
* A instance of this interface represents information about a specific asset in a repository.
* The asset may be an real artifact, a directory, or a virtual asset. * The asset may be an real artifact, a directory, or a virtual asset.
* *
* Each asset has a unique path relative to the repository. * Each asset has a unique path relative to the repository.
* @return The asset, or <code>null</code>, if it does not exist. * @return The asset, or <code>null</code>, if it does not exist.
*/ */
StorageAsset getParent(); StorageAsset getParent();

/**
* Returns the asset relative to the given path
* @param toPath
* @return
*/
StorageAsset resolve(String toPath);
} }

+ 58
- 0
archiva-modules/archiva-base/archiva-storage-fs/pom.xml View File

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>archiva-base</artifactId>
<groupId>org.apache.archiva</groupId>
<version>3.0.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

<artifactId>archiva-storage-fs</artifactId>

<name>Archiva Base :: Storage Filesystem Based</name>

<properties>
<site.staging.base>${project.parent.parent.basedir}</site.staging.base>
</properties>

<dependencies>

<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-filelock</artifactId>
</dependency>

<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>


<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<basedir>${basedir}</basedir>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>

</project>

archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java → archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemAsset.java View File

package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;


/* /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one


FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath) { FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath) {
this.assetPath = assetPath; this.assetPath = assetPath;
this.relativePath = path;
this.relativePath = normalizePath(path);
this.setPermissionsForNew=false; this.setPermissionsForNew=false;
this.basePath = basePath; this.basePath = basePath;
this.storage = storage; this.storage = storage;
*/ */
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath) { public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath) {
this.assetPath = assetPath; this.assetPath = assetPath;
this.relativePath = path;
this.relativePath = normalizePath(path);
this.setPermissionsForNew = false; this.setPermissionsForNew = false;
this.basePath = null; this.basePath = null;
this.storage = storage; this.storage = storage;
*/ */
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory) { public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory) {
this.assetPath = assetPath; this.assetPath = assetPath;
this.relativePath = path;
this.relativePath = normalizePath(path);
this.directoryHint = directory; this.directoryHint = directory;
this.setPermissionsForNew = false; this.setPermissionsForNew = false;
this.basePath = basePath; this.basePath = basePath;
*/ */
public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) { public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) {
this.assetPath = assetPath; this.assetPath = assetPath;
this.relativePath = path;
this.relativePath = normalizePath(path);
this.directoryHint = directory; this.directoryHint = directory;
this.setPermissionsForNew = setPermissionsForNew; this.setPermissionsForNew = setPermissionsForNew;
this.basePath = basePath; this.basePath = basePath;
init(); init();
} }


private String normalizePath(String path) {
if (!path.startsWith("/")) {
return "/"+path;
} else {
return path;
}
}

private void init() { private void init() {


if (setPermissionsForNew) { if (setPermissionsForNew) {
} }
} }


@Override
public StorageAsset resolve(String toPath) {
return storage.getAsset(this.getPath()+"/"+toPath);
}



public void setDefaultFileAcls(List<AclEntry> acl) { public void setDefaultFileAcls(List<AclEntry> acl) {
defaultFileAcls = acl; defaultFileAcls = acl;

archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemStorage.java → archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemStorage.java View File

package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;


/* /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
} }


@Override @Override
public void consumeData( StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
public void consumeData(StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
{ {
final Path path = asset.getFilePath(); final Path path = asset.getFilePath();
try { try {
@Override @Override
public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
{ {
if (origin.getStorage()!=this) {
throw new IOException("The origin asset does not belong to this storage instance. Cannot copy between different storage instances.");
}
if (destination.getStorage()!=this) {
throw new IOException("The destination asset does not belong to this storage instance. Cannot copy between different storage instances.");
}
Files.move(origin.getFilePath(), destination.getFilePath(), copyOptions); Files.move(origin.getFilePath(), destination.getFilePath(), copyOptions);
} }


@Override @Override
public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
{ {
if (origin.getStorage()!=this) {
throw new IOException("The origin asset does not belong to this storage instance. Cannot copy between different storage instances.");
}
if (destination.getStorage()!=this) {
throw new IOException("The destination asset does not belong to this storage instance. Cannot copy between different storage instances.");
}
Path destinationPath = destination.getFilePath(); Path destinationPath = destination.getFilePath();
boolean overwrite = false; boolean overwrite = false;
for (int i=0; i<copyOptions.length; i++) { for (int i=0; i<copyOptions.length; i++) {

+ 346
- 0
archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/StorageUtil.java View File

package org.apache.archiva.repository.storage;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.archiva.common.filelock.FileLockException;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.*;
import java.util.HashSet;
import java.util.function.Consumer;

/**
 * Utility class for assets. Supports copying and moving assets between different storage
 * instances and consuming the asset tree recursively.
 *
 * @author Martin Stockhammer <martin_s@apache.org>
 */
public class StorageUtil
{
private static final int DEFAULT_BUFFER_SIZE = 4096;
private static final Logger log = LoggerFactory.getLogger(StorageUtil.class);

/**
 * Copies the source asset to the target. The assets may be from different RepositoryStorage
 * instances. If you know that source and target are from the same storage instance, the copy
 * method of that storage instance may be faster.
 *
 * @param source The source asset
 * @param target The target asset
 * @param locked If true, a read lock is set on the source and a write lock is set on the target.
 * @param copyOptions Copy options
 * @throws IOException If the copy fails, or a file lock could not be acquired.
 */
public static final void copyAsset( final StorageAsset source,
                                    final StorageAsset target,
                                    boolean locked,
                                    final CopyOption... copyOptions ) throws IOException
{
    if (source.isFileBased() && target.isFileBased()) {
        // Short cut for FS operations
        final Path sourcePath = source.getFilePath();
        final Path targetPath = target.getFilePath( );
        if (locked) {
            final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager();
            final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager();
            try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) )
            {
                Files.copy( sourcePath, targetPath, copyOptions );
            }
            // Both lock failures get the identical treatment -> multi-catch instead of
            // two duplicated catch blocks.
            catch ( FileLockException | FileLockTimeoutException e )
            {
                throw new IOException( e );
            }
        } else
        {
            Files.copy( sourcePath, targetPath, copyOptions );
        }
    } else {
        // Cross-storage copy: stream the source content into the target via channels.
        try {
            final RepositoryStorage sourceStorage = source.getStorage();
            final RepositoryStorage targetStorage = target.getStorage();
            sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
        } catch (IOException e) {
            throw e;
        } catch (Throwable e) {
            // wrapWriteFunction wraps checked exceptions as RuntimeException; unwrap the
            // original IOException where possible.
            Throwable cause = e.getCause();
            if (cause instanceof IOException) {
                throw (IOException)cause;
            } else
            {
                throw new IOException( e );
            }
        }
    }
}

/**
 * Moves an asset between different storage instances.
 * If you know that source and target are from the same storage instance, the move method of
 * that storage instance may be faster.
 *
 * @param source The source asset
 * @param target The target asset
 * @param locked If true, a lock is used for the move operation.
 * @param copyOptions Options for copying
 * @throws IOException If the move fails
 */
public static final void moveAsset(StorageAsset source, StorageAsset target, boolean locked, CopyOption... copyOptions) throws IOException
{
    // Filesystem shortcut: a plain rename/move, which is an atomic operation.
    if (source.isFileBased() && target.isFileBased()) {
        Files.move( source.getFilePath(), target.getFilePath(), copyOptions );
        return;
    }
    // Cross-storage move: stream the content over, then remove the source.
    try {
        final RepositoryStorage sourceStorage = source.getStorage();
        final RepositoryStorage targetStorage = target.getStorage();
        sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
        sourceStorage.removeAsset( source );
    } catch (IOException e) {
        throw e;
    } catch (Throwable e) {
        // Unwrap the IOException that wrapWriteFunction packed into a RuntimeException.
        final Throwable cause = e.getCause();
        if (cause instanceof IOException) {
            throw (IOException) cause;
        }
        throw new IOException( e );
    }
}

/**
 * Bridges a readable source channel into the target storage's write callback. Checked
 * exceptions are wrapped into RuntimeException so this can be used inside channel-consumer
 * lambdas; the public copy/move methods unwrap the cause again.
 */
private static final void wrapWriteFunction(ReadableByteChannel is, RepositoryStorage targetStorage, StorageAsset target, boolean locked) {
    try {
        targetStorage.writeDataToChannel( target, os -> copy(is, os), locked );
    } catch (RuntimeException e) {
        // Already unchecked (e.g. copy() wraps its IOException in a RuntimeException):
        // rethrow as-is instead of nesting a second RuntimeException, so the callers'
        // getCause() instanceof IOException check still finds the original IOException.
        throw e;
    } catch (Exception e) {
        throw new RuntimeException( e );
    }
}


/**
 * Copies all remaining bytes from the source channel to the target channel.
 * Dispatches to the FileChannel overloads when either side is a FileChannel;
 * otherwise transfers through a heap buffer. IOExceptions are rethrown unchecked.
 */
private static final void copy( final ReadableByteChannel is, final WritableByteChannel os ) {
    // Prefer the FileChannel transfer overloads when available.
    if (is instanceof FileChannel) {
        copy((FileChannel) is, os);
        return;
    }
    if (os instanceof FileChannel) {
        copy(is, (FileChannel) os);
        return;
    }
    try {
        final ByteBuffer transferBuffer = ByteBuffer.allocate(DEFAULT_BUFFER_SIZE);
        while (is.read(transferBuffer) != -1) {
            transferBuffer.flip();
            // A single write() need not drain the buffer; loop until it is empty.
            while (transferBuffer.hasRemaining()) {
                os.write(transferBuffer);
            }
            transferBuffer.clear();
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

/**
 * Copies the complete content of a FileChannel into the target channel using
 * {@link FileChannel#transferTo}, which may use an optimized transfer path.
 * IOExceptions are rethrown unchecked.
 */
private static final void copy( final FileChannel is, final WritableByteChannel os ) {
    try {
        final long byteCount = is.size();
        is.transferTo(0, byteCount, os);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

/**
 * Copies all bytes from the source channel into the target FileChannel using
 * {@link FileChannel#transferFrom} starting at position 0.
 * IOExceptions are rethrown unchecked.
 */
private static final void copy( final ReadableByteChannel is, final FileChannel os ) {
    try {
        // Long.MAX_VALUE: transfer until the source channel is exhausted.
        os.transferFrom(is, 0, Long.MAX_VALUE);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

/**
 * Runs the consumer function recursively on each asset found, starting at the base asset,
 * descending at most {@code maxDepth} levels.
 *
 * @param baseAsset The base asset where the traversal starts
 * @param consumer The consumer function applied to each found asset
 * @param depthFirst If true, the deepest elements are consumed first (post-order); otherwise pre-order.
 * @param maxDepth The maximum depth to recurse into. 0 means only the baseAsset is consumed, 1 the base asset and its children and so forth.
 * @throws IOException If listing the children of an asset fails.
 */
public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst, final int maxDepth) throws IOException {
    recurse(baseAsset, consumer, depthFirst, maxDepth, 0);
}

/**
 * Runs the consumer function recursively on each asset found, starting at the base asset.
 * The traversal descends to unlimited depth.
 *
 * @param baseAsset The base asset where the traversal starts
 * @param consumer The consumer function applied to each found asset
 * @param depthFirst If true, the deepest elements are consumed first (post-order); otherwise pre-order.
 * @throws IOException If listing the children of an asset fails.
 */
public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst) throws IOException {
    recurse(baseAsset, consumer, depthFirst, Integer.MAX_VALUE, 0);
}

/**
 * Runs the consumer function recursively on each asset found, starting at the base asset.
 * Traverses in pre-order (parents before children) to unlimited depth.
 *
 * @param baseAsset The base asset where the traversal starts
 * @param consumer The consumer function applied to each found asset
 * @throws IOException If listing the children of an asset fails.
 */
public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer) throws IOException {
    recurse(baseAsset, consumer, false, Integer.MAX_VALUE, 0);
}

/**
 * Recursive worker for the public {@code recurse} overloads.
 * Visits the current asset either before (pre-order) or after (post-order,
 * {@code depthFirst=true}) its children, and descends into container assets
 * only while {@code currentDepth} is below {@code maxDepth}.
 */
private static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst, final int maxDepth, final int currentDepth)
    throws IOException {
    // pre-order visit
    if (!depthFirst) {
        consumer.accept(baseAsset);
    }
    final boolean descend = baseAsset.isContainer() && currentDepth < maxDepth;
    if (descend) {
        for (StorageAsset child : baseAsset.list()) {
            recurse(child, consumer, depthFirst, maxDepth, currentDepth + 1);
        }
    }
    // post-order visit
    if (depthFirst) {
        consumer.accept(baseAsset);
    }
}

/**
 * Deletes the given asset and all child assets recursively.
 * Deletion failures of individual assets are logged and do not abort the
 * traversal (best-effort semantics).
 *
 * @param baseDir The base asset to remove.
 * @throws IOException if the traversal of the asset tree fails
 */
public static final void deleteRecursively(StorageAsset baseDir) throws IOException {
    // depth-first: children are removed before their parent container
    recurse(baseDir, a -> {
        try {
            a.getStorage().removeAsset(a);
        } catch (IOException e) {
            // keep the best-effort behavior, but do not discard the cause:
            // SLF4J logs a trailing Throwable argument with its stack trace
            log.error("Could not delete asset {}", a.getPath(), e);
        }
    }, true);
}

/**
 * Returns the file name extension of the given asset, i.e. the substring of the
 * asset name after the last occurrence of '.'. If the name contains no '.', the
 * empty string is returned.
 *
 * @param asset the asset whose name is inspected
 * @return the extension, or the empty string if none is present
 */
public static final String getExtension(StorageAsset asset) {
    final String assetName = asset.getName();
    return StringUtils.substringAfterLast( assetName, "." );
}

/**
 * Copies the content of the given asset to the local destination path.
 * File based assets are copied directly via {@link Files#copy}. For other
 * storage backends the asset data is streamed through a channel into the
 * destination file.
 *
 * @param asset the source asset
 * @param destination the local file to write to
 * @param copyOptions copy options; {@link StandardCopyOption#REPLACE_EXISTING}
 *                    overwrites an existing destination, otherwise the copy
 *                    fails if the destination already exists
 * @throws IOException if the copy fails
 */
public static final void copyToLocalFile(StorageAsset asset, Path destination, CopyOption... copyOptions) throws IOException {
    if (asset.isFileBased()) {
        Files.copy(asset.getFilePath(), destination, copyOptions);
    } else {
        try {
            HashSet<OpenOption> openOptions = new HashSet<>();
            for (CopyOption option : copyOptions) {
                if (option == StandardCopyOption.REPLACE_EXISTING) {
                    openOptions.add(StandardOpenOption.CREATE);
                    openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
                    openOptions.add(StandardOpenOption.WRITE);
                } else {
                    openOptions.add(StandardOpenOption.WRITE);
                    openOptions.add(StandardOpenOption.CREATE_NEW);
                }
            }
            if (openOptions.isEmpty()) {
                // Without options FileChannel.open() defaults to READ, which cannot
                // be written to. Mirror the Files.copy() default: create the file,
                // fail if it already exists.
                openOptions.add(StandardOpenOption.WRITE);
                openOptions.add(StandardOpenOption.CREATE_NEW);
            }
            asset.getStorage().consumeDataFromChannel(asset, channel -> {
                // try-with-resources: the destination channel was previously leaked
                try (FileChannel destinationChannel = FileChannel.open(destination, openOptions)) {
                    long position = 0;
                    long transferred;
                    // transferFrom() may move fewer bytes than requested; loop until EOF
                    while ((transferred = destinationChannel.transferFrom(channel, position, Long.MAX_VALUE - position)) > 0) {
                        position += transferred;
                    }
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }, false);
        } catch (Throwable e) {
            // unwrap the IOException rethrown from the consumer lambda
            if (e.getCause() instanceof IOException) {
                throw (IOException) e.getCause();
            } else {
                throw new IOException(e);
            }
        }
    }
}

/**
 * Holder for a filesystem path together with a flag that tells whether the path
 * points to a temporary copy of the asset data. If {@link #isTmpFile()} returns
 * {@code true}, the caller is responsible for deleting the file after use.
 */
public static class PathInformation {
    // the local filesystem path holding the asset data
    final Path path ;
    // true, if the path is a temporary copy that the caller must delete
    final boolean tmpFile;

    PathInformation(Path path, boolean tmpFile) {
        this.path = path;
        this.tmpFile = tmpFile;
    }

    /**
     * Returns the local filesystem path.
     * @return the path
     */
    public Path getPath() {
        return path;
    }

    /**
     * Tells whether the path is a temporary file created on the fly.
     * @return {@code true}, if the caller must delete the file after use
     */
    public boolean isTmpFile() {
        return tmpFile;
    }

}

/**
 * Provides a local filesystem path that contains the data of the given asset.
 * For file based assets the backing file itself is returned. Otherwise the asset
 * data is copied into a newly created temporary file; callers should check
 * {@link PathInformation#isTmpFile()} and delete the temporary file after use.
 *
 * @param asset the asset whose data should be made available as a local path
 * @return the path information, flagging whether the path is a temporary copy
 * @throws IOException if the asset does not exist or the copy fails
 */
public static final PathInformation getAssetDataAsPath(StorageAsset asset) throws IOException {
    if (!asset.exists()) {
        // include the asset path so the failure can be traced
        throw new IOException("Asset does not exist: " + asset.getPath());
    }
    if (asset.isFileBased()) {
        return new PathInformation(asset.getFilePath(), false);
    } else {
        Path tmpFile = Files.createTempFile(asset.getName(), getExtension(asset));
        copyToLocalFile(asset, tmpFile, StandardCopyOption.REPLACE_EXISTING);
        return new PathInformation(tmpFile, true);
    }
}

}

+ 203
- 0
archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemAssetTest.java View File

package org.apache.archiva.repository.storage;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;

/**
 * Unit tests for {@code FilesystemAsset}. Each test runs against a fresh
 * temporary directory containing a single file, created in {@link #init()}.
 */
public class FilesystemAssetTest {

    Path assetPathFile;
    Path assetPathDir;
    FilesystemStorage filesystemStorage;

    @Before
    public void init() throws IOException {
        assetPathDir = Files.createTempDirectory("assetDir");
        assetPathFile = Files.createTempFile(assetPathDir, "assetFile", "dat");
        filesystemStorage = new FilesystemStorage(assetPathDir, new DefaultFileLockManager());
    }

    @After
    public void cleanup() {

        try {
            Files.deleteIfExists(assetPathFile);
        } catch (IOException e) {
            e.printStackTrace();
        }
        FileUtils.deleteQuietly(assetPathDir.toFile());
    }


    @Test
    public void getPath() {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, assetPathFile.getFileName().toString(), assetPathFile);
        Assert.assertEquals("/" + assetPathFile.getFileName().toString(), asset.getPath());
    }

    @Test
    public void getName() {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/" + assetPathFile.getFileName().toString(), assetPathFile);
        Assert.assertEquals(assetPathFile.getFileName().toString(), asset.getName());

    }

    @Test
    public void getModificationTime() throws IOException {
        Instant modTime = Files.getLastModifiedTime(assetPathFile).toInstant();
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test123", assetPathFile);
        // assertEquals gives a useful failure message, unlike assertTrue(equals)
        Assert.assertEquals(modTime, asset.getModificationTime());
    }

    @Test
    public void isContainer() {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1323", assetPathFile);
        Assert.assertFalse(asset.isContainer());
        FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
        Assert.assertTrue(asset2.isContainer());
    }

    @Test
    public void list() throws IOException {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
        Assert.assertEquals(0, asset.list().size());

        FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1235", assetPathDir);
        Assert.assertEquals(1, asset2.list().size());
        Path f1 = Files.createTempFile(assetPathDir, "testfile", "dat");
        Path f2 = Files.createTempFile(assetPathDir, "testfile", "dat");
        Path d1 = Files.createTempDirectory(assetPathDir, "testdir");
        try {
            Assert.assertEquals(4, asset2.list().size());
            Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f1.getFileName().toString())));
            Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f2.getFileName().toString())));
            Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(d1.getFileName().toString())));
        } finally {
            // clean up the extra entries even if an assertion above fails
            Files.deleteIfExists(f1);
            Files.deleteIfExists(f2);
            Files.deleteIfExists(d1);
        }
    }

    @Test
    public void getSize() throws IOException {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
        Assert.assertEquals(0, asset.getSize());

        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        Assert.assertTrue(asset.getSize() >= 6);


    }

    @Test
    public void getData() throws IOException {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        try (InputStream is = asset.getReadStream()) {
            Assert.assertEquals("abcdef", IOUtils.toString(is, "ASCII"));
        }

    }

    @Test
    public void getDataExceptionOnDir() throws IOException {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        // try-with-resources closes the stream should the expected exception not occur
        try (InputStream is = asset.getReadStream()) {
            Assert.fail("Exception expected for data on dir");
        } catch (IOException e) {
            // expected
        }

    }

    @Test
    public void writeData() throws IOException {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        try (OutputStream os = asset.getWriteStream(true)) {
            IOUtils.write("test12345", os, "ASCII");
        }
        Assert.assertEquals("test12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
    }

    @Test
    public void writeDataAppend() throws IOException {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
        try (OutputStream os = asset.getWriteStream(false)) {
            IOUtils.write("test12345", os, "ASCII");
        }
        Assert.assertEquals("abcdeftest12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
    }

    @Test
    public void writeDataExceptionOnDir() throws IOException {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
        // try-with-resources closes the stream should the expected exception not occur
        try (OutputStream os = asset.getWriteStream(true)) {
            Assert.fail("Writing to a directory should throw a IOException");
        } catch (IOException e) {
            // expected
        }
    }

    @Test
    public void storeDataFile() throws IOException {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
        Path dataFile = Files.createTempFile("testdata", "dat");
        try {
            try (OutputStream os = Files.newOutputStream(dataFile)) {
                IOUtils.write("testkdkdkd", os, "ASCII");
            }
            asset.replaceDataFromFile(dataFile);
            Assert.assertEquals("testkdkdkd", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
        } finally {
            // the temporary data file was previously leaked
            Files.deleteIfExists(dataFile);
        }
    }

    @Test
    public void exists() {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
        Assert.assertTrue(asset.exists());
        FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1234", Paths.get("abcdefgkdkdk"));
        Assert.assertFalse(asset2.exists());

    }

    @Test
    public void getFilePath() {
        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
        Assert.assertEquals(assetPathFile, asset.getFilePath());
    }
}

archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemStorageTest.java → archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemStorageTest.java View File

package org.apache.archiva.repository.content;
package org.apache.archiva.repository.storage;


/* /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.junit.After; import org.junit.After;
import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;


import java.io.OutputStream; import java.io.OutputStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.StandardCopyOption;


import static org.junit.Assert.*; import static org.junit.Assert.*;


Files.createDirectories(baseDir.resolve("dir2")); Files.createDirectories(baseDir.resolve("dir2"));
file1 = Files.createFile(baseDir.resolve("dir1/testfile1.dat")); file1 = Files.createFile(baseDir.resolve("dir1/testfile1.dat"));
dir1 = Files.createDirectories(baseDir.resolve("dir1/testdir")); dir1 = Files.createDirectories(baseDir.resolve("dir1/testdir"));
file1Asset = new FilesystemAsset("/dir1/testfile1.dat", file1);
dir1Asset = new FilesystemAsset("/dir1/testdir", dir1);
file1Asset = new FilesystemAsset(fsStorage, "/dir1/testfile1.dat", file1);
dir1Asset = new FilesystemAsset(fsStorage, "/dir1/testdir", dir1);
} }


private class StringResult { private class StringResult {


@After @After
public void cleanup() { public void cleanup() {
try {
Files.deleteIfExists(file1);
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(dir1);
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(baseDir.resolve("dir1"));
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(baseDir.resolve("dir2"));
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.deleteIfExists(baseDir);
} catch (IOException e) {
e.printStackTrace();
}
FileUtils.deleteQuietly(file1.toFile());
FileUtils.deleteQuietly(dir1.toFile());
FileUtils.deleteQuietly(baseDir.resolve("dir1").toFile());
FileUtils.deleteQuietly(baseDir.resolve("dir2").toFile());
FileUtils.deleteQuietly(baseDir.toFile());
} }




} }
StringResult result = new StringResult(); StringResult result = new StringResult();
fsStorage.consumeData(file1Asset, is -> consume(is, result), false ); fsStorage.consumeData(file1Asset, is -> consume(is, result), false );
assertEquals("abcdefghijkl" ,result.getData());
Assert.assertEquals("abcdefghijkl" ,result.getData());
} }


private void consume(InputStream is, StringResult result) { private void consume(InputStream is, StringResult result) {
@Test @Test
public void getAsset() { public void getAsset() {
StorageAsset asset = fsStorage.getAsset("/dir1/testfile1.dat"); StorageAsset asset = fsStorage.getAsset("/dir1/testfile1.dat");
assertEquals(file1, asset.getFilePath());
Assert.assertEquals(file1, asset.getFilePath());
} }


@Test @Test
public void addAsset() { public void addAsset() {
StorageAsset newAsset = fsStorage.addAsset("dir2/test", false); StorageAsset newAsset = fsStorage.addAsset("dir2/test", false);
assertNotNull(newAsset);
assertFalse(newAsset.isContainer());
assertFalse(newAsset.exists());
Assert.assertNotNull(newAsset);
Assert.assertFalse(newAsset.isContainer());
Assert.assertFalse(newAsset.exists());


StorageAsset newDirAsset = fsStorage.addAsset("/dir2/testdir2", true); StorageAsset newDirAsset = fsStorage.addAsset("/dir2/testdir2", true);
assertNotNull(newDirAsset);
assertTrue(newDirAsset.isContainer());
assertFalse(newDirAsset.exists());
Assert.assertNotNull(newDirAsset);
Assert.assertTrue(newDirAsset.isContainer());
Assert.assertFalse(newDirAsset.exists());
} }


@Test @Test
public void removeAsset() throws IOException { public void removeAsset() throws IOException {
assertTrue(Files.exists(file1));
Assert.assertTrue(Files.exists(file1));
fsStorage.removeAsset(file1Asset); fsStorage.removeAsset(file1Asset);
assertFalse(Files.exists(file1));
Assert.assertFalse(Files.exists(file1));


assertTrue(Files.exists(dir1));
Assert.assertTrue(Files.exists(dir1));
fsStorage.removeAsset(dir1Asset); fsStorage.removeAsset(dir1Asset);
assertFalse(Files.exists(dir1));
Assert.assertFalse(Files.exists(dir1));
} }


@Test @Test
Path newFile=null; Path newFile=null;
Path newDir=null; Path newDir=null;
try { try {
assertTrue(Files.exists(file1));
Assert.assertTrue(Files.exists(file1));
try (OutputStream os = Files.newOutputStream(file1)) { try (OutputStream os = Files.newOutputStream(file1)) {
IOUtils.write("testakdkkdkdkdk", os, "ASCII"); IOUtils.write("testakdkkdkdkdk", os, "ASCII");
} }
long fileSize = Files.size(file1); long fileSize = Files.size(file1);
fsStorage.moveAsset(file1Asset, "/dir2/testfile2.dat"); fsStorage.moveAsset(file1Asset, "/dir2/testfile2.dat");
assertFalse(Files.exists(file1));
Assert.assertFalse(Files.exists(file1));
newFile = baseDir.resolve("dir2/testfile2.dat"); newFile = baseDir.resolve("dir2/testfile2.dat");
assertTrue(Files.exists(newFile));
assertEquals(fileSize, Files.size(newFile));
Assert.assertTrue(Files.exists(newFile));
Assert.assertEquals(fileSize, Files.size(newFile));




assertTrue(Files.exists(dir1));
Assert.assertTrue(Files.exists(dir1));
newDir = baseDir.resolve("dir2/testdir2"); newDir = baseDir.resolve("dir2/testdir2");
fsStorage.moveAsset(dir1Asset, "dir2/testdir2"); fsStorage.moveAsset(dir1Asset, "dir2/testdir2");
assertFalse(Files.exists(dir1));
assertTrue(Files.exists(newDir));
Assert.assertFalse(Files.exists(dir1));
Assert.assertTrue(Files.exists(newDir));
} finally { } finally {
if (newFile!=null) Files.deleteIfExists(newFile); if (newFile!=null) Files.deleteIfExists(newFile);
if (newDir!=null) Files.deleteIfExists(newDir); if (newDir!=null) Files.deleteIfExists(newDir);
Path newFile=null; Path newFile=null;
Path newDir=null; Path newDir=null;
try { try {
assertTrue(Files.exists(file1));
Assert.assertTrue(Files.exists(file1));
try (OutputStream os = Files.newOutputStream(file1)) { try (OutputStream os = Files.newOutputStream(file1)) {
IOUtils.write("testakdkkdkdkdk", os, "ASCII"); IOUtils.write("testakdkkdkdkdk", os, "ASCII");
} }
long fileSize = Files.size(file1); long fileSize = Files.size(file1);
fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat");
assertTrue(Files.exists(file1));
assertEquals(fileSize, Files.size(file1));
fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat", StandardCopyOption.REPLACE_EXISTING);
Assert.assertTrue(Files.exists(file1));
Assert.assertEquals(fileSize, Files.size(file1));
newFile = baseDir.resolve("dir2/testfile2.dat"); newFile = baseDir.resolve("dir2/testfile2.dat");
assertTrue(Files.exists(newFile));
assertEquals(fileSize, Files.size(newFile));

Assert.assertTrue(Files.exists(newFile));
Assert.assertEquals(fileSize, Files.size(newFile));

try {
fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat");
Assert.assertTrue("IOException should be thrown (File exists)", false);
} catch (IOException ex) {
Assert.assertTrue("Exception must contain 'file exists'", ex.getMessage().contains("file exists"));
}


assertTrue(Files.exists(dir1));
Assert.assertTrue(Files.exists(dir1));
newDir = baseDir.resolve("dir2/testdir2"); newDir = baseDir.resolve("dir2/testdir2");
fsStorage.copyAsset(dir1Asset, "dir2/testdir2"); fsStorage.copyAsset(dir1Asset, "dir2/testdir2");
assertTrue(Files.exists(dir1));
assertTrue(Files.exists(newDir));
Assert.assertTrue(Files.exists(dir1));
Assert.assertTrue(Files.exists(newDir));
} finally { } finally {
if (newFile!=null) Files.deleteIfExists(newFile); if (newFile!=null) Files.deleteIfExists(newFile);
if (newDir!=null) Files.deleteIfExists(newDir);
if (newDir!=null) FileUtils.deleteQuietly(newDir.toFile());
} }
} }
} }

+ 2
- 0
archiva-modules/archiva-base/pom.xml View File

<module>archiva-repository-scanner</module> <module>archiva-repository-scanner</module>
<module>archiva-repository-admin</module> <module>archiva-repository-admin</module>
<module>archiva-security-common</module> <module>archiva-security-common</module>
<module>archiva-storage-api</module>
<module>archiva-storage-fs</module>
</modules> </modules>
</project> </project>

+ 1
- 2
archiva-modules/archiva-maven/archiva-maven-converter/src/main/java/org/apache/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java View File

import org.apache.archiva.consumers.KnownRepositoryContentConsumer; import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.converter.RepositoryConversionException; import org.apache.archiva.converter.RepositoryConversionException;
import org.apache.archiva.repository.BasicManagedRepository; import org.apache.archiva.repository.BasicManagedRepository;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.content.maven2.ManagedDefaultRepositoryContent; import org.apache.archiva.repository.content.maven2.ManagedDefaultRepositoryContent;
import org.apache.archiva.repository.scanner.RepositoryScanner; import org.apache.archiva.repository.scanner.RepositoryScanner;
import org.apache.archiva.repository.scanner.RepositoryScannerException; import org.apache.archiva.repository.scanner.RepositoryScannerException;
import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.MavenArtifactRepository; import org.apache.maven.artifact.repository.MavenArtifactRepository;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;

+ 27
- 2
archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexContext.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.index.context.IndexingContext; import org.apache.maven.index.context.IndexingContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


import java.io.IOException; import java.io.IOException;
import java.net.URI; import java.net.URI;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.NoSuchFileException; import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.sql.Date; import java.sql.Date;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.Set; import java.util.Set;
*/ */
public class MavenIndexContext implements ArchivaIndexingContext { public class MavenIndexContext implements ArchivaIndexingContext {


private static final Logger log = LoggerFactory.getLogger(ArchivaIndexingContext.class);

private IndexingContext delegate; private IndexingContext delegate;
private Repository repository; private Repository repository;
private StorageAsset dir = null;


protected MavenIndexContext(Repository repository, IndexingContext delegate) { protected MavenIndexContext(Repository repository, IndexingContext delegate) {
this.delegate = delegate; this.delegate = delegate;
} }


@Override @Override
public URI getPath() {
return delegate.getIndexDirectoryFile().toURI();
public StorageAsset getPath() {
if (dir==null) {
StorageAsset repositoryDirAsset = repository.getAsset("");
Path repositoryDir = repositoryDirAsset.getFilePath().toAbsolutePath();
Path indexDir = delegate.getIndexDirectoryFile().toPath();
if (indexDir.startsWith(repositoryDir)) {
dir = repository.getAsset(repositoryDir.relativize(indexDir).toString());
} else {
try {
FilesystemStorage storage = new FilesystemStorage(indexDir, new DefaultFileLockManager());
dir = storage.getAsset("");
} catch (IOException e) {
log.error("Error occured while creating storage for index dir");
}
}
}
return dir;
} }


@Override @Override

+ 42
- 43
archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexManager.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration; import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.indexer.IndexCreationFailedException; import org.apache.archiva.indexer.IndexCreationFailedException;
import org.apache.archiva.indexer.IndexUpdateFailedException; import org.apache.archiva.indexer.IndexUpdateFailedException;
import org.apache.archiva.indexer.UnsupportedBaseContextException; import org.apache.archiva.indexer.UnsupportedBaseContextException;
import org.apache.archiva.indexer.merger.IndexMergerException;
import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
import org.apache.archiva.proxy.ProxyRegistry; import org.apache.archiva.proxy.ProxyRegistry;
import org.apache.archiva.proxy.maven.WagonFactory; import org.apache.archiva.proxy.maven.WagonFactory;
import org.apache.archiva.proxy.maven.WagonFactoryException; import org.apache.archiva.proxy.maven.WagonFactoryException;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException; import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.index.ArtifactContext; import org.apache.maven.index.ArtifactContext;
import org.apache.maven.index.ArtifactContextProducer; import org.apache.maven.index.ArtifactContextProducer;
private ProxyRegistry proxyRegistry; private ProxyRegistry proxyRegistry;




private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
private ConcurrentSkipListSet<StorageAsset> activeContexts = new ConcurrentSkipListSet<>( );


private static final int WAIT_TIME = 100; private static final int WAIT_TIME = 100;
private static final int MAX_WAIT = 10; private static final int MAX_WAIT = 10;
return context.getBaseContext( IndexingContext.class ); return context.getBaseContext( IndexingContext.class );
} }


private Path getIndexPath( ArchivaIndexingContext ctx )
private StorageAsset getIndexPath( ArchivaIndexingContext ctx )
{ {
return PathUtil.getPathFromUri( ctx.getPath( ) );
return ctx.getPath( );
} }


@FunctionalInterface @FunctionalInterface
{ {
throw new IndexUpdateFailedException( "Maven index is not supported by this context", e ); throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
} }
final Path ctxPath = getIndexPath( context );
final StorageAsset ctxPath = getIndexPath( context );
int loop = MAX_WAIT; int loop = MAX_WAIT;
boolean active = false; boolean active = false;
while ( loop-- > 0 && !active ) while ( loop-- > 0 && !active )
@Override @Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{ {
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> { executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try { try {
indexer.addArtifactsToIndex(artifacts, indexingContext); indexer.addArtifactsToIndex(artifacts, indexingContext);
} catch (IOException e) { } catch (IOException e) {
@Override @Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{ {
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> { executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try { try {
indexer.deleteArtifactsFromIndex(artifacts, indexingContext); indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
} catch (IOException e) { } catch (IOException e) {
throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( ) throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
+ ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e ); + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
} }
MavenIndexContext context = new MavenIndexContext( repository, mvnCtx );


return context;
return new MavenIndexContext( repository, mvnCtx );
} }


@Override @Override
log.warn("Index close failed"); log.warn("Index close failed");
} }
try { try {
FileUtils.deleteDirectory(Paths.get(context.getPath()));
StorageUtil.deleteRecursively(context.getPath());
} catch (IOException e) { } catch (IOException e) {
throw new IndexUpdateFailedException("Could not delete index files"); throw new IndexUpdateFailedException("Could not delete index files");
} }
} }
} }


private StorageAsset getIndexPath(URI indexDir, Path repoDir, String defaultDir) throws IOException
private StorageAsset getIndexPath(URI indexDirUri, RepositoryStorage storage, String defaultDir) throws IOException
{ {
String indexPath = indexDir.getPath();
Path indexDirectory = null;
if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
Path indexDirectory;
Path repositoryPath = storage.getAsset("").getFilePath().toAbsolutePath();
StorageAsset indexDir;
if ( ! StringUtils.isEmpty(indexDirUri.toString( ) ) )
{ {


indexDirectory = PathUtil.getPathFromUri( indexDir );
indexDirectory = PathUtil.getPathFromUri( indexDirUri );
// not absolute so create it in repository directory // not absolute so create it in repository directory
if ( indexDirectory.isAbsolute( ) )
if ( indexDirectory.isAbsolute( ) && !indexDirectory.startsWith(repositoryPath))
{ {
indexPath = indexDirectory.getFileName().toString();
if (storage instanceof FilesystemStorage) {
FilesystemStorage fsStorage = (FilesystemStorage) storage;
FilesystemStorage indexStorage = new FilesystemStorage(indexDirectory.getParent(), fsStorage.getFileLockManager());
indexDir = indexStorage.getAsset(indexDirectory.getFileName().toString());
} else {
throw new IOException("The given storage is not file based.");
}
} else if (indexDirectory.isAbsolute()) {
indexDir = storage.getAsset(repositoryPath.relativize(indexDirectory).toString());
} }
else else
{ {
indexDirectory = repoDir.resolve( indexDirectory );
indexDir = storage.getAsset(indexDirectory.toString());
} }
} }
else else
{ {
indexDirectory = repoDir.resolve( defaultDir );
indexPath = defaultDir;
indexDir = storage.getAsset( defaultDir );
} }


if ( !Files.exists( indexDirectory ) )
if ( !indexDir.exists() )
{ {
Files.createDirectories( indexDirectory );
indexDir.create();
} }
return new FilesystemAsset( indexPath, indexDirectory);
return indexDir;
} }


private StorageAsset getIndexPath( Repository repo) throws IOException { private StorageAsset getIndexPath( Repository repo) throws IOException {
IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get(); IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
return getIndexPath( icf.getIndexPath(), repo.getAsset( "" ).getFilePath(), DEFAULT_INDEX_PATH);
return getIndexPath( icf.getIndexPath(), repo, DEFAULT_INDEX_PATH);
} }


private StorageAsset getPackedIndexPath(Repository repo) throws IOException { private StorageAsset getPackedIndexPath(Repository repo) throws IOException {
IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get(); IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
return getIndexPath(icf.getPackedIndexPath(), repo.getAsset( "" ).getFilePath(), DEFAULT_PACKED_INDEX_PATH);
return getIndexPath(icf.getPackedIndexPath(), repo, DEFAULT_PACKED_INDEX_PATH);
} }


private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
{ {
Path appServerBase = archivaConfiguration.getAppServerBaseDir( );

String contextKey = "remote-" + remoteRepository.getId( ); String contextKey = "remote-" + remoteRepository.getId( );




Files.createDirectories( repoDir ); Files.createDirectories( repoDir );
} }


StorageAsset indexDirectory = null;
StorageAsset indexDirectory;


// is there configured indexDirectory ? // is there configured indexDirectory ?
if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) ) if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
} }
} }


StorageAsset indexDirectory = null;
StorageAsset indexDirectory;


if ( repository.supportsFeature( IndexCreationFeature.class ) ) if ( repository.supportsFeature( IndexCreationFeature.class ) )
{ {
} }


@Override @Override
public void connect( String id, String url )
throws IOException
{
public void connect( String id, String url ) {
//no op //no op
} }


@Override @Override
public void disconnect( )
throws IOException
{
public void disconnect( ) {
// no op // no op
} }


@Override @Override
public InputStream retrieve( String name ) public InputStream retrieve( String name )
throws IOException, FileNotFoundException
{
throws IOException {
try try
{ {
log.info( "index update retrieve file, name:{}", name ); log.info( "index update retrieve file, name:{}", name );

+ 1
- 1
archiva-modules/archiva-maven/archiva-maven-indexer/src/test/java/org/apache/archiva/indexer/maven/MavenIndexManagerTest.java View File

assertNotNull(ctx); assertNotNull(ctx);
assertEquals(repository, ctx.getRepository()); assertEquals(repository, ctx.getRepository());
assertEquals("test-repo", ctx.getId()); assertEquals("test-repo", ctx.getId());
assertEquals(indexPath.toAbsolutePath(), Paths.get(ctx.getPath()).toAbsolutePath());
assertEquals(indexPath.toAbsolutePath(), ctx.getPath().getFilePath().toAbsolutePath());
assertTrue(Files.exists(indexPath)); assertTrue(Files.exists(indexPath));
List<Path> li = Files.list(indexPath).collect(Collectors.toList()); List<Path> li = Files.list(indexPath).collect(Collectors.toList());
assertTrue(li.size()>0); assertTrue(li.size()>0);

+ 4
- 0
archiva-modules/archiva-maven/archiva-maven-metadata/pom.xml View File

<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId> <artifactId>archiva-model</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-xml-tools</artifactId> <artifactId>archiva-xml-tools</artifactId>

+ 12
- 20
archiva-modules/archiva-maven/archiva-maven-metadata/src/main/java/org/apache/archiva/maven2/metadata/MavenMetadataReader.java View File

import org.apache.archiva.model.ArchivaRepositoryMetadata; import org.apache.archiva.model.ArchivaRepositoryMetadata;
import org.apache.archiva.model.Plugin; import org.apache.archiva.model.Plugin;
import org.apache.archiva.model.SnapshotVersion; import org.apache.archiva.model.SnapshotVersion;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException; import org.apache.archiva.xml.XMLException;
import org.apache.archiva.xml.XMLReader; import org.apache.archiva.xml.XMLReader;
import org.apache.commons.lang.math.NumberUtils; import org.apache.commons.lang.math.NumberUtils;


private static final Logger log = LoggerFactory.getLogger( MavenMetadataReader.class ); private static final Logger log = LoggerFactory.getLogger( MavenMetadataReader.class );


public static ArchivaRepositoryMetadata read(StorageAsset metadataFile) throws XMLException, IOException {
if (metadataFile.isFileBased()) {
return read(metadataFile.getFilePath());
} else {
throw new IOException("StorageAsset is not file based");
}
}

/** /**
* Read and return the {@link org.apache.archiva.model.ArchivaRepositoryMetadata} object from the provided xml file. * Read and return the {@link org.apache.archiva.model.ArchivaRepositoryMetadata} object from the provided xml file.
* *
* @throws XMLException * @throws XMLException
*/ */
public static ArchivaRepositoryMetadata read( Path metadataFile ) public static ArchivaRepositoryMetadata read( Path metadataFile )
throws XMLException
{
throws XMLException, IOException {


XMLReader xml = new XMLReader( "metadata", metadataFile ); XMLReader xml = new XMLReader( "metadata", metadataFile );
// invoke this to remove namespaces, see MRM-1136 // invoke this to remove namespaces, see MRM-1136
metadata.setArtifactId( xml.getElementText( "//metadata/artifactId" ) ); metadata.setArtifactId( xml.getElementText( "//metadata/artifactId" ) );
metadata.setVersion( xml.getElementText( "//metadata/version" ) ); metadata.setVersion( xml.getElementText( "//metadata/version" ) );
Date modTime; Date modTime;
try
{
modTime = new Date(Files.getLastModifiedTime( metadataFile ).toMillis( ));
}
catch ( IOException e )
{
modTime = new Date();
log.error("Could not read modification time of {}", metadataFile);
}
modTime = new Date(Files.getLastModifiedTime(metadataFile).toMillis());
metadata.setFileLastModified( modTime ); metadata.setFileLastModified( modTime );
try
{
metadata.setFileSize( Files.size( metadataFile ) );
}
catch ( IOException e )
{
metadata.setFileSize( 0 );
log.error("Could not read file size of {}", metadataFile);
}
metadata.setFileSize( Files.size(metadataFile) );


metadata.setLastUpdated( xml.getElementText( "//metadata/versioning/lastUpdated" ) ); metadata.setLastUpdated( xml.getElementText( "//metadata/versioning/lastUpdated" ) );
metadata.setLatestVersion( xml.getElementText( "//metadata/versioning/latest" ) ); metadata.setLatestVersion( xml.getElementText( "//metadata/versioning/latest" ) );

+ 6
- 6
archiva-modules/archiva-maven/archiva-maven-proxy/src/main/java/org/apache/archiva/proxy/maven/MavenRepositoryProxyHandler.java View File

import org.apache.archiva.proxy.model.NetworkProxy; import org.apache.archiva.proxy.model.NetworkProxy;
import org.apache.archiva.proxy.model.ProxyConnector; import org.apache.archiva.proxy.model.ProxyConnector;
import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.wagon.ConnectionException; import org.apache.maven.wagon.ConnectionException;
import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.ResourceDoesNotExistException;
* @throws NotModifiedException * @throws NotModifiedException
*/ */
protected void transferResources( ProxyConnector connector, RemoteRepositoryContent remoteRepository, protected void transferResources( ProxyConnector connector, RemoteRepositoryContent remoteRepository,
Path tmpResource, Path[] checksumFiles, String url, String remotePath, StorageAsset resource,
StorageAsset tmpResource, StorageAsset[] checksumFiles, String url, String remotePath, StorageAsset resource,
Path workingDirectory, ManagedRepositoryContent repository ) Path workingDirectory, ManagedRepositoryContent repository )
throws ProxyException, NotModifiedException { throws ProxyException, NotModifiedException {
Wagon wagon = null; Wagon wagon = null;
// to // to
// save on connections since md5 is rarely used // save on connections since md5 is rarely used
for (int i=0; i<checksumFiles.length; i++) { for (int i=0; i<checksumFiles.length; i++) {
String ext = "."+StringUtils.substringAfterLast( checksumFiles[i].getFileName( ).toString( ), "." );
String ext = "."+StringUtils.substringAfterLast(checksumFiles[i].getName( ), "." );
transferChecksum(wagon, remoteRepository, remotePath, repository, resource.getFilePath(), ext, transferChecksum(wagon, remoteRepository, remotePath, repository, resource.getFilePath(), ext,
checksumFiles[i]);
checksumFiles[i].getFilePath());
} }
} }
} catch (NotFoundException e) { } catch (NotFoundException e) {


protected void transferArtifact(Wagon wagon, RemoteRepositoryContent remoteRepository, String remotePath, protected void transferArtifact(Wagon wagon, RemoteRepositoryContent remoteRepository, String remotePath,
ManagedRepositoryContent repository, Path resource, Path tmpDirectory, ManagedRepositoryContent repository, Path resource, Path tmpDirectory,
Path destFile)
StorageAsset destFile)
throws ProxyException { throws ProxyException {
transferSimpleFile(wagon, remoteRepository, remotePath, repository, resource, destFile);
transferSimpleFile(wagon, remoteRepository, remotePath, repository, resource, destFile.getFilePath());
} }


/** /**

+ 7
- 6
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/CacheFailuresTransferTest.java View File

import org.apache.archiva.policies.ReleasesPolicy; import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy; import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.policies.urlcache.UrlFailureCache; import org.apache.archiva.policies.urlcache.UrlFailureCache;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.easymock.EasyMock; import org.easymock.EasyMock;
import org.junit.Test; import org.junit.Test;


wagonMockControl.replay(); wagonMockControl.replay();


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


wagonMockControl.verify(); wagonMockControl.verify();


downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
wagonMockControl.verify(); wagonMockControl.verify();


assertNotDownloaded( downloadedFile);
assertNotDownloaded( downloadedFile.getFilePath());
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }




wagonMockControl.replay(); wagonMockControl.replay();


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


wagonMockControl.verify(); wagonMockControl.verify();




wagonMockControl.verify(); wagonMockControl.verify();


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


saveConnector( ID_DEFAULT_MANAGED, "proxied2", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied2", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.YES, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.YES, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


// Validate that file actually came from proxied2 (as intended). // Validate that file actually came from proxied2 (as intended).
Path proxied2File = Paths.get( REPOPATH_PROXIED2, path ); Path proxied2File = Paths.get( REPOPATH_PROXIED2, path );
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }



+ 38
- 37
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ChecksumTransferTest.java View File

import org.apache.archiva.policies.ChecksumPolicy; import org.apache.archiva.policies.ChecksumPolicy;
import org.apache.archiva.policies.ReleasesPolicy; import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy; import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.easymock.EasyMock; import org.easymock.EasyMock;
import org.junit.Test; import org.junit.Test;
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, true ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, true );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNull( downloadedFile ); assertNull( downloadedFile );
} }
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get( REPOPATH_PROXIED1, path ); Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar", assertChecksums( expectedFile, "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar",
"e58f30c6a150a2e843552438d18e15cb *get-checksum-both-right-1.0.jar" ); "e58f30c6a150a2e843552438d18e15cb *get-checksum-both-right-1.0.jar" );
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get( REPOPATH_PROXIED1, path ); Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar", assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
null ); null );
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get( REPOPATH_PROXIED1, path ); Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" ); assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
} }
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get( REPOPATH_PROXIED1, path ); Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, null ); assertChecksums( expectedFile, null, null );
} }
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get( REPOPATH_PROXIED1, path ); Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" ); assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" );
} }
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertChecksums( expectedFile, null, null ); assertChecksums( expectedFile, null, null );
} }


saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get( REPOPATH_PROXIED1, path ); Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "4ec20a12dc91557330bd0b39d1805be5e329ae56 get-checksum-both-bad-1.0.jar", assertChecksums( expectedFile, "4ec20a12dc91557330bd0b39d1805be5e329ae56 get-checksum-both-bad-1.0.jar",
"a292491a35925465e693a44809a078b5 get-checksum-both-bad-1.0.jar" ); "a292491a35925465e693a44809a078b5 get-checksum-both-bad-1.0.jar" );
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertChecksums( expectedFile, null, null ); assertChecksums( expectedFile, null, null );
} }


saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


// This is a success situation. No SHA1 with a Good MD5. // This is a success situation. No SHA1 with a Good MD5.
Path proxied1File = Paths.get(REPOPATH_PROXIED1, path); Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" ); assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
} }
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertChecksums( expectedFile, null, null ); assertChecksums( expectedFile, null, null );
} }


saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get(REPOPATH_PROXIED1, path); Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar", assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
"invalid checksum file" ); "invalid checksum file" );
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );




Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get(REPOPATH_PROXIED1, path); Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar", assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
"c35f3b76268b73a4ba617f6f275c49ab get-checksum-sha1-bad-md5-1.0.jar" ); "c35f3b76268b73a4ba617f6f275c49ab get-checksum-sha1-bad-md5-1.0.jar" );
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get(REPOPATH_PROXIED1, path); Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "71f7dc3f72053a3f2d9fdd6fef9db055ef957ffb get-checksum-md5-only-1.0.jar", assertChecksums( expectedFile, "71f7dc3f72053a3f2d9fdd6fef9db055ef957ffb get-checksum-md5-only-1.0.jar",
"f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" ); "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get(REPOPATH_PROXIED1, path); Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "1f12821c5e43e1a0b76b9564a6ddb0548ccb9486 get-default-layout-1.0.jar", assertChecksums( expectedFile, "1f12821c5e43e1a0b76b9564a6ddb0548ccb9486 get-default-layout-1.0.jar",
"3f7341545f21226b6f49a3c2704cb9be get-default-layout-1.0.jar" ); "3f7341545f21226b6f49a3c2704cb9be get-default-layout-1.0.jar" );


wagonMockControl.replay(); wagonMockControl.replay();


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


wagonMockControl.verify(); wagonMockControl.verify();




// Test results. // Test results.
Path proxied1File = Paths.get( REPOPATH_PROXIED1, path ); Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar", assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
null ); null );
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get( REPOPATH_PROXIED1, path ); Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
// There are no hashcodes on the proxy side to download, hence the local ones should remain invalid. // There are no hashcodes on the proxy side to download, hence the local ones should remain invalid.
assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" ); assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" );
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
// There are no hashcodes on the proxy side to download. // There are no hashcodes on the proxy side to download.
// The FAIL policy will delete the checksums as bad. // The FAIL policy will delete the checksums as bad.
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get(REPOPATH_PROXIED1, path); Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "96a08dc80a108cba8efd3b20aec91b32a0b2cbd4 get-bad-local-checksum-1.0.jar", assertChecksums( expectedFile, "96a08dc80a108cba8efd3b20aec91b32a0b2cbd4 get-bad-local-checksum-1.0.jar",
"46fdd6ca55bf1d7a7eb0c858f41e0ccd get-bad-local-checksum-1.0.jar" ); "46fdd6ca55bf1d7a7eb0c858f41e0ccd get-bad-local-checksum-1.0.jar" );

+ 9
- 8
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ErrorHandlingTest.java View File

import org.apache.archiva.policies.ReleasesPolicy; import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy; import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.LayoutException; import org.apache.archiva.repository.LayoutException;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.TransferFailedException; import org.apache.maven.wagon.TransferFailedException;
import org.apache.maven.wagon.authorization.AuthorizationException; import org.apache.maven.wagon.authorization.AuthorizationException;
wagonMockControl.replay(); wagonMockControl.replay();


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = null;
StorageAsset downloadedFile = null;
try try
{ {
downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,


wagonMockControl.verify(); wagonMockControl.verify();


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
} }


private void confirmSuccess( String path, Path expectedFile, String basedir ) private void confirmSuccess( String path, Path expectedFile, String basedir )
throws Exception throws Exception
{ {
Path downloadedFile = performDownload( path );
StorageAsset downloadedFile = performDownload( path );


Path proxied1File = Paths.get( basedir, path ); Path proxied1File = Paths.get( basedir, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
} }


private void confirmNotDownloadedNoError( String path ) private void confirmNotDownloadedNoError( String path )
throws Exception throws Exception
{ {
Path downloadedFile = performDownload( path );
StorageAsset downloadedFile = performDownload( path );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
} }


private Path performDownload( String path )
private StorageAsset performDownload( String path )
throws ProxyDownloadException, LayoutException throws ProxyDownloadException, LayoutException
{ {
wagonMockControl.replay(); wagonMockControl.replay();


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,
managedDefaultRepository.toArtifactReference( path ) ); managedDefaultRepository.toArtifactReference( path ) );


wagonMockControl.verify(); wagonMockControl.verify();

+ 6
- 5
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/HttpProxyTransferTest.java View File

import org.apache.archiva.policies.SnapshotsPolicy; import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.proxy.model.RepositoryProxyHandler; import org.apache.archiva.proxy.model.RepositoryProxyHandler;
import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.assertj.core.api.Assertions; import org.assertj.core.api.Assertions;
ArtifactReference artifact = managedDefaultRepository.toArtifactReference( path ); ArtifactReference artifact = managedDefaultRepository.toArtifactReference( path );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path sourceFile = Paths.get( PROXIED_BASEDIR, path ); Path sourceFile = Paths.get( PROXIED_BASEDIR, path );
assertNotNull( "Expected File should not be null.", expectedFile ); assertNotNull( "Expected File should not be null.", expectedFile );
assertNotNull( "Actual File should not be null.", downloadedFile ); assertNotNull( "Actual File should not be null.", downloadedFile );


assertTrue( "Check actual file exists.", Files.exists(downloadedFile));
assertTrue( "Check filename path is appropriate.", Files.isSameFile( expectedFile, downloadedFile));
assertTrue( "Check file path matches.", Files.isSameFile( expectedFile, downloadedFile));
assertTrue( "Check actual file exists.", Files.exists(downloadedFile.getFilePath()));
assertTrue( "Check filename path is appropriate.", Files.isSameFile( expectedFile, downloadedFile.getFilePath()));
assertTrue( "Check file path matches.", Files.isSameFile( expectedFile, downloadedFile.getFilePath()));


String expectedContents = FileUtils.readFileToString( sourceFile.toFile(), Charset.defaultCharset() ); String expectedContents = FileUtils.readFileToString( sourceFile.toFile(), Charset.defaultCharset() );
String actualContents = FileUtils.readFileToString( downloadedFile.toFile(), Charset.defaultCharset() );
String actualContents = FileUtils.readFileToString( downloadedFile.getFilePath().toFile(), Charset.defaultCharset() );
assertEquals( "Check file contents.", expectedContents, actualContents ); assertEquals( "Check file contents.", expectedContents, actualContents );


Assertions.assertThat( System.getProperty( "http.proxyHost" , "") ).isEmpty(); Assertions.assertThat( System.getProperty( "http.proxyHost" , "") ).isEmpty();

+ 29
- 28
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ManagedDefaultTransferTest.java View File

import org.apache.archiva.policies.ChecksumPolicy; import org.apache.archiva.policies.ChecksumPolicy;
import org.apache.archiva.policies.ReleasesPolicy; import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy; import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.ResourceDoesNotExistException;
CachedFailuresPolicy.NO, true ); CachedFailuresPolicy.NO, true );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNull( "File should not have been downloaded", downloadedFile ); assertNull( "File should not have been downloaded", downloadedFile );
} }


CachedFailuresPolicy.NO, false ); CachedFailuresPolicy.NO, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path sourceFile = Paths.get(REPOPATH_PROXIED1, path); Path sourceFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, sourceFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), sourceFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


CachedFailuresPolicy.NO, false ); CachedFailuresPolicy.NO, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );


Path sourceFile = Paths.get(REPOPATH_PROXIED1, path); Path sourceFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, sourceFile );
assertFalse( Files.exists( downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".sha1" )) );
assertFalse( Files.exists(downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".md5" ) ));
assertFalse( Files.exists( downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".asc" ) ));
assertFileEquals( expectedFile, downloadedFile.getFilePath(), sourceFile );
assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".sha1" )) );
assertFalse( Files.exists(downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".md5" ) ));
assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".asc" ) ));
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


CachedFailuresPolicy.NO, false ); CachedFailuresPolicy.NO, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertFileEquals( expectedFile, downloadedFile, expectedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), expectedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


CachedFailuresPolicy.NO, false ); CachedFailuresPolicy.NO, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNotModified( expectedFile, originalModificationTime ); assertNotModified( expectedFile, originalModificationTime );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNotModified( expectedFile, originalModificationTime ); assertNotModified( expectedFile, originalModificationTime );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false ); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path); Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


CachedFailuresPolicy.NO, false ); CachedFailuresPolicy.NO, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path); Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false ); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied1File = Paths.get(REPOPATH_PROXIED1, path); Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
Path proxied2File = Paths.get(REPOPATH_PROXIED2, path); Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );


// TODO: is this check even needed if it passes above? // TODO: is this check even needed if it passes above?
String actualContents = FileUtils.readFileToString( downloadedFile.toFile(), Charset.defaultCharset() );
String actualContents = FileUtils.readFileToString( downloadedFile.getFilePath().toFile(), Charset.defaultCharset() );
String badContents = FileUtils.readFileToString( proxied2File.toFile(), Charset.defaultCharset() ); String badContents = FileUtils.readFileToString( proxied2File.toFile(), Charset.defaultCharset() );
assertFalse( "Downloaded file contents should not be that of proxy 2", assertFalse( "Downloaded file contents should not be that of proxy 2",
StringUtils.equals( actualContents, badContents ) ); StringUtils.equals( actualContents, badContents ) );
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false ); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxied2File = Paths.get(REPOPATH_PROXIED2, path); Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false ); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNull( "File returned was: " + downloadedFile + "; should have got a not found exception", assertNull( "File returned was: " + downloadedFile + "; should have got a not found exception",
downloadedFile ); downloadedFile );
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false ); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );


// Attempt the proxy fetch. // Attempt the proxy fetch.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


wagonMockControl.verify(); wagonMockControl.verify();


Path proxied2File = Paths.get(REPOPATH_PROXIED2, path); Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }




wagonMockControl.replay(); wagonMockControl.replay();


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );


wagonMockControl.verify(); wagonMockControl.verify();
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );

+ 11
- 6
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/MetadataTransferTest.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.VersionUtil; import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.configuration.ProxyConnectorConfiguration; import org.apache.archiva.configuration.ProxyConnectorConfiguration;
import org.apache.archiva.maven2.metadata.MavenMetadataReader; import org.apache.archiva.maven2.metadata.MavenMetadataReader;
import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.metadata.RepositoryMetadataException; import org.apache.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.archiva.repository.metadata.RepositoryMetadataWriter; import org.apache.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.wagon.TransferFailedException; import org.apache.maven.wagon.TransferFailedException;
import org.custommonkey.xmlunit.DetailedDiff; import org.custommonkey.xmlunit.DetailedDiff;


ProjectReference metadata = createProjectReference( requestedResource ); ProjectReference metadata = createProjectReference( requestedResource );


Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath( managedDefaultRepository.toMetadataPath(
metadata ) ).getFile(); metadata ) ).getFile();




ProjectReference metadata = createProjectReference( requestedResource ); ProjectReference metadata = createProjectReference( requestedResource );


Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath( managedDefaultRepository.toMetadataPath(
metadata ) ).getFile(); metadata ) ).getFile();


Path expectedFile = managedDefaultDir.resolve(requestedResource); Path expectedFile = managedDefaultDir.resolve(requestedResource);
ProjectReference metadata = createProjectReference( requestedResource ); ProjectReference metadata = createProjectReference( requestedResource );


Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath( managedDefaultRepository.toMetadataPath(
metadata ) ).getFile(); metadata ) ).getFile();




VersionedReference metadata = createVersionedReference( requestedResource ); VersionedReference metadata = createVersionedReference( requestedResource );


Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath( managedDefaultRepository.toMetadataPath(
metadata ) ).getFile(); metadata ) ).getFile();


Path expectedFile = managedDefaultDir.resolve(requestedResource); Path expectedFile = managedDefaultDir.resolve(requestedResource);
VersionedReference metadata = createVersionedReference( requestedResource ); VersionedReference metadata = createVersionedReference( requestedResource );


Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
managedDefaultRepository.toMetadataPath( managedDefaultRepository.toMetadataPath(
metadata ) ).getFile(); metadata ) ).getFile();


assertTrue( "Actual file exists.", Files.exists(actualFile) ); assertTrue( "Actual file exists.", Files.exists(actualFile) );


StringWriter actualContents = new StringWriter(); StringWriter actualContents = new StringWriter();
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( actualFile );
FilesystemStorage fsStorage = new FilesystemStorage(actualFile.getParent(), new DefaultFileLockManager());
StorageAsset actualFileAsset = fsStorage.getAsset(actualFile.getFileName().toString());
ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( actualFileAsset );
RepositoryMetadataWriter.write( metadata, actualContents ); RepositoryMetadataWriter.write( metadata, actualContents );


DetailedDiff detailedDiff = new DetailedDiff( new Diff( expectedMetadataXml, actualContents.toString() ) ); DetailedDiff detailedDiff = new DetailedDiff( new Diff( expectedMetadataXml, actualContents.toString() ) );

+ 19
- 18
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/SnapshotTransferTest.java View File

import org.apache.archiva.policies.ChecksumPolicy; import org.apache.archiva.policies.ChecksumPolicy;
import org.apache.archiva.policies.ReleasesPolicy; import org.apache.archiva.policies.ReleasesPolicy;
import org.apache.archiva.policies.SnapshotsPolicy; import org.apache.archiva.policies.SnapshotsPolicy;
import org.apache.archiva.repository.storage.StorageAsset;
import org.junit.Test; import org.junit.Test;


import java.nio.file.Files; import java.nio.file.Files;
// Configure Connector (usually done within archiva.xml configuration) // Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


// Configure Connector (usually done within archiva.xml configuration) // Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path); Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


// Configure Connector (usually done within archiva.xml configuration) // Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path); Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false ); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false );


// Attempt to download. // Attempt to download.
Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


// Should not have downloaded as managed is newer than remote. // Should not have downloaded as managed is newer than remote.
assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


// Configure Connector (usually done within archiva.xml configuration) // Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


// Configure Connector (usually done within archiva.xml configuration) // Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


assertNotDownloaded( downloadedFile );
assertNotDownloaded( downloadedFile.getFilePath() );
assertNotModified( expectedFile, expectedTimestamp ); assertNotModified( expectedFile, expectedTimestamp );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.YES , false); SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.YES , false);


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path); Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


// Configure Connector (usually done within archiva.xml configuration) // Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path); Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }


// Configure Connector (usually done within archiva.xml configuration) // Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false); saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);


Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );


Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path); Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
assertNoTempFiles( expectedFile ); assertNoTempFiles( expectedFile );
} }
} }

+ 18
- 3
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.VersionUtil; import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.metadata.model.ArtifactMetadata; import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet; import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.model.ProjectReference; import org.apache.archiva.model.ProjectReference;
import org.apache.archiva.model.VersionedReference; import org.apache.archiva.model.VersionedReference;
import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;


import java.io.IOException;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;




private ManagedRepository repository; private ManagedRepository repository;
private FilesystemStorage fsStorage;


ManagedRepositoryContentMock(ManagedRepository repo) { ManagedRepositoryContentMock(ManagedRepository repo) {
this.repository = repo; this.repository = repo;
@Override @Override
public String getRepoRoot( ) public String getRepoRoot( )
{ {
return Paths.get("", "target", "test-repository", "managed").toString();
return getRepoRootAsset().getFilePath().toString();
}

private StorageAsset getRepoRootAsset() {
if (fsStorage==null) {
try {
fsStorage = new FilesystemStorage(Paths.get("", "target", "test-repository", "managed"), new DefaultFileLockManager());
} catch (IOException e) {
e.printStackTrace();
}
}
return fsStorage.getAsset("");
} }


@Override @Override
@Override @Override
public StorageAsset toFile( ArtifactReference reference ) public StorageAsset toFile( ArtifactReference reference )
{ {
return Paths.get(getRepoRoot(), refs.get(reference));
return getRepoRootAsset().resolve( refs.get(reference));
} }


@Override @Override

+ 10
- 10
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/dependency/tree/maven2/Maven3DependencyTreeBuilder.java View File

import org.apache.archiva.repository.RepositoryRegistry; import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.maven2.MavenSystemManager; import org.apache.archiva.repository.maven2.MavenSystemManager;
import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException; import org.apache.archiva.xml.XMLException;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.Artifact;
import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
import javax.inject.Inject; import javax.inject.Inject;
import javax.inject.Named; import javax.inject.Named;
import java.nio.file.Files;
import java.nio.file.Path;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
for ( String repoId : repositoryIds ) for ( String repoId : repositoryIds )
{ {
ManagedRepository managedRepo = repositoryRegistry.getManagedRepository(repoId); ManagedRepository managedRepo = repositoryRegistry.getManagedRepository(repoId);
Path repoDir = managedRepo.getAsset("").getFilePath();
StorageAsset repoDir = managedRepo.getAsset("");


Path file = pathTranslator.toFile( repoDir, projectArtifact.getGroupId(), projectArtifact.getArtifactId(),
StorageAsset file = pathTranslator.toFile( repoDir, projectArtifact.getGroupId(), projectArtifact.getArtifactId(),
projectArtifact.getBaseVersion(), projectArtifact.getBaseVersion(),
projectArtifact.getArtifactId() + "-" + projectArtifact.getVersion() projectArtifact.getArtifactId() + "-" + projectArtifact.getVersion()
+ ".pom" ); + ".pom" );


if ( Files.exists(file) )
if ( file.exists() )
{ {
return managedRepo; return managedRepo;
} }
// try with snapshot version // try with snapshot version
if ( StringUtils.endsWith( projectArtifact.getBaseVersion(), VersionUtil.SNAPSHOT ) ) if ( StringUtils.endsWith( projectArtifact.getBaseVersion(), VersionUtil.SNAPSHOT ) )
{ {
Path metadataFile = file.getParent().resolve( MetadataTools.MAVEN_METADATA );
if ( Files.exists(metadataFile) )
StorageAsset metadataFile = file.getParent().resolve( MetadataTools.MAVEN_METADATA );
if ( metadataFile.exists() )
{ {
try try
{ {
"-" + VersionUtil.SNAPSHOT ) ).append( '-' ).append( "-" + VersionUtil.SNAPSHOT ) ).append( '-' ).append(
timeStamp ).append( '-' ).append( Integer.toString( buildNumber ) ).append( timeStamp ).append( '-' ).append( Integer.toString( buildNumber ) ).append(
".pom" ).toString(); ".pom" ).toString();
Path timeStampFile = file.getParent().resolve( timeStampFileName );
StorageAsset timeStampFile = file.getParent().resolve( timeStampFileName );
log.debug( "try to find timestamped snapshot version file: {}", timeStampFile); log.debug( "try to find timestamped snapshot version file: {}", timeStampFile);
if ( Files.exists(timeStampFile) )
if ( timeStampFile.exists() )
{ {
return managedRepo; return managedRepo;
} }
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
log.warn( "skip fail to find timestamped snapshot pom: {}", e.getMessage() ); log.warn( "skip fail to find timestamped snapshot pom: {}", e.getMessage() );
} }

+ 5
- 4
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/Maven2RepositoryPathTranslator.java View File

import org.apache.archiva.metadata.model.ArtifactMetadata; import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet; import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator; import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.repository.storage.StorageAsset;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
} }


@Override @Override
public Path toFile(Path basedir, String namespace, String projectId, String projectVersion, String filename )
public StorageAsset toFile(StorageAsset basedir, String namespace, String projectId, String projectVersion, String filename )
{ {
return basedir.resolve( toPath( namespace, projectId, projectVersion, filename ) ); return basedir.resolve( toPath( namespace, projectId, projectVersion, filename ) );
} }


@Override @Override
public Path toFile( Path basedir, String namespace, String projectId, String projectVersion )
public StorageAsset toFile( StorageAsset basedir, String namespace, String projectId, String projectVersion )
{ {
return basedir.resolve( toPath( namespace, projectId, projectVersion ) ); return basedir.resolve( toPath( namespace, projectId, projectVersion ) );
} }
} }


@Override @Override
public Path toFile( Path basedir, String namespace, String projectId )
public StorageAsset toFile( StorageAsset basedir, String namespace, String projectId )
{ {
return basedir.resolve( toPath( namespace, projectId ) ); return basedir.resolve( toPath( namespace, projectId ) );
} }


@Override @Override
public Path toFile( Path basedir, String namespace )
public StorageAsset toFile( StorageAsset basedir, String namespace )
{ {
return basedir.resolve( toPath( namespace ) ); return basedir.resolve( toPath( namespace ) );
} }

+ 72
- 103
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/Maven2RepositoryStorage.java View File

import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.PathParser; import org.apache.archiva.repository.content.PathParser;
import org.apache.archiva.repository.maven2.MavenSystemManager; import org.apache.archiva.repository.maven2.MavenSystemManager;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException; import org.apache.archiva.xml.XMLException;
import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.io.Reader; import java.io.Reader;
import java.nio.channels.Channels;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException; import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Map; import java.util.Map;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream;


// import java.io.FileNotFoundException; // import java.io.FileNotFoundException;


} }
} }
} }
Path basedir = Paths.get(managedRepository.getLocation());
StorageAsset basedir = managedRepository.getAsset("");
if (VersionUtil.isSnapshot(artifactVersion)) { if (VersionUtil.isSnapshot(artifactVersion)) {
Path metadataFile = pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(),
StorageAsset metadataFile = pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(),
readMetadataRequest.getProjectId(), artifactVersion, readMetadataRequest.getProjectId(), artifactVersion,
METADATA_FILENAME); METADATA_FILENAME);
try { try {
artifactVersion = artifactVersion =
artifactVersion + snapshotVersion.getTimestamp() + "-" + snapshotVersion.getBuildNumber(); artifactVersion + snapshotVersion.getTimestamp() + "-" + snapshotVersion.getBuildNumber();
} }
} catch (XMLException e) {
} catch (XMLException | IOException e) {
// unable to parse metadata - LOGGER it, and continue with the version as the original SNAPSHOT version // unable to parse metadata - LOGGER it, and continue with the version as the original SNAPSHOT version
LOGGER.warn("Invalid metadata: {} - {}", metadataFile, e.getMessage()); LOGGER.warn("Invalid metadata: {} - {}", metadataFile, e.getMessage());
} }


// TODO: won't work well with some other layouts, might need to convert artifact parts to ID by path translator // TODO: won't work well with some other layouts, might need to convert artifact parts to ID by path translator
String id = readMetadataRequest.getProjectId() + "-" + artifactVersion + ".pom"; String id = readMetadataRequest.getProjectId() + "-" + artifactVersion + ".pom";
Path file =
StorageAsset file =
pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(), pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(),
readMetadataRequest.getProjectVersion(), id); readMetadataRequest.getProjectVersion(), id);


if (!Files.exists(file)) {
if (!file.exists()) {
// metadata could not be resolved // metadata could not be resolved
throw new RepositoryStorageMetadataNotFoundException( throw new RepositoryStorageMetadataNotFoundException(
"The artifact's POM file '" + file.toAbsolutePath() + "' was missing");
"The artifact's POM file '" + file.getPath() + "' was missing");
} }


// TODO: this is a workaround until we can properly resolve using proxies as well - this doesn't cache // TODO: this is a workaround until we can properly resolve using proxies as well - this doesn't cache
} }


ModelBuildingRequest req = ModelBuildingRequest req =
new DefaultModelBuildingRequest().setProcessPlugins(false).setPomFile(file.toFile()).setTwoPhaseBuilding(
new DefaultModelBuildingRequest().setProcessPlugins(false).setPomFile(file.getFilePath().toFile()).setTwoPhaseBuilding(
false).setValidationLevel(ModelBuildingRequest.VALIDATION_LEVEL_MINIMAL); false).setValidationLevel(ModelBuildingRequest.VALIDATION_LEVEL_MINIMAL);


//MRM-1607. olamy this will resolve jdk profiles on the current running archiva jvm //MRM-1607. olamy this will resolve jdk profiles on the current running archiva jvm
@Override @Override
public Collection<String> listRootNamespaces(String repoId, Filter<String> filter) public Collection<String> listRootNamespaces(String repoId, Filter<String> filter)
throws RepositoryStorageRuntimeException { throws RepositoryStorageRuntimeException {
Path dir = getRepositoryBasedir(repoId);
StorageAsset dir = getRepositoryBasedir(repoId);


return getSortedFiles(dir, filter); return getSortedFiles(dir, filter);
} }


private static Collection<String> getSortedFiles(Path dir, Filter<String> filter) {
private static Collection<String> getSortedFiles(StorageAsset dir, Filter<String> filter) {


try (Stream<Path> stream = Files.list(dir)) {
final Predicate<Path> dFilter = new DirectoryFilter(filter);
return stream.filter(Files::isDirectory)
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
return dir.list().stream().filter(f -> f.isContainer())
.filter(dFilter) .filter(dFilter)
.map(path -> path.getFileName().toString())
.map(path -> path.getName().toString())
.sorted().collect(Collectors.toList()); .sorted().collect(Collectors.toList());


} catch (IOException e) {
LOGGER.error("Could not read directory list {}: {}", dir, e.getMessage(), e);
return Collections.emptyList();
}
} }


private Path getRepositoryBasedir(String repoId)
private StorageAsset getRepositoryBasedir(String repoId)
throws RepositoryStorageRuntimeException { throws RepositoryStorageRuntimeException {
ManagedRepository repositoryConfiguration = repositoryRegistry.getManagedRepository(repoId); ManagedRepository repositoryConfiguration = repositoryRegistry.getManagedRepository(repoId);


return Paths.get(repositoryConfiguration.getLocation());
return repositoryConfiguration.getAsset("");
} }


@Override @Override
public Collection<String> listNamespaces(String repoId, String namespace, Filter<String> filter) public Collection<String> listNamespaces(String repoId, String namespace, Filter<String> filter)
throws RepositoryStorageRuntimeException { throws RepositoryStorageRuntimeException {
Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
if (!(Files.exists(dir) && Files.isDirectory(dir))) {
StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
if (!(dir.exists()) && !dir.isContainer()) {
return Collections.emptyList(); return Collections.emptyList();
} }
// scan all the directories which are potential namespaces. Any directories known to be projects are excluded // scan all the directories which are potential namespaces. Any directories known to be projects are excluded
Predicate<Path> dFilter = new DirectoryFilter(filter);
try (Stream<Path> stream = Files.list(dir)) {
return stream.filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getFileName().toString())
Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
return dir.list().stream().filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getName().toString())
.sorted().collect(Collectors.toList()); .sorted().collect(Collectors.toList());
} catch (IOException e) {
LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
return Collections.emptyList();
}
} }


@Override @Override
public Collection<String> listProjects(String repoId, String namespace, Filter<String> filter) public Collection<String> listProjects(String repoId, String namespace, Filter<String> filter)
throws RepositoryStorageRuntimeException { throws RepositoryStorageRuntimeException {
Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
if (!(Files.exists(dir) && Files.isDirectory(dir))) {
StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
if (!(dir.exists() && dir.isContainer())) {
return Collections.emptyList(); return Collections.emptyList();
} }
// scan all directories in the namespace, and only include those that are known to be projects // scan all directories in the namespace, and only include those that are known to be projects
final Predicate<Path> dFilter = new DirectoryFilter(filter);
try (Stream<Path> stream = Files.list(dir)) {
return stream.filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getFileName().toString())
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
return dir.list().stream().filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getName().toString())
.sorted().collect(Collectors.toList()); .sorted().collect(Collectors.toList());
} catch (IOException e) {
LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
return Collections.emptyList();
}


} }


public Collection<String> listProjectVersions(String repoId, String namespace, String projectId, public Collection<String> listProjectVersions(String repoId, String namespace, String projectId,
Filter<String> filter) Filter<String> filter)
throws RepositoryStorageRuntimeException { throws RepositoryStorageRuntimeException {
Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace, projectId);
if (!(Files.exists(dir) && Files.isDirectory(dir))) {
StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace, projectId);
if (!(dir.exists() && dir.isContainer())) {
return Collections.emptyList(); return Collections.emptyList();
} }


@Override @Override
public Collection<ArtifactMetadata> readArtifactsMetadata(ReadMetadataRequest readMetadataRequest) public Collection<ArtifactMetadata> readArtifactsMetadata(ReadMetadataRequest readMetadataRequest)
throws RepositoryStorageRuntimeException { throws RepositoryStorageRuntimeException {
Path dir = pathTranslator.toFile(getRepositoryBasedir(readMetadataRequest.getRepositoryId()),
StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(readMetadataRequest.getRepositoryId()),
readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(), readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(),
readMetadataRequest.getProjectVersion()); readMetadataRequest.getProjectVersion());
if (!(Files.exists(dir) && Files.isDirectory(dir))) {
if (!(dir.exists() && dir.isContainer())) {
return Collections.emptyList(); return Collections.emptyList();
} }


// all files that are not metadata and not a checksum / signature are considered artifacts // all files that are not metadata and not a checksum / signature are considered artifacts
final Predicate<Path> dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter());
try (Stream<Path> stream = Files.list(dir)) {
final Predicate<StorageAsset> dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter());
// Returns a map TRUE -> (success values), FALSE -> (Exceptions) // Returns a map TRUE -> (success values), FALSE -> (Exceptions)
Map<Boolean, List<Try<ArtifactMetadata>>> result = stream.filter(dFilter).map(path -> {
Map<Boolean, List<Try<ArtifactMetadata>>> result = dir.list().stream().filter(dFilter).map(path -> {
try { try {
return Try.success(getArtifactFromFile(readMetadataRequest.getRepositoryId(), readMetadataRequest.getNamespace(), return Try.success(getArtifactFromFile(readMetadataRequest.getRepositoryId(), readMetadataRequest.getNamespace(),
readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion(), readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion(),
} }
return result.get(Boolean.TRUE).stream().map(tr -> tr.get()).collect(Collectors.toList()); return result.get(Boolean.TRUE).stream().map(tr -> tr.get()).collect(Collectors.toList());
} }
} catch (IOException e) {
LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
}
return Collections.emptyList();


} }


} }


private ArtifactMetadata getArtifactFromFile(String repoId, String namespace, String projectId, private ArtifactMetadata getArtifactFromFile(String repoId, String namespace, String projectId,
String projectVersion, Path file) throws IOException {
String projectVersion, StorageAsset file) throws IOException {
ArtifactMetadata metadata = ArtifactMetadata metadata =
pathTranslator.getArtifactFromId(repoId, namespace, projectId, projectVersion, file.getFileName().toString());
pathTranslator.getArtifactFromId(repoId, namespace, projectId, projectVersion, file.getName());


populateArtifactMetadataFromFile(metadata, file); populateArtifactMetadataFromFile(metadata, file);


proxyHandler.fetchFromProxies(managedRepository, pomReference); proxyHandler.fetchFromProxies(managedRepository, pomReference);


// Open and read the POM from the managed repo // Open and read the POM from the managed repo
Path pom = managedRepository.toFile(pomReference);
StorageAsset pom = managedRepository.toFile(pomReference);


if (!Files.exists(pom)) {
if (!pom.exists()) {
return; return;
} }


try { try {
// MavenXpp3Reader leaves the file open, so we need to close it ourselves. // MavenXpp3Reader leaves the file open, so we need to close it ourselves.


Model model = null;
try (Reader reader = Files.newBufferedReader(pom, Charset.defaultCharset())) {
Model model;
try (Reader reader = Channels.newReader(pom.getReadChannel(), Charset.defaultCharset().name())) {
model = MAVEN_XPP_3_READER.read(reader); model = MAVEN_XPP_3_READER.read(reader);
} }




@Override @Override
public String getFilePathWithVersion(final String requestPath, ManagedRepositoryContent managedRepositoryContent) public String getFilePathWithVersion(final String requestPath, ManagedRepositoryContent managedRepositoryContent)
throws XMLException, RelocationException {
throws RelocationException, XMLException, IOException {


if (StringUtils.endsWith(requestPath, METADATA_FILENAME)) { if (StringUtils.endsWith(requestPath, METADATA_FILENAME)) {
return getFilePath(requestPath, managedRepositoryContent.getRepository()); return getFilePath(requestPath, managedRepositoryContent.getRepository());


if (StringUtils.endsWith(artifactReference.getVersion(), VersionUtil.SNAPSHOT)) { if (StringUtils.endsWith(artifactReference.getVersion(), VersionUtil.SNAPSHOT)) {
// read maven metadata to get last timestamp // read maven metadata to get last timestamp
Path metadataDir = Paths.get(managedRepositoryContent.getRepoRoot(), filePath).getParent();
if (!Files.exists(metadataDir)) {
StorageAsset metadataDir = managedRepositoryContent.getRepository().getAsset( filePath).getParent();
if (!metadataDir.exists()) {
return filePath; return filePath;
} }
Path metadataFile = metadataDir.resolve(METADATA_FILENAME);
if (!Files.exists(metadataFile)) {
StorageAsset metadataFile = metadataDir.resolve(METADATA_FILENAME);
if (!metadataFile.exists()) {
return filePath; return filePath;
} }
ArchivaRepositoryMetadata archivaRepositoryMetadata = MavenMetadataReader.read(metadataFile); ArchivaRepositoryMetadata archivaRepositoryMetadata = MavenMetadataReader.read(metadataFile);
return joinedString; return joinedString;
} }


private static void populateArtifactMetadataFromFile(ArtifactMetadata metadata, Path file) throws IOException {
private static void populateArtifactMetadataFromFile(ArtifactMetadata metadata, StorageAsset file) throws IOException {
metadata.setWhenGathered(new Date()); metadata.setWhenGathered(new Date());
metadata.setFileLastModified(Files.getLastModifiedTime(file).toMillis());
ChecksummedFile checksummedFile = new ChecksummedFile(file);
metadata.setFileLastModified(file.getModificationTime().toEpochMilli());
ChecksummedFile checksummedFile = new ChecksummedFile(file.getFilePath());
try { try {
metadata.setMd5(checksummedFile.calculateChecksum(ChecksumAlgorithm.MD5)); metadata.setMd5(checksummedFile.calculateChecksum(ChecksumAlgorithm.MD5));
} catch (IOException e) { } catch (IOException e) {
} catch (IOException e) { } catch (IOException e) {
LOGGER.error("Unable to checksum file {}: {},SHA1", file, e.getMessage()); LOGGER.error("Unable to checksum file {}: {},SHA1", file, e.getMessage());
} }
metadata.setSize(Files.size(file));
metadata.setSize(file.getSize());
} }


private boolean isProject(Path dir, Filter<String> filter) {
private boolean isProject(StorageAsset dir, Filter<String> filter) {
// scan directories for a valid project version subdirectory, meaning this must be a project directory // scan directories for a valid project version subdirectory, meaning this must be a project directory
final Predicate<Path> dFilter = new DirectoryFilter(filter);
try (Stream<Path> stream = Files.list(dir)) {
boolean projFound = stream.filter(dFilter)
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
boolean projFound = dir.list().stream().filter(dFilter)
.anyMatch(path -> isProjectVersion(path)); .anyMatch(path -> isProjectVersion(path));
if (projFound) { if (projFound) {
return true; return true;
} }
} catch (IOException e) {
LOGGER.error("Could not read directory list {}: {}", dir, e.getMessage(), e);
}


// if a metadata file is present, check if this is the "artifactId" directory, marking it as a project // if a metadata file is present, check if this is the "artifactId" directory, marking it as a project
ArchivaRepositoryMetadata metadata = readMetadata(dir); ArchivaRepositoryMetadata metadata = readMetadata(dir);
if (metadata != null && dir.getFileName().toString().equals(metadata.getArtifactId())) {
if (metadata != null && dir.getName().toString().equals(metadata.getArtifactId())) {
return true; return true;
} }


return false; return false;
} }


private boolean isProjectVersion(Path dir) {
final String artifactId = dir.getParent().getFileName().toString();
final String projectVersion = dir.getFileName().toString();
private boolean isProjectVersion(StorageAsset dir) {
final String artifactId = dir.getParent().getName();
final String projectVersion = dir.getName();


// check if there is a POM artifact file to ensure it is a version directory // check if there is a POM artifact file to ensure it is a version directory


Predicate<Path> filter;
Predicate<StorageAsset> filter;
if (VersionUtil.isSnapshot(projectVersion)) { if (VersionUtil.isSnapshot(projectVersion)) {
filter = new PomFilenameFilter(artifactId, projectVersion); filter = new PomFilenameFilter(artifactId, projectVersion);
} else { } else {
final String pomFile = artifactId + "-" + projectVersion + ".pom"; final String pomFile = artifactId + "-" + projectVersion + ".pom";
filter = new PomFileFilter(pomFile); filter = new PomFileFilter(pomFile);
} }
try (Stream<Path> stream = Files.list(dir)) {
if (stream.filter(Files::isRegularFile).anyMatch(filter)) {
if (dir.list().stream().filter(f -> !f.isContainer()).anyMatch(filter)) {
return true; return true;
} }
} catch (IOException e) {
LOGGER.error("Could not list directory {}: {}", dir, e.getMessage(), e);
}

// if a metadata file is present, check if this is the "version" directory, marking it as a project version // if a metadata file is present, check if this is the "version" directory, marking it as a project version
ArchivaRepositoryMetadata metadata = readMetadata(dir); ArchivaRepositoryMetadata metadata = readMetadata(dir);
if (metadata != null && projectVersion.equals(metadata.getVersion())) { if (metadata != null && projectVersion.equals(metadata.getVersion())) {
return false; return false;
} }


private ArchivaRepositoryMetadata readMetadata(Path directory) {
private ArchivaRepositoryMetadata readMetadata(StorageAsset directory) {
ArchivaRepositoryMetadata metadata = null; ArchivaRepositoryMetadata metadata = null;
Path metadataFile = directory.resolve(METADATA_FILENAME);
if (Files.exists(metadataFile)) {
StorageAsset metadataFile = directory.resolve(METADATA_FILENAME);
if (metadataFile.exists()) {
try { try {
metadata = MavenMetadataReader.read(metadataFile); metadata = MavenMetadataReader.read(metadataFile);
} catch (XMLException e) {
} catch (XMLException | IOException e) {
// ignore missing or invalid metadata // ignore missing or invalid metadata
} }
} }
} }


private static class DirectoryFilter private static class DirectoryFilter
implements Predicate<Path> {
implements Predicate<StorageAsset> {
private final Filter<String> filter; private final Filter<String> filter;


public DirectoryFilter(Filter<String> filter) { public DirectoryFilter(Filter<String> filter) {
} }


@Override @Override
public boolean test(Path dir) {
final String name = dir.getFileName().toString();
public boolean test(StorageAsset dir) {
final String name = dir.getName();
if (!filter.accept(name)) { if (!filter.accept(name)) {
return false; return false;
} else if (name.startsWith(".")) { } else if (name.startsWith(".")) {
return false; return false;
} else if (!Files.isDirectory(dir)) {
} else if (!dir.isContainer()) {
return false; return false;
} }
return true; return true;
} }


private static class ArtifactDirectoryFilter private static class ArtifactDirectoryFilter
implements Predicate<Path> {
implements Predicate<StorageAsset> {
private final Filter<String> filter; private final Filter<String> filter;


private ArtifactDirectoryFilter(Filter<String> filter) { private ArtifactDirectoryFilter(Filter<String> filter) {
} }


@Override @Override
public boolean test(Path dir) {
final String name = dir.getFileName().toString();
public boolean test(StorageAsset dir) {
final String name = dir.getName().toString();
// TODO compare to logic in maven-repository-layer // TODO compare to logic in maven-repository-layer
if (!filter.accept(name)) { if (!filter.accept(name)) {
return false; return false;
return false; return false;
} else if (Arrays.binarySearch(IGNORED_FILES, name) >= 0) { } else if (Arrays.binarySearch(IGNORED_FILES, name) >= 0) {
return false; return false;
} else if (Files.isDirectory(dir)) {
} else if (dir.isContainer()) {
return false; return false;
} }
// some files from remote repositories can have name like maven-metadata-archiva-vm-all-public.xml // some files from remote repositories can have name like maven-metadata-archiva-vm-all-public.xml




private static final class PomFilenameFilter private static final class PomFilenameFilter
implements Predicate<Path> {
implements Predicate<StorageAsset> {


private final String artifactId, projectVersion; private final String artifactId, projectVersion;


} }


@Override @Override
public boolean test(Path dir) {
final String name = dir.getFileName().toString();
public boolean test(StorageAsset dir) {
final String name = dir.getName();
if (name.startsWith(artifactId + "-") && name.endsWith(".pom")) { if (name.startsWith(artifactId + "-") && name.endsWith(".pom")) {
String v = name.substring(artifactId.length() + 1, name.length() - 4); String v = name.substring(artifactId.length() + 1, name.length() - 4);
v = VersionUtil.getBaseVersion(v); v = VersionUtil.getBaseVersion(v);
} }


private static class PomFileFilter private static class PomFileFilter
implements Predicate<Path> {
implements Predicate<StorageAsset> {
private final String pomFile; private final String pomFile;


private PomFileFilter(String pomFile) { private PomFileFilter(String pomFile) {
} }


@Override @Override
public boolean test(Path dir) {
return pomFile.equals(dir.getFileName().toString());
public boolean test(StorageAsset dir) {
return pomFile.equals(dir.getName());
} }
} }



+ 15
- 14
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/RepositoryModelResolver.java View File

import org.apache.archiva.repository.RemoteRepository; import org.apache.archiva.repository.RemoteRepository;
import org.apache.archiva.repository.RepositoryCredentials; import org.apache.archiva.repository.RepositoryCredentials;
import org.apache.archiva.repository.maven2.MavenSystemManager; import org.apache.archiva.repository.maven2.MavenSystemManager;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.xml.XMLException; import org.apache.archiva.xml.XMLException;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.auth.UsernamePasswordCredentials;
private RepositorySystemSession session; private RepositorySystemSession session;
private VersionRangeResolver versionRangeResolver; private VersionRangeResolver versionRangeResolver;


private Path basedir;
private StorageAsset basedir;


private RepositoryPathTranslator pathTranslator; private RepositoryPathTranslator pathTranslator;




private ManagedRepository managedRepository; private ManagedRepository managedRepository;


public RepositoryModelResolver( Path basedir, RepositoryPathTranslator pathTranslator )
public RepositoryModelResolver(StorageAsset basedir, RepositoryPathTranslator pathTranslator )
{ {
this.basedir = basedir; this.basedir = basedir;


Map<String, NetworkProxy> networkProxiesMap, ManagedRepository targetRepository, Map<String, NetworkProxy> networkProxiesMap, ManagedRepository targetRepository,
MavenSystemManager mavenSystemManager) MavenSystemManager mavenSystemManager)
{ {
this( Paths.get( managedRepository.getLocation() ), pathTranslator );
this( managedRepository.getAsset(""), pathTranslator );


this.managedRepository = managedRepository; this.managedRepository = managedRepository;


String filename = artifactId + "-" + version + ".pom"; String filename = artifactId + "-" + version + ".pom";
// TODO: we need to convert 1.0-20091120.112233-1 type paths to baseVersion for the below call - add a test // TODO: we need to convert 1.0-20091120.112233-1 type paths to baseVersion for the below call - add a test


Path model = pathTranslator.toFile( basedir, groupId, artifactId, version, filename );
StorageAsset model = pathTranslator.toFile( basedir, groupId, artifactId, version, filename );


if ( !Files.exists(model) )
if ( !model.exists() )
{ {
/** /**
* *
try try
{ {
boolean success = getModelFromProxy( remoteRepository, groupId, artifactId, version, filename ); boolean success = getModelFromProxy( remoteRepository, groupId, artifactId, version, filename );
if ( success && Files.exists(model) )
if ( success && model.exists() )
{ {
log.info( "Model '{}' successfully retrieved from remote repository '{}'", log.info( "Model '{}' successfully retrieved from remote repository '{}'",
model.toAbsolutePath(), remoteRepository.getId() );
model.getPath(), remoteRepository.getId() );
break; break;
} }
} }
{ {
log.info( log.info(
"An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}", "An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}",
model.toAbsolutePath(), remoteRepository.getId(), e.getMessage() );
model.getPath(), remoteRepository.getId(), e.getMessage() );
} }
catch ( Exception e ) catch ( Exception e )
{ {
log.warn( log.warn(
"An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}", "An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}",
model.toAbsolutePath(), remoteRepository.getId(), e.getMessage() );
model.getPath(), remoteRepository.getId(), e.getMessage() );


continue; continue;
} }
} }
} }


return new FileModelSource( model.toFile() );
return new FileModelSource( model.getFilePath().toFile() );
} }


public ModelSource resolveModel(Parent parent) throws UnresolvableModelException { public ModelSource resolveModel(Parent parent) throws UnresolvableModelException {
log.debug( "use snapshot path {} for maven coordinate {}:{}:{}", snapshotPath, groupId, artifactId, log.debug( "use snapshot path {} for maven coordinate {}:{}:{}", snapshotPath, groupId, artifactId,
version ); version );


Path model = basedir.resolve( snapshotPath );
StorageAsset model = basedir.resolve( snapshotPath );
//model = pathTranslator.toFile( basedir, groupId, artifactId, lastVersion, filename ); //model = pathTranslator.toFile( basedir, groupId, artifactId, lastVersion, filename );
if ( Files.exists(model) )
if ( model.exists() )
{ {
return model;
return model.getFilePath();
} }
} }
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
log.warn( "fail to read {}, {}", mavenMetadata.toAbsolutePath(), e.getCause() ); log.warn( "fail to read {}, {}", mavenMetadata.toAbsolutePath(), e.getCause() );
} }

+ 1
- 1
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java View File

import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;


import java.io.IOException; import java.io.IOException;

+ 1
- 3
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenManagedRepository.java View File

import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.content.maven2.MavenRepositoryRequestInfo; import org.apache.archiva.repository.content.maven2.MavenRepositoryRequestInfo;
import org.apache.archiva.repository.features.ArtifactCleanupFeature; import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Locale; import java.util.Locale;
import java.util.function.Function;


/** /**
* Maven2 managed repository implementation. * Maven2 managed repository implementation.

+ 1
- 1
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRemoteRepository.java View File

import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.StandardCapabilities; import org.apache.archiva.repository.StandardCapabilities;
import org.apache.archiva.repository.UnsupportedFeatureException; import org.apache.archiva.repository.UnsupportedFeatureException;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.features.RepositoryFeature; import org.apache.archiva.repository.features.RepositoryFeature;

+ 1
- 2
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryGroup.java View File

import org.apache.archiva.common.filelock.DefaultFileLockManager; import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;


import java.io.IOException; import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Locale; import java.util.Locale;



+ 12
- 5
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryProvider.java View File

import org.apache.archiva.common.filelock.FileLockManager; import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.configuration.*; import org.apache.archiva.configuration.*;
import org.apache.archiva.repository.*; import org.apache.archiva.repository.*;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.FilesystemStorage;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.features.ArtifactCleanupFeature; import org.apache.archiva.repository.features.ArtifactCleanupFeature;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.archiva.repository.features.RemoteIndexFeature;
IndexCreationFeature indexCreationFeature = repositoryGroup.getFeature( IndexCreationFeature.class ).get(); IndexCreationFeature indexCreationFeature = repositoryGroup.getFeature( IndexCreationFeature.class ).get();
indexCreationFeature.setIndexPath( getURIFromString(configuration.getMergedIndexPath()) ); indexCreationFeature.setIndexPath( getURIFromString(configuration.getMergedIndexPath()) );
Path localPath = Paths.get(configuration.getMergedIndexPath()); Path localPath = Paths.get(configuration.getMergedIndexPath());
if (localPath.isAbsolute()) {
indexCreationFeature.setLocalIndexPath( new FilesystemAsset(localPath.getFileName().toString(), localPath) );
Path repoGroupPath = repositoryGroup.getAsset("").getFilePath().toAbsolutePath();
if (localPath.isAbsolute() && !localPath.startsWith(repoGroupPath)) {
try {
FilesystemStorage storage = new FilesystemStorage(localPath.getParent(), fileLockManager);
indexCreationFeature.setLocalIndexPath(storage.getAsset(localPath.getFileName().toString()));
} catch (IOException e) {
throw new RepositoryException("Could not initialize storage for index path "+localPath);
}
} else if (localPath.isAbsolute()) {
indexCreationFeature.setLocalIndexPath(repositoryGroup.getAsset(repoGroupPath.relativize(localPath).toString()));
} else } else
{ {
indexCreationFeature.setLocalIndexPath( new FilesystemAsset(localPath.toString(), archivaConfiguration.getRepositoryGroupBaseDir( ).resolve( localPath )));
indexCreationFeature.setLocalIndexPath(repositoryGroup.getAsset(localPath.toString()));
} }
} }
// References to other repositories are set filled by the registry // References to other repositories are set filled by the registry

+ 4
- 6
archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/MavenRepositoryMetadataReaderTest.java View File

import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;


import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.Arrays; import java.util.Arrays;


@Test @Test
public void testGroupMetadata() public void testGroupMetadata()
throws XMLException
{
throws XMLException, IOException {
Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/plugins/maven-metadata.xml" ); Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/plugins/maven-metadata.xml" );


ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile ); ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile );


@Test @Test
public void testProjectMetadata() public void testProjectMetadata()
throws XMLException
{
throws XMLException, IOException {
Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" ); Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" );


ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile); ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile);


@Test @Test
public void testProjectVersionMetadata() public void testProjectVersionMetadata()
throws XMLException
{
throws XMLException, IOException {
Path metadataFile = defaultRepoDir.resolve( "org/apache/apache/5-SNAPSHOT/maven-metadata.xml" ); Path metadataFile = defaultRepoDir.resolve( "org/apache/apache/5-SNAPSHOT/maven-metadata.xml" );


ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile ); ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile );

+ 20
- 11
archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/ArchivaIndexManagerMock.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration; import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException; import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;


private Path getIndexPath( ArchivaIndexingContext ctx ) private Path getIndexPath( ArchivaIndexingContext ctx )
{ {
return PathUtil.getPathFromUri( ctx.getPath( ) );
return ctx.getPath( ).getFilePath();
} }


@FunctionalInterface @FunctionalInterface
@Override @Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{ {
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> { executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try { try {
indexer.addArtifactsToIndex(artifacts, indexingContext); indexer.addArtifactsToIndex(artifacts, indexingContext);
} catch (IOException e) { } catch (IOException e) {
@Override @Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{ {
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> { executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try { try {
indexer.deleteArtifactsFromIndex(artifacts, indexingContext); indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
} catch (IOException e) { } catch (IOException e) {
throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( ) throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
+ ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e ); + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
} }
MavenIndexContextMock context = new MavenIndexContextMock( repository, mvnCtx );
MavenIndexContextMock context = null;
try {
context = new MavenIndexContextMock( repository, mvnCtx );
} catch (IOException e) {
throw new IndexCreationFailedException(e);
}


return context; return context;
} }
log.warn("Index close failed"); log.warn("Index close failed");
} }
try { try {
FileUtils.deleteDirectory(Paths.get(context.getPath()));
FileUtils.deleteDirectory(context.getPath().getFilePath());
} catch (IOException e) { } catch (IOException e) {
throw new IndexUpdateFailedException("Could not delete index files"); throw new IndexUpdateFailedException("Could not delete index files");
} }
} }





private StorageAsset getIndexPath( Repository repo) throws IOException { private StorageAsset getIndexPath( Repository repo) throws IOException {
IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get(); IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
Path repoDir = repo.getAsset( "" ).getFilePath(); Path repoDir = repo.getAsset( "" ).getFilePath();
URI indexDir = icf.getIndexPath(); URI indexDir = icf.getIndexPath();
String indexPath = indexDir.getPath(); String indexPath = indexDir.getPath();
Path indexDirectory = null; Path indexDirectory = null;
FilesystemStorage fsStorage = (FilesystemStorage) repo.getAsset("").getStorage();
if ( ! StringUtils.isEmpty(indexDir.toString( ) ) ) if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
{ {


if ( indexDirectory.isAbsolute( ) ) if ( indexDirectory.isAbsolute( ) )
{ {
indexPath = indexDirectory.getFileName().toString(); indexPath = indexDirectory.getFileName().toString();
fsStorage = new FilesystemStorage(indexDirectory.getParent(), new DefaultFileLockManager());
} }
else else
{ {
{ {
Files.createDirectories( indexDirectory ); Files.createDirectories( indexDirectory );
} }
return new FilesystemAsset( indexPath, indexDirectory );
return new FilesystemAsset( fsStorage, indexPath, indexDirectory );
} }


private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException

+ 8
- 4
archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/MavenIndexContextMock.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.index.context.IndexingContext; import org.apache.maven.index.context.IndexingContext;


import java.io.IOException; import java.io.IOException;
import java.net.URI;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.NoSuchFileException; import java.nio.file.NoSuchFileException;
import java.sql.Date; import java.sql.Date;


private IndexingContext delegate; private IndexingContext delegate;
private Repository repository; private Repository repository;
private FilesystemStorage indexStorage;


MavenIndexContextMock(Repository repository, IndexingContext delegate) {
MavenIndexContextMock(Repository repository, IndexingContext delegate) throws IOException {
this.delegate = delegate; this.delegate = delegate;
this.repository = repository; this.repository = repository;
indexStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath(), new DefaultFileLockManager());


} }


} }


@Override @Override
public URI getPath() {
return delegate.getIndexDirectoryFile().toURI();
public StorageAsset getPath() {
return indexStorage.getAsset("");
} }


@Override @Override

+ 3
- 4
archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/metadata/RepositoryMetadataReaderTest.java View File

import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;


import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;


{ {
@Test @Test
public void testLoadSimple() public void testLoadSimple()
throws XMLException
{
throws XMLException, IOException {
Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" ); Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" );
Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" ); Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" );




@Test @Test
public void testLoadComplex() public void testLoadComplex()
throws XMLException
{
throws XMLException, IOException {
Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" ); Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" );
Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/samplejar/maven-metadata.xml" ); Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/samplejar/maven-metadata.xml" );



+ 1
- 1
archiva-modules/archiva-maven/archiva-maven-scheduler/src/test/java/org/apache/archiva/scheduler/indexing/maven/ArchivaIndexingTaskExecutorTest.java View File

import org.apache.archiva.repository.ManagedRepository; import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ReleaseScheme; import org.apache.archiva.repository.ReleaseScheme;
import org.apache.archiva.repository.RepositoryRegistry; import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask; import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;

+ 1
- 3
archiva-modules/archiva-scheduler/archiva-scheduler-repository-api/src/main/java/org/apache/archiva/scheduler/repository/model/RepositoryTask.java View File

package org.apache.archiva.scheduler.repository.model; package org.apache.archiva.scheduler.repository.model;


import org.apache.archiva.redback.components.taskqueue.Task; import org.apache.archiva.redback.components.taskqueue.Task;
import org.apache.archiva.repository.content.StorageAsset;

import java.nio.file.Path;
import org.apache.archiva.repository.storage.StorageAsset;




/* /*

+ 14
- 10
archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/ArchivaIndexManagerMock.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.common.utils.FileUtils; import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.common.utils.PathUtil; import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.ArchivaConfiguration; import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.indexer.IndexCreationFailedException; import org.apache.archiva.indexer.IndexCreationFailedException;
import org.apache.archiva.indexer.IndexUpdateFailedException; import org.apache.archiva.indexer.IndexUpdateFailedException;
import org.apache.archiva.indexer.UnsupportedBaseContextException; import org.apache.archiva.indexer.UnsupportedBaseContextException;
import org.apache.archiva.proxy.ProxyRegistry;
import org.apache.archiva.proxy.maven.WagonFactory; import org.apache.archiva.proxy.maven.WagonFactory;
import org.apache.archiva.proxy.maven.WagonFactoryException; import org.apache.archiva.proxy.maven.WagonFactoryException;
import org.apache.archiva.proxy.maven.WagonFactoryRequest; import org.apache.archiva.proxy.maven.WagonFactoryRequest;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.RepositoryType; import org.apache.archiva.repository.RepositoryType;
import org.apache.archiva.repository.UnsupportedRepositoryTypeException; import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
import org.apache.archiva.repository.content.FilesystemAsset;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.features.IndexCreationFeature; import org.apache.archiva.repository.features.IndexCreationFeature;
import org.apache.archiva.repository.features.RemoteIndexFeature; import org.apache.archiva.repository.features.RemoteIndexFeature;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.index.ArtifactContext; import org.apache.maven.index.ArtifactContext;
import org.apache.maven.index.ArtifactContextProducer; import org.apache.maven.index.ArtifactContextProducer;


private Path getIndexPath( ArchivaIndexingContext ctx ) private Path getIndexPath( ArchivaIndexingContext ctx )
{ {
return PathUtil.getPathFromUri( ctx.getPath( ) );
return ctx.getPath().getFilePath();
} }


@FunctionalInterface @FunctionalInterface
@Override @Override
public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
{ {
final URI ctxUri = context.getPath();
StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> { executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try { try {
indexer.addArtifactsToIndex(artifacts, indexingContext); indexer.addArtifactsToIndex(artifacts, indexingContext);
} catch (IOException e) { } catch (IOException e) {
@Override @Override
public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
{ {
final URI ctxUri = context.getPath();
final StorageAsset ctxUri = context.getPath();
executeUpdateFunction(context, indexingContext -> { executeUpdateFunction(context, indexingContext -> {
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
try { try {
indexer.deleteArtifactsFromIndex(artifacts, indexingContext); indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
} catch (IOException e) { } catch (IOException e) {
log.warn("Index close failed"); log.warn("Index close failed");
} }
try { try {
FileUtils.deleteDirectory(Paths.get(context.getPath()));
StorageUtil.deleteRecursively(context.getPath());
} catch (IOException e) { } catch (IOException e) {
throw new IndexUpdateFailedException("Could not delete index files"); throw new IndexUpdateFailedException("Could not delete index files");
} }
URI indexDir = icf.getIndexPath(); URI indexDir = icf.getIndexPath();
String indexPath = indexDir.getPath(); String indexPath = indexDir.getPath();
Path indexDirectory = null; Path indexDirectory = null;
FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getAsset("").getStorage();
if ( ! StringUtils.isEmpty(indexDir.toString( ) ) ) if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
{ {


if ( indexDirectory.isAbsolute( ) ) if ( indexDirectory.isAbsolute( ) )
{ {
indexPath = indexDirectory.getFileName().toString(); indexPath = indexDirectory.getFileName().toString();
filesystemStorage = new FilesystemStorage(indexDirectory, new DefaultFileLockManager());
} }
else else
{ {
{ {
Files.createDirectories( indexDirectory ); Files.createDirectories( indexDirectory );
} }
return new FilesystemAsset( indexPath, indexDirectory);
return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory);
} }


private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException

+ 13
- 2
archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MavenIndexContextMock.java View File

* under the License. * under the License.
*/ */


import org.apache.archiva.common.filelock.DefaultFileLockManager;
import org.apache.archiva.indexer.ArchivaIndexingContext; import org.apache.archiva.indexer.ArchivaIndexingContext;
import org.apache.archiva.repository.Repository; import org.apache.archiva.repository.Repository;
import org.apache.archiva.repository.storage.FilesystemAsset;
import org.apache.archiva.repository.storage.FilesystemStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.maven.index.context.IndexingContext; import org.apache.maven.index.context.IndexingContext;


import java.io.IOException; import java.io.IOException;


private IndexingContext delegate; private IndexingContext delegate;
private Repository repository; private Repository repository;
private FilesystemStorage filesystemStorage;


MavenIndexContextMock( Repository repository, IndexingContext delegate) { MavenIndexContextMock( Repository repository, IndexingContext delegate) {
this.delegate = delegate; this.delegate = delegate;
this.repository = repository; this.repository = repository;
try {
filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath().getParent(), new DefaultFileLockManager());
} catch (IOException e) {
e.printStackTrace();
}


} }


} }


@Override @Override
public URI getPath() {
return delegate.getIndexDirectoryFile().toURI();
public StorageAsset getPath() {
return new FilesystemAsset(filesystemStorage, delegate.getIndexDirectoryFile().toPath().getFileName().toString(), delegate.getIndexDirectoryFile().toPath());

} }


@Override @Override

+ 4
- 0
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/pom.xml View File

<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-security</artifactId> <artifactId>archiva-security</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-storage-api</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.archiva</groupId> <groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-admin-api</artifactId> <artifactId>archiva-repository-admin-api</artifactId>

+ 32
- 17
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultBrowseService.java View File

import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryNotFoundException; import org.apache.archiva.repository.RepositoryNotFoundException;
import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.archiva.rest.api.model.*; import org.apache.archiva.rest.api.model.*;
import org.apache.archiva.rest.api.services.ArchivaRestServiceException; import org.apache.archiva.rest.api.services.ArchivaRestServiceException;
import org.apache.archiva.rest.api.services.BrowseService; import org.apache.archiva.rest.api.services.BrowseService;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.*; import java.util.*;
import java.util.jar.JarEntry; import java.util.jar.JarEntry;
import java.util.jar.JarFile; import java.util.jar.JarFile;
ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier, ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier,
StringUtils.isEmpty( type ) ? "jar" : type, StringUtils.isEmpty( type ) ? "jar" : type,
repoId ); repoId );
Path file = managedRepositoryContent.toFile( archivaArtifact );
if ( Files.exists(file) )
StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );
if ( file.exists() )
{ {
return readFileEntries( file, path, repoId ); return readFileEntries( file, path, repoId );
} }
ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier, ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier,
StringUtils.isEmpty( type ) ? "jar" : type, StringUtils.isEmpty( type ) ? "jar" : type,
repoId ); repoId );
Path file = managedRepositoryContent.toFile( archivaArtifact );
if ( !Files.exists(file) )
StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );
if ( !file.exists() )
{ {
log.debug( "file: {} not exists for repository: {} try next repository", file, repoId ); log.debug( "file: {} not exists for repository: {} try next repository", file, repoId );
continue; continue;
if ( StringUtils.isNotBlank( path ) ) if ( StringUtils.isNotBlank( path ) )
{ {
// zip entry of the path -> path must a real file entry of the archive // zip entry of the path -> path must a real file entry of the archive
JarFile jarFile = new JarFile( file.toFile() );
StorageUtil.PathInformation pathInfo = StorageUtil.getAssetDataAsPath(file);
JarFile jarFile = new JarFile( pathInfo.getPath().toFile());
ZipEntry zipEntry = jarFile.getEntry( path ); ZipEntry zipEntry = jarFile.getEntry( path );
try (InputStream inputStream = jarFile.getInputStream( zipEntry )) try (InputStream inputStream = jarFile.getInputStream( zipEntry ))
{ {
finally finally
{ {
closeQuietly( jarFile ); closeQuietly( jarFile );
if (pathInfo.isTmpFile()) {
Files.deleteIfExists(pathInfo.getPath());
}
} }
} }
return new ArtifactContent( new String(Files.readAllBytes( file ), ARTIFACT_CONTENT_ENCODING), repoId );
try(InputStream readStream = file.getReadStream()) {
return new ArtifactContent(IOUtils.toString(readStream, ARTIFACT_CONTENT_ENCODING), repoId);
}
} }
} }
catch ( IOException e ) catch ( IOException e )
StringUtils.isEmpty( classifier ) StringUtils.isEmpty( classifier )
? "" ? ""
: classifier, "jar", repoId ); : classifier, "jar", repoId );
Path file = managedRepositoryContent.toFile( archivaArtifact );
StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );


if ( file != null && Files.exists(file) )
if ( file != null && file.exists() )
{ {
return true; return true;
} }
// in case of SNAPSHOT we can have timestamped version locally ! // in case of SNAPSHOT we can have timestamped version locally !
if ( StringUtils.endsWith( version, VersionUtil.SNAPSHOT ) ) if ( StringUtils.endsWith( version, VersionUtil.SNAPSHOT ) )
{ {
Path metadataFile = file.getParent().resolve(MetadataTools.MAVEN_METADATA );
if ( Files.exists(metadataFile) )
StorageAsset metadataFile = file.getStorage().getAsset(file.getParent().getPath()+"/"+MetadataTools.MAVEN_METADATA );
if ( metadataFile.exists() )
{ {
try try
{ {
.append( ( StringUtils.isEmpty( classifier ) ? "" : "-" + classifier ) ) // .append( ( StringUtils.isEmpty( classifier ) ? "" : "-" + classifier ) ) //
.append( ".jar" ).toString(); .append( ".jar" ).toString();


Path timeStampFile = file.getParent().resolve( timeStampFileName );
log.debug( "try to find timestamped snapshot version file: {}", timeStampFile.toAbsolutePath() );
if ( Files.exists(timeStampFile) )
StorageAsset timeStampFile = file.getStorage().getAsset(file.getParent().getPath() + "/" + timeStampFileName );
log.debug( "try to find timestamped snapshot version file: {}", timeStampFile.getPath() );
if ( timeStampFile.exists() )
{ {
return true; return true;
} }
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
log.warn( "skip fail to find timestamped snapshot file: {}", e.getMessage() ); log.warn( "skip fail to find timestamped snapshot file: {}", e.getMessage() );
} }


file = proxyHandler.fetchFromProxies( managedRepositoryContent, path ); file = proxyHandler.fetchFromProxies( managedRepositoryContent, path );


if ( file != null && Files.exists(file) )
if ( file != null && file.exists() )
{ {
// download pom now // download pom now
String pomPath = StringUtils.substringBeforeLast( path, ".jar" ) + ".pom"; String pomPath = StringUtils.substringBeforeLast( path, ".jar" ) + ".pom";
} }
} }


protected List<ArtifactContentEntry> readFileEntries(final Path file, final String filterPath, final String repoId )
protected List<ArtifactContentEntry> readFileEntries(final StorageAsset file, final String filterPath, final String repoId )
throws IOException throws IOException
{ {
String cleanedfilterPath = filterPath==null ? "" : (StringUtils.startsWith(filterPath, "/") ? String cleanedfilterPath = filterPath==null ? "" : (StringUtils.startsWith(filterPath, "/") ?
if (!StringUtils.endsWith(cleanedfilterPath,"/") && !StringUtils.isEmpty(cleanedfilterPath)) { if (!StringUtils.endsWith(cleanedfilterPath,"/") && !StringUtils.isEmpty(cleanedfilterPath)) {
filterDepth++; filterDepth++;
} }
JarFile jarFile = new JarFile( file.toFile() );

StorageUtil.PathInformation pathInfo = StorageUtil.getAssetDataAsPath(file);
JarFile jarFile = new JarFile(pathInfo.getPath().toFile());
try try
{ {
Enumeration<JarEntry> jarEntryEnumeration = jarFile.entries(); Enumeration<JarEntry> jarEntryEnumeration = jarFile.entries();
{ {
jarFile.close(); jarFile.close();
} }
if (pathInfo.isTmpFile()) {
Files.deleteIfExists(pathInfo.getPath());
}
} }
List<ArtifactContentEntry> sorted = new ArrayList<>( artifactContentEntryMap.values() ); List<ArtifactContentEntry> sorted = new ArrayList<>( artifactContentEntryMap.values() );
Collections.sort( sorted, ArtifactContentEntryComparator.INSTANCE ); Collections.sort( sorted, ArtifactContentEntryComparator.INSTANCE );

+ 17
- 10
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultRepositoriesService.java View File

import org.apache.archiva.repository.RepositoryException; import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.RepositoryNotFoundException; import org.apache.archiva.repository.RepositoryNotFoundException;
import org.apache.archiva.repository.RepositoryRegistry; import org.apache.archiva.repository.RepositoryRegistry;
import org.apache.archiva.repository.content.RepositoryStorage;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.content.StorageUtil;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.archiva.repository.events.RepositoryListener; import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.archiva.repository.metadata.MetadataTools; import org.apache.archiva.repository.metadata.MetadataTools;
import org.apache.archiva.repository.metadata.RepositoryMetadataException; import org.apache.archiva.repository.metadata.RepositoryMetadataException;
import javax.inject.Named; import javax.inject.Named;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.io.IOException; import java.io.IOException;
import java.nio.file.FileSystems;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.text.DateFormat; import java.text.DateFormat;
import java.text.SimpleDateFormat; import java.text.SimpleDateFormat;
import java.util.ArrayList; import java.util.ArrayList;
{ {
metadata = MavenMetadataReader.read( metadataFile.getFilePath() ); metadata = MavenMetadataReader.read( metadataFile.getFilePath() );
} }
catch ( XMLException e )
catch (XMLException | IOException e )
{ {
throw new RepositoryMetadataException( e.getMessage(), e ); throw new RepositoryMetadataException( e.getMessage(), e );
} }
throws IOException throws IOException
{ {


StorageUtil.copyAsset( sourceStorage, sourceFile, targetStorage, targetPath, true );
StorageUtil.copyAsset( sourceFile, targetPath, true );
if ( fixChecksums ) if ( fixChecksums )
{ {
fixChecksums( targetPath ); fixChecksums( targetPath );
projectMetadata.setReleasedVersion( latestVersion ); projectMetadata.setReleasedVersion( latestVersion );
} }


RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile.getFilePath());
try(OutputStreamWriter writer = new OutputStreamWriter(projectMetadataFile.getWriteStream(true))) {
RepositoryMetadataWriter.write(projectMetadata, writer);
} catch (IOException e) {
throw new RepositoryMetadataException(e);
}


if ( fixChecksums ) if ( fixChecksums )
{ {
metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp ); metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp );
metadata.setAvailableVersions( availableVersions ); metadata.setAvailableVersions( availableVersions );


RepositoryMetadataWriter.write( metadata, metadataFile.getFilePath());
try (OutputStreamWriter writer = new OutputStreamWriter(metadataFile.getWriteStream(true))) {
RepositoryMetadataWriter.write(metadata, writer);
} catch (IOException e) {
throw new RepositoryMetadataException(e);
}
ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() ); ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
checksum.fixChecksums( algorithms ); checksum.fixChecksums( algorithms );
} }

+ 6
- 4
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/utils/ArtifactBuilder.java View File

import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet; import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.model.ArtifactReference; import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.repository.ManagedRepositoryContent; import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.repository.storage.StorageUtil;
import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.FilenameUtils;


import java.nio.file.Path; import java.nio.file.Path;


ref.setClassifier( classifier ); ref.setClassifier( classifier );
ref.setType( type ); ref.setType( type );
Path file = managedRepositoryContent.toFile( ref );
StorageAsset file = managedRepositoryContent.toFile( ref );


String extension = getExtensionFromFile(file); String extension = getExtensionFromFile(file);
/** /**
* Extract file extension * Extract file extension
*/ */
String getExtensionFromFile( Path file )
String getExtensionFromFile( StorageAsset file )
{ {
// we are just interested in the section after the last - // we are just interested in the section after the last -
String[] parts = file.getFileName().toString().split( "-" );
String[] parts = file.getName().split( "-" );
if ( parts.length > 0 ) if ( parts.length > 0 )
{ {
// get anything after a dot followed by a letter a-z, including other dots // get anything after a dot followed by a letter a-z, including other dots
} }
} }
// just in case // just in case
return FilenameUtils.getExtension( file.toFile().getName() );
return StorageUtil.getExtension( file );
} }


} }

+ 0
- 0
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/ArtifactContentEntriesTests.java View File


Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save