@@ -166,7 +166,7 @@ public class ArchivaCli | |||
private void doScan( String path, String[] consumers ) | |||
throws ConsumerException, IOException | |||
{ | |||
BasicManagedRepository repo = BasicManagedRepository.newFilesystemInstance( Paths.get(path).getFileName().toString(), "Archiva CLI Provided Repo", Paths.get(path).getParent()); | |||
BasicManagedRepository repo = BasicManagedRepository.newFilesystemInstance( Paths.get(path).getFileName().toString(), "Archiva CLI Provided Repo", Paths.get(path)); | |||
repo.setLocation( Paths.get(path).toUri() ); | |||
List<KnownRepositoryContentConsumer> knownConsumerList = new ArrayList<>(); |
@@ -46,7 +46,8 @@ public enum ChecksumAlgorithm { | |||
SHA1("SHA-1", "SHA1", "sha1", "sha128", "sha-128"), | |||
SHA256("SHA-256", "SHA256", "sha256", "sha2", "sha-256"), | |||
SHA384("SHA-384", "SHA384", "sha384", "sha3", "sha-384"), | |||
SHA512("SHA-512", "SHA512", "sha512", "sha5", "sha-512"); | |||
SHA512("SHA-512", "SHA512", "sha512", "sha5", "sha-512"), | |||
ASC("ASC", "ASC", "asc"); | |||
public static ChecksumAlgorithm getByExtension( Path file ) | |||
{ |
@@ -404,7 +404,7 @@ public abstract class AbstractRepositoryPurge | |||
{ | |||
StorageUtil.recurse(parentDir, a -> { | |||
if (!artifactFile.isContainer() && artifactFile.getName().startsWith(artifactName)) deleteSilently(a); | |||
if (!a.isContainer() && a.getName().startsWith(artifactName)) deleteSilently(a); | |||
}, true, 3 ); | |||
} | |||
catch ( IOException e ) |
@@ -49,7 +49,7 @@ public class ArtifactMissingChecksumsConsumerTest | |||
super.setUp(); | |||
Path basePath = Paths.get("target/test-classes"); | |||
repoConfig = BasicManagedRepository.newFilesystemInstance( "test-repo", "Test Repository", basePath); | |||
repoConfig = BasicManagedRepository.newFilesystemInstance( "test-repo", "Test Repository", basePath.resolve("test-repo")); | |||
repoConfig.setLayout( "default" ); | |||
repoConfig.setLocation(basePath.resolve("test-repo/" ).toUri() ); | |||
@@ -148,7 +148,7 @@ public abstract class AbstractRepositoryPurgeTest | |||
public org.apache.archiva.repository.ManagedRepository getRepoConfiguration( String repoId, String repoName ) throws URISyntaxException, IOException { | |||
Path basePath = Paths.get("target/test-" + getName()).toAbsolutePath(); | |||
config = BasicManagedRepository.newFilesystemInstance( repoId, repoName, basePath); | |||
config = BasicManagedRepository.newFilesystemInstance( repoId, repoName, basePath.resolve(repoId)); | |||
config.addActiveReleaseScheme( ReleaseScheme.RELEASE ); | |||
config.addActiveReleaseScheme( ReleaseScheme.SNAPSHOT ); | |||
ArtifactCleanupFeature atf = config.getFeature( ArtifactCleanupFeature.class ).get(); |
@@ -76,7 +76,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
public EditableManagedRepository createManagedInstance( String id, String name ) | |||
{ | |||
try { | |||
return BasicManagedRepository.newFilesystemInstance( id, name, Paths.get("target/repositories") ); | |||
return BasicManagedRepository.newFilesystemInstance( id, name, Paths.get("target/repositories").resolve(id) ); | |||
} catch (IOException e) { | |||
throw new RuntimeException(e); | |||
} | |||
@@ -104,7 +104,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
{ | |||
BasicManagedRepository managedRepository = null; | |||
try { | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( configuration.getId( ), configuration.getName( ), Paths.get("target/repositories") ); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( configuration.getId( ), configuration.getName( ), Paths.get("target/repositories").resolve(configuration.getId()) ); | |||
} catch (IOException e) { | |||
throw new RepositoryException(e); | |||
} | |||
@@ -156,7 +156,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
String id = configuration.getId( ) + StagingRepositoryFeature.STAGING_REPO_POSTFIX; | |||
BasicManagedRepository managedRepository = null; | |||
try { | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( id, configuration.getName( ), Paths.get("target/repositories") ); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance(id, configuration.getName(), Paths.get("target/repositories").resolve(id)); | |||
} catch (IOException e) { | |||
throw new RepositoryException(e); | |||
} |
@@ -116,7 +116,7 @@ public class NexusIndexerConsumerTest | |||
// initialize to set the file types to be processed | |||
nexusIndexerConsumer.initialize(); | |||
repositoryConfig = BasicManagedRepository.newFilesystemInstance( "test-repo", "Test Repository", Paths.get("target/test-classes") ); | |||
repositoryConfig = BasicManagedRepository.newFilesystemInstance( "test-repo", "Test Repository", Paths.get("target/test-classes").resolve("test-repo") ); | |||
repositoryConfig.setLocation( new URI("target/test-classes/test-repo") ); | |||
repositoryConfig.setLayout( "default" ); | |||
repositoryConfig.setScanned( true ); |
@@ -110,17 +110,18 @@ public class Lock implements Closeable | |||
IOException ioException = null; | |||
try | |||
{ | |||
this.fileLock.release(); | |||
if (this.fileLock!=null) { | |||
this.fileLock.release(); | |||
} | |||
} | |||
catch ( IOException e ) | |||
{ | |||
ioException = e; | |||
} finally { | |||
closeQuietly( fileChannel ); | |||
fileClients.remove( Thread.currentThread() ); | |||
} | |||
closeQuietly( fileChannel ); | |||
fileClients.remove( Thread.currentThread() ); | |||
if ( ioException != null ) | |||
{ | |||
throw ioException; |
@@ -368,7 +368,7 @@ public class ChecksumPolicyTest | |||
} | |||
StorageAsset localAsset = fs.getAsset("artifact.jar"); | |||
StorageAsset localAsset = destDir.resolve("artifact.jar"); | |||
return localAsset; | |||
} | |||
@@ -58,6 +58,7 @@ import javax.inject.Named; | |||
import java.io.IOException; | |||
import java.nio.file.Files; | |||
import java.nio.file.Path; | |||
import java.nio.file.StandardCopyOption; | |||
import java.util.*; | |||
import java.util.concurrent.ConcurrentHashMap; | |||
import java.util.concurrent.ConcurrentMap; | |||
@@ -282,7 +283,7 @@ public abstract class DefaultRepositoryProxyHandler implements RepositoryProxyHa | |||
transferFile( connector, targetRepository, targetPath, repository, localFile, requestProperties, | |||
true ); | |||
if ( downloadedFile.exists() ) | |||
if ( fileExists(downloadedFile) ) | |||
{ | |||
log.debug( "Successfully transferred: {}", downloadedFile.getPath() ); | |||
return downloadedFile; | |||
@@ -827,7 +828,7 @@ public abstract class DefaultRepositoryProxyHandler implements RepositoryProxyHa | |||
try | |||
{ | |||
StorageUtil.moveAsset( temp, target, true ); | |||
StorageUtil.moveAsset( temp, target, true , StandardCopyOption.REPLACE_EXISTING); | |||
} | |||
catch ( IOException e ) | |||
{ | |||
@@ -841,7 +842,8 @@ public abstract class DefaultRepositoryProxyHandler implements RepositoryProxyHa | |||
} | |||
catch ( IOException ex ) | |||
{ | |||
throw new ProxyException("Could not move temp file "+temp.getPath()+" to target "+target.getPath(), e); | |||
log.error("Copy failed from {} to {}: ({}) {}", temp, target, e.getClass(), e.getMessage()); | |||
throw new ProxyException("Could not move temp file "+temp.getPath()+" to target "+target.getPath()+": ("+e.getClass()+") "+e.getMessage(), e); | |||
} | |||
} | |||
} |
@@ -37,18 +37,14 @@ public abstract class AbstractManagedRepository extends AbstractRepository imple | |||
private Set<ReleaseScheme> activeReleaseSchemes = new HashSet<>( ); | |||
private Set<ReleaseScheme> uActiveReleaseSchemes = Collections.unmodifiableSet( activeReleaseSchemes ); | |||
private RepositoryStorage storage; | |||
public AbstractManagedRepository(RepositoryType type, String id, String name, RepositoryStorage storage) | |||
{ | |||
super( type, id, name, storage ); | |||
this.storage = storage; | |||
} | |||
public AbstractManagedRepository( Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage storage ) | |||
{ | |||
super( primaryLocale, type, id, name, storage ); | |||
this.storage = storage; | |||
} | |||
@Override |
@@ -22,6 +22,7 @@ package org.apache.archiva.repository; | |||
import org.apache.archiva.repository.storage.RepositoryStorage; | |||
import java.net.URI; | |||
import java.nio.file.Path; | |||
import java.time.Duration; | |||
import java.util.Collections; | |||
@@ -156,4 +157,10 @@ public abstract class AbstractRemoteRepository extends AbstractRepository implem | |||
return str.append("checkPath=").append(checkPath) | |||
.append(",creds:").append(credentials).toString(); | |||
} | |||
@Override | |||
public void setLocation(URI location) { | |||
// Location of remote repositories is not for the local filestore | |||
super.location = location; | |||
} | |||
} |
@@ -67,7 +67,7 @@ public abstract class AbstractRepository implements EditableRepository, Reposito | |||
private Map<Locale, String> descriptions = new HashMap<>( ); | |||
private Locale primaryLocale = new Locale("en_US"); | |||
private URI location; | |||
protected URI location; | |||
private URI baseUri; | |||
private Set<URI> failoverLocations = new HashSet<>( ); | |||
private Set<URI> uFailoverLocations = Collections.unmodifiableSet( failoverLocations ); | |||
@@ -88,6 +88,7 @@ public abstract class AbstractRepository implements EditableRepository, Reposito | |||
this.names.put( primaryLocale, name); | |||
this.type = type; | |||
this.storage = repositoryStorage; | |||
this.location = repositoryStorage.getLocation(); | |||
} | |||
public AbstractRepository(Locale primaryLocale, RepositoryType type, String id, String name, RepositoryStorage repositoryStorage) { | |||
@@ -96,6 +97,7 @@ public abstract class AbstractRepository implements EditableRepository, Reposito | |||
this.names.put( primaryLocale, name); | |||
this.type = type; | |||
this.storage = repositoryStorage; | |||
this.location = repositoryStorage.getLocation(); | |||
} | |||
protected void setPrimaryLocale(Locale locale) { | |||
@@ -227,9 +229,21 @@ public abstract class AbstractRepository implements EditableRepository, Reposito | |||
} | |||
@Override | |||
public void setLocation( URI location ) | |||
public void setLocation( final URI location ) | |||
{ | |||
this.location = location; | |||
if (location!=null && ( this.location == null || !this.location.equals(location))) { | |||
try { | |||
updateLocation(location); | |||
} catch (IOException e) { | |||
log.error("Could not update location of repository {} to {}", getId(), location, e); | |||
} | |||
} | |||
} | |||
@Override | |||
public void updateLocation(URI newLocation) throws IOException { | |||
storage.updateLocation(newLocation); | |||
this.location = newLocation; | |||
} | |||
@Override |
@@ -90,9 +90,18 @@ public class BasicManagedRepository extends AbstractManagedRepository | |||
return null; | |||
} | |||
public static BasicManagedRepository newFilesystemInstance(String id, String name, Path basePath) throws IOException { | |||
/** | |||
* Creates a filesystem based repository instance. The path is built by basePath/repository-id | |||
* | |||
* @param id The repository id | |||
* @param name The name of the repository | |||
* @param repositoryPath The path to the repository | |||
* @return The repository instance | |||
* @throws IOException | |||
*/ | |||
public static BasicManagedRepository newFilesystemInstance(String id, String name, Path repositoryPath) throws IOException { | |||
FileLockManager lockManager = new DefaultFileLockManager(); | |||
FilesystemStorage storage = new FilesystemStorage(basePath.resolve(id), lockManager); | |||
FilesystemStorage storage = new FilesystemStorage(repositoryPath, lockManager); | |||
return new BasicManagedRepository(id, name, storage); | |||
} | |||
@@ -192,14 +192,14 @@ public class RepositoryRegistryTest | |||
@Test | |||
public void putManagedRepository( ) throws Exception | |||
{ | |||
BasicManagedRepository managedRepository = BasicManagedRepository.newFilesystemInstance( "test001", "Test repo", archivaConfiguration.getRepositoryBaseDir() ); | |||
BasicManagedRepository managedRepository = BasicManagedRepository.newFilesystemInstance("test001", "Test repo", archivaConfiguration.getRepositoryBaseDir().resolve("test001")); | |||
managedRepository.setDescription( managedRepository.getPrimaryLocale(), "This is just a test" ); | |||
repositoryRegistry.putRepository(managedRepository); | |||
assertNotNull(managedRepository.getContent()); | |||
assertEquals(6, repositoryRegistry.getRepositories().size()); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( "central", "Test repo", archivaConfiguration.getRepositoryBaseDir() ); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance("central", "Test repo", archivaConfiguration.getRepositoryBaseDir().resolve("central")); | |||
managedRepository.setDescription( managedRepository.getPrimaryLocale(), "This is just a test" ); | |||
ManagedRepository updatedRepo = null; | |||
try { | |||
@@ -208,7 +208,7 @@ public class RepositoryRegistryTest | |||
} catch (RepositoryException e) { | |||
// OK | |||
} | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( "internal", "Test repo", archivaConfiguration.getRepositoryBaseDir() ); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance("internal", "Test repo", archivaConfiguration.getRepositoryBaseDir().resolve("internal")); | |||
managedRepository.setDescription( managedRepository.getPrimaryLocale(), "This is just a test" ); | |||
updatedRepo = repositoryRegistry.putRepository( managedRepository ); | |||
@@ -60,7 +60,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
@Override | |||
public EditableManagedRepository createManagedInstance( String id, String name ) throws IOException { | |||
return BasicManagedRepository.newFilesystemInstance( id, name, Paths.get("target/repositories") ); | |||
return BasicManagedRepository.newFilesystemInstance(id, name, Paths.get("target/repositories").resolve(id)); | |||
} | |||
@Override | |||
@@ -82,7 +82,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
public ManagedRepository createManagedInstance( ManagedRepositoryConfiguration configuration ) throws RepositoryException { | |||
BasicManagedRepository managedRepository = null; | |||
try { | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( configuration.getId( ), configuration.getName( ), Paths.get("target/repositories") ); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance(configuration.getId(), configuration.getName(), Paths.get("target/repositories").resolve(configuration.getId())); | |||
} catch (IOException e) { | |||
throw new RepositoryException(e); | |||
} | |||
@@ -133,7 +133,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
String id = configuration.getId( ) + StagingRepositoryFeature.STAGING_REPO_POSTFIX; | |||
BasicManagedRepository managedRepository = null; | |||
try { | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( id, configuration.getName( ) , Paths.get("target/repositories")); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance(id, configuration.getName(), Paths.get("target/repositories").resolve(id)); | |||
} catch (IOException e) { | |||
throw new RepositoryException(e); | |||
} |
@@ -77,7 +77,7 @@ public class RepositoryContentConsumersTest | |||
ApplicationContext applicationContext; | |||
protected ManagedRepository createRepository( String id, String name, Path location ) throws IOException { | |||
BasicManagedRepository repo = BasicManagedRepository.newFilesystemInstance( id, name , location.getParent() ); | |||
BasicManagedRepository repo = BasicManagedRepository.newFilesystemInstance(id, name, location.getParent().resolve(id)); | |||
repo.setLocation( location.toAbsolutePath().toUri() ); | |||
return repo; | |||
} |
@@ -1,4 +1,23 @@ | |||
<?xml version="1.0" encoding="UTF-8"?> | |||
<!-- | |||
~ Licensed to the Apache Software Foundation (ASF) under one | |||
~ or more contributor license agreements. See the NOTICE file | |||
~ distributed with this work for additional information | |||
~ regarding copyright ownership. The ASF licenses this file | |||
~ to you under the Apache License, Version 2.0 (the | |||
~ "License"); you may not use this file except in compliance | |||
~ with the License. You may obtain a copy of the License at | |||
~ | |||
~ http://www.apache.org/licenses/LICENSE-2.0 | |||
~ | |||
~ Unless required by applicable law or agreed to in writing, | |||
~ software distributed under the License is distributed on an | |||
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | |||
~ KIND, either express or implied. See the License for the | |||
~ specific language governing permissions and limitations | |||
~ under the License. | |||
--> | |||
<project xmlns="http://maven.apache.org/POM/4.0.0" | |||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | |||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> |
@@ -22,6 +22,7 @@ package org.apache.archiva.repository.storage; | |||
import java.io.IOException; | |||
import java.io.InputStream; | |||
import java.io.OutputStream; | |||
import java.net.URI; | |||
import java.nio.channels.ReadableByteChannel; | |||
import java.nio.channels.WritableByteChannel; | |||
import java.nio.file.CopyOption; | |||
@@ -46,6 +47,26 @@ import java.util.function.Consumer; | |||
* Checking access is not part of this API. | |||
*/ | |||
public interface RepositoryStorage { | |||
/** | |||
* Returns a URI representation of the storage location. | |||
* | |||
* @return The URI that is pointing to the storage. | |||
*/ | |||
URI getLocation(); | |||
/** | |||
* Updates the base location of the repository storage. The method does not move any data. | |||
* It just points to the new location. Artifacts may not be accessible anymore if the data has | |||
* not been moved or copied. Assets retrieved before the relocation may still be pointing to the | |||
* old location. | |||
* | |||
* @param newLocation The URI to the new location | |||
* | |||
* @throws IOException If the repository cannot be relocated | |||
*/ | |||
void updateLocation(URI newLocation) throws IOException; | |||
/** | |||
* Returns information about a specific storage asset. | |||
* @param path |
@@ -1,4 +1,23 @@ | |||
<?xml version="1.0" encoding="UTF-8"?> | |||
<!-- | |||
~ Licensed to the Apache Software Foundation (ASF) under one | |||
~ or more contributor license agreements. See the NOTICE file | |||
~ distributed with this work for additional information | |||
~ regarding copyright ownership. The ASF licenses this file | |||
~ to you under the Apache License, Version 2.0 (the | |||
~ "License"); you may not use this file except in compliance | |||
~ with the License. You may obtain a copy of the License at | |||
~ | |||
~ http://www.apache.org/licenses/LICENSE-2.0 | |||
~ | |||
~ Unless required by applicable law or agreed to in writing, | |||
~ software distributed under the License is distributed on an | |||
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | |||
~ KIND, either express or implied. See the License for the | |||
~ specific language governing permissions and limitations | |||
~ under the License. | |||
--> | |||
<project xmlns="http://maven.apache.org/POM/4.0.0" | |||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | |||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | |||
@@ -19,6 +38,10 @@ | |||
<dependencies> | |||
<dependency> | |||
<groupId>org.apache.archiva</groupId> | |||
<artifactId>archiva-common</artifactId> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.archiva</groupId> | |||
<artifactId>archiva-storage-api</artifactId> | |||
@@ -28,6 +51,7 @@ | |||
<artifactId>archiva-filelock</artifactId> | |||
</dependency> | |||
<dependency> | |||
<groupId>commons-io</groupId> | |||
<artifactId>commons-io</artifactId> |
@@ -48,7 +48,7 @@ import java.util.stream.Collectors; | |||
* | |||
* @author Martin Stockhammer <martin_s@apache.org> | |||
*/ | |||
public class FilesystemAsset implements StorageAsset { | |||
public class FilesystemAsset implements StorageAsset, Comparable { | |||
private final static Logger log = LoggerFactory.getLogger(FilesystemAsset.class); | |||
@@ -159,11 +159,15 @@ public class FilesystemAsset implements StorageAsset { | |||
init(); | |||
} | |||
private String normalizePath(String path) { | |||
private String normalizePath(final String path) { | |||
if (!path.startsWith("/")) { | |||
return "/"+path; | |||
} else { | |||
return path; | |||
String tmpPath = path; | |||
while (tmpPath.startsWith("//")) { | |||
tmpPath = tmpPath.substring(1); | |||
} | |||
return tmpPath; | |||
} | |||
} | |||
@@ -491,4 +495,15 @@ public class FilesystemAsset implements StorageAsset { | |||
return relativePath+":"+assetPath; | |||
} | |||
@Override | |||
public int compareTo(Object o) { | |||
if (o instanceof FilesystemAsset) { | |||
if (this.getPath()!=null) { | |||
return this.getPath().compareTo(((FilesystemAsset) o).getPath()); | |||
} else { | |||
return 0; | |||
} | |||
} | |||
return 0; | |||
} | |||
} |
@@ -23,6 +23,7 @@ import org.apache.archiva.common.filelock.FileLockException; | |||
import org.apache.archiva.common.filelock.FileLockManager; | |||
import org.apache.archiva.common.filelock.FileLockTimeoutException; | |||
import org.apache.archiva.common.filelock.Lock; | |||
import org.apache.archiva.common.utils.PathUtil; | |||
import org.apache.commons.io.FileUtils; | |||
import org.slf4j.Logger; | |||
import org.slf4j.LoggerFactory; | |||
@@ -31,15 +32,11 @@ import java.io.FileNotFoundException; | |||
import java.io.IOException; | |||
import java.io.InputStream; | |||
import java.io.OutputStream; | |||
import java.net.URI; | |||
import java.nio.channels.FileChannel; | |||
import java.nio.channels.ReadableByteChannel; | |||
import java.nio.channels.WritableByteChannel; | |||
import java.nio.file.CopyOption; | |||
import java.nio.file.Files; | |||
import java.nio.file.Path; | |||
import java.nio.file.Paths; | |||
import java.nio.file.StandardCopyOption; | |||
import java.nio.file.StandardOpenOption; | |||
import java.nio.file.*; | |||
import java.util.function.Consumer; | |||
/** | |||
@@ -52,7 +49,7 @@ public class FilesystemStorage implements RepositoryStorage { | |||
private static final Logger log = LoggerFactory.getLogger(FilesystemStorage.class); | |||
private final Path basePath; | |||
private Path basePath; | |||
private final FileLockManager fileLockManager; | |||
public FilesystemStorage(Path basePath, FileLockManager fileLockManager) throws IOException { | |||
@@ -295,6 +292,30 @@ public class FilesystemStorage implements RepositoryStorage { | |||
} | |||
} | |||
@Override | |||
public URI getLocation() { | |||
return basePath.toUri(); | |||
} | |||
/** | |||
* Updates the location and releases all locks. | |||
* | |||
* @param newLocation The URI to the new location | |||
* | |||
* @throws IOException If the directory cannot be created. | |||
*/ | |||
@Override | |||
public void updateLocation(URI newLocation) throws IOException { | |||
Path newPath = PathUtil.getPathFromUri(newLocation).toAbsolutePath(); | |||
if (!Files.exists(newPath)) { | |||
Files.createDirectories(newPath); | |||
} | |||
basePath = newPath; | |||
if (fileLockManager!=null) { | |||
fileLockManager.clearLockFiles(); | |||
} | |||
} | |||
@Override | |||
public StorageAsset getAsset( String path ) | |||
{ |
@@ -71,17 +71,37 @@ public class StorageUtil | |||
if (locked) { | |||
final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager(); | |||
final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager(); | |||
try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) ) | |||
{ | |||
Files.copy( sourcePath, targetPath, copyOptions ); | |||
Lock lockRead = null; | |||
Lock lockWrite = null; | |||
try { | |||
lockRead = lmSource.readFileLock(sourcePath); | |||
} catch (Exception e) { | |||
log.error("Could not create read lock on {}", sourcePath); | |||
throw new IOException(e); | |||
} | |||
catch ( FileLockException e ) | |||
{ | |||
throw new IOException( e ); | |||
try { | |||
lockWrite = lmTarget.writeFileLock(targetPath); | |||
} catch (Exception e) { | |||
log.error("Could not create write lock on {}", targetPath); | |||
throw new IOException(e); | |||
} | |||
catch ( FileLockTimeoutException e ) | |||
{ | |||
throw new IOException( e ); | |||
try { | |||
Files.copy(sourcePath, targetPath, copyOptions); | |||
} finally { | |||
if (lockRead!=null) { | |||
try { | |||
lmSource.release(lockRead); | |||
} catch (FileLockException e) { | |||
log.error("Error during lock release of read lock {}", lockRead.getFile()); | |||
} | |||
} | |||
if (lockWrite!=null) { | |||
try { | |||
lmTarget.release(lockWrite); | |||
} catch (FileLockException e) { | |||
log.error("Error during lock release of write lock {}", lockWrite.getFile()); | |||
} | |||
} | |||
} | |||
} else | |||
{ | |||
@@ -122,6 +142,9 @@ public class StorageUtil | |||
if (source.isFileBased() && target.isFileBased()) { | |||
// Short cut for FS operations | |||
// Move is atomic operation | |||
if (!Files.exists(target.getFilePath().getParent())) { | |||
Files.createDirectories(target.getFilePath().getParent()); | |||
} | |||
Files.move( source.getFilePath(), target.getFilePath(), copyOptions ); | |||
} else { | |||
try { |
@@ -98,7 +98,7 @@ public class DefaultLegacyRepositoryConverter | |||
{ | |||
String defaultRepositoryUrl = PathUtil.toUrl( repositoryDirectory ); | |||
BasicManagedRepository legacyRepository = BasicManagedRepository.newFilesystemInstance( "legacy", "Legacy Repository", repositoryDirectory.getParent()); | |||
BasicManagedRepository legacyRepository = BasicManagedRepository.newFilesystemInstance( "legacy", "Legacy Repository", repositoryDirectory); | |||
legacyRepository.setLocation( legacyRepositoryDirectory.toAbsolutePath().toUri() ); | |||
legacyRepository.setLayout( "legacy" ); | |||
DefaultFileLockManager lockManager = new DefaultFileLockManager(); |
@@ -591,10 +591,12 @@ public class MavenIndexManager implements ArchivaIndexManager { | |||
} | |||
} | |||
private StorageAsset getIndexPath(URI indexDirUri, RepositoryStorage storage, String defaultDir) throws IOException | |||
private StorageAsset getIndexPath(URI indexDirUri, RepositoryStorage repoStorage, String defaultDir) throws IOException | |||
{ | |||
StorageAsset rootAsset = repoStorage.getAsset(""); | |||
RepositoryStorage storage = rootAsset.getStorage(); | |||
Path indexDirectory; | |||
Path repositoryPath = storage.getAsset("").getFilePath().toAbsolutePath(); | |||
Path repositoryPath = rootAsset.getFilePath().toAbsolutePath(); | |||
StorageAsset indexDir; | |||
if ( ! StringUtils.isEmpty(indexDirUri.toString( ) ) ) | |||
{ | |||
@@ -625,7 +627,7 @@ public class MavenIndexManager implements ArchivaIndexManager { | |||
if ( !indexDir.exists() ) | |||
{ | |||
indexDir.create(); | |||
indexDir = storage.addAsset(indexDir.getPath(), true); | |||
} | |||
return indexDir; | |||
} |
@@ -81,7 +81,7 @@ public class MavenMetadataReader | |||
* @throws XMLException | |||
*/ | |||
public static ArchivaRepositoryMetadata read( Path metadataFile ) | |||
throws XMLException, IOException { | |||
throws XMLException { | |||
XMLReader xml = new XMLReader( "metadata", metadataFile ); | |||
// invoke this to remove namespaces, see MRM-1136 | |||
@@ -93,10 +93,19 @@ public class MavenMetadataReader | |||
metadata.setArtifactId( xml.getElementText( "//metadata/artifactId" ) ); | |||
metadata.setVersion( xml.getElementText( "//metadata/version" ) ); | |||
Date modTime; | |||
modTime = new Date(Files.getLastModifiedTime(metadataFile).toMillis()); | |||
try { | |||
modTime = new Date(Files.getLastModifiedTime(metadataFile).toMillis()); | |||
} catch (IOException e) { | |||
modTime = new Date(); | |||
log.error("Could not read modification time of {}", metadataFile); | |||
} | |||
metadata.setFileLastModified( modTime ); | |||
metadata.setFileSize( Files.size(metadataFile) ); | |||
try { | |||
metadata.setFileSize(Files.size(metadataFile)); | |||
} catch (IOException e) { | |||
metadata.setFileSize( 0 ); | |||
log.error("Could not read file size of {}", metadataFile); | |||
} | |||
metadata.setLastUpdated( xml.getElementText( "//metadata/versioning/lastUpdated" ) ); | |||
metadata.setLatestVersion( xml.getElementText( "//metadata/versioning/latest" ) ); | |||
metadata.setReleasedVersion( xml.getElementText( "//metadata/versioning/release" ) ); |
@@ -20,10 +20,7 @@ package org.apache.archiva.proxy; | |||
*/ | |||
import net.sf.ehcache.CacheManager; | |||
import org.apache.archiva.configuration.ArchivaConfiguration; | |||
import org.apache.archiva.configuration.ManagedRepositoryConfiguration; | |||
import org.apache.archiva.configuration.ProxyConnectorConfiguration; | |||
import org.apache.archiva.configuration.RemoteRepositoryConfiguration; | |||
import org.apache.archiva.configuration.*; | |||
import org.apache.archiva.policies.CachedFailuresPolicy; | |||
import org.apache.archiva.policies.ChecksumPolicy; | |||
import org.apache.archiva.policies.PropagateErrorsDownloadPolicy; | |||
@@ -32,6 +29,7 @@ import org.apache.archiva.policies.ReleasesPolicy; | |||
import org.apache.archiva.policies.SnapshotsPolicy; | |||
import org.apache.archiva.proxy.model.RepositoryProxyHandler; | |||
import org.apache.archiva.repository.*; | |||
import org.apache.archiva.repository.storage.StorageAsset; | |||
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; | |||
import org.apache.maven.wagon.Wagon; | |||
import org.easymock.EasyMock; | |||
@@ -54,11 +52,7 @@ import java.nio.file.Paths; | |||
import java.nio.file.attribute.FileTime; | |||
import java.text.ParseException; | |||
import java.text.SimpleDateFormat; | |||
import java.util.ArrayList; | |||
import java.util.Calendar; | |||
import java.util.Collection; | |||
import java.util.Date; | |||
import java.util.Locale; | |||
import java.util.*; | |||
import java.util.concurrent.TimeUnit; | |||
import java.util.stream.Collectors; | |||
@@ -126,22 +120,27 @@ public abstract class AbstractProxyTestCase | |||
config.getConfiguration().setManagedRepositories( new ArrayList<ManagedRepositoryConfiguration>() ); | |||
config.getConfiguration().setRemoteRepositories( new ArrayList<RemoteRepositoryConfiguration>() ); | |||
config.getConfiguration().setProxyConnectors( new ArrayList<ProxyConnectorConfiguration>() ); | |||
ArchivaRuntimeConfiguration runtimeConfiguration = new ArchivaRuntimeConfiguration(); | |||
List<String> checksumTypes = new ArrayList<>(); | |||
checksumTypes.add("md5"); | |||
checksumTypes.add("sha256"); | |||
checksumTypes.add("sha1"); | |||
checksumTypes.add("asc"); | |||
runtimeConfiguration.setChecksumTypes(checksumTypes); | |||
config.getConfiguration().setArchivaRuntimeConfiguration(runtimeConfiguration); | |||
repositoryRegistry.setArchivaConfiguration( config ); | |||
// Setup source repository (using default layout) | |||
String name = getClass().getSimpleName(); | |||
String repoPath = "target/test-repository/managed/" + name; | |||
Path repoPath = Paths.get("target/test-repository/managed/" + name); | |||
managedDefaultRepository = | |||
createRepository( ID_DEFAULT_MANAGED, "Default Managed Repository", repoPath, "default" ); | |||
createRepository( ID_DEFAULT_MANAGED, "Default Managed Repository", repoPath.toString(), "default" ); | |||
managedDefaultDir = Paths.get( managedDefaultRepository.getRepoRoot() ); | |||
managedDefaultDir = repoPath.resolve(ID_DEFAULT_MANAGED) ; | |||
org.apache.archiva.repository.ManagedRepository repoConfig = repositoryRegistry.getManagedRepository(ID_DEFAULT_MANAGED); | |||
applicationContext.getBean( RepositoryRegistry.class ).putRepository( repoConfig ); | |||
repositoryRegistry.setArchivaConfiguration( config ); | |||
// Setup target (proxied to) repository. | |||
saveRemoteRepositoryConfig( ID_PROXIED1, "Proxied Repository 1", | |||
Paths.get( REPOPATH_PROXIED1 ).toUri().toURL().toExternalForm(), "default" ); | |||
@@ -152,14 +151,7 @@ public abstract class AbstractProxyTestCase | |||
repositoryRegistry.reload(); | |||
if ( repositoryRegistry.getManagedRepository( repoConfig.getId() ) != null ) | |||
{ | |||
org.apache.archiva.repository.ManagedRepository managedRepository = repositoryRegistry.getManagedRepository( repoConfig.getId() ); | |||
repositoryRegistry.removeRepository( managedRepository ); | |||
} | |||
repositoryRegistry.putRepository( repoConfig ); | |||
repositoryRegistry.putRepository(repoConfig); | |||
// Setup the proxy handler. | |||
@@ -217,6 +209,7 @@ public abstract class AbstractProxyTestCase | |||
assertNotNull( "Actual File should not be null.", actualFile ); | |||
assertTrue( "Check actual file exists.", Files.exists(actualFile) ); | |||
assertTrue("Check expected file exists", Files.exists(expectedFile)); | |||
assertTrue( "Check file is the same.", Files.isSameFile( expectedFile, | |||
actualFile)); | |||
String expectedContents = | |||
@@ -226,7 +219,7 @@ public abstract class AbstractProxyTestCase | |||
assertEquals( "Check file contents.", expectedContents, actualContents ); | |||
} | |||
protected void assertNotDownloaded( Path downloadedFile ) | |||
protected void assertNotDownloaded( StorageAsset downloadedFile ) | |||
{ | |||
assertNull( "Found file: " + downloadedFile + "; but was expecting a failure", downloadedFile ); | |||
} | |||
@@ -319,7 +312,7 @@ public abstract class AbstractProxyTestCase | |||
protected ManagedRepositoryContent createRepository( String id, String name, String path, String layout ) | |||
throws Exception | |||
{ | |||
ManagedRepository repo = BasicManagedRepository.newFilesystemInstance(id, name, Paths.get(path)); | |||
ManagedRepository repo = BasicManagedRepository.newFilesystemInstance(id, name, Paths.get(path).resolve(id)); | |||
repositoryRegistry.putRepository(repo); | |||
return repositoryRegistry.getManagedRepository(id).getContent(); | |||
} |
@@ -83,6 +83,7 @@ public class CacheFailuresTransferTest | |||
wagonMockControl.replay(); | |||
//noinspection UnusedAssignment | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
wagonMockControl.verify(); | |||
@@ -93,7 +94,7 @@ public class CacheFailuresTransferTest | |||
downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
wagonMockControl.verify(); | |||
assertNotDownloaded( downloadedFile.getFilePath()); | |||
assertNotDownloaded( downloadedFile); | |||
assertNoTempFiles( expectedFile ); | |||
} | |||
@@ -141,7 +142,7 @@ public class CacheFailuresTransferTest | |||
wagonMockControl.verify(); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile); | |||
assertNoTempFiles( expectedFile ); | |||
} | |||
@@ -173,6 +174,7 @@ public class CacheFailuresTransferTest | |||
// Validate that file actually came from proxied2 (as intended). | |||
Path proxied2File = Paths.get( REPOPATH_PROXIED2, path ); | |||
assertNotNull(downloadedFile); | |||
assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File ); | |||
assertNoTempFiles( expectedFile ); | |||
} |
@@ -215,7 +215,7 @@ public class ChecksumTransferTest | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertChecksums( expectedFile, null, null ); | |||
} | |||
@@ -264,7 +264,7 @@ public class ChecksumTransferTest | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertChecksums( expectedFile, null, null ); | |||
} | |||
@@ -313,7 +313,7 @@ public class ChecksumTransferTest | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertChecksums( expectedFile, null, null ); | |||
} | |||
@@ -519,7 +519,7 @@ public class ChecksumTransferTest | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertNoTempFiles( expectedFile ); | |||
// There are no hashcodes on the proxy side to download. | |||
// The FAIL policy will delete the checksums as bad. |
@@ -617,7 +617,7 @@ public class ErrorHandlingTest | |||
wagonMockControl.verify(); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
} | |||
private void confirmSuccess( String path, Path expectedFile, String basedir ) | |||
@@ -634,7 +634,7 @@ public class ErrorHandlingTest | |||
{ | |||
StorageAsset downloadedFile = performDownload( path ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
} | |||
private StorageAsset performDownload( String path ) |
@@ -95,7 +95,7 @@ public class HttpProxyTransferTest | |||
protected ManagedRepositoryContent createRepository( String id, String name, String path, String layout ) | |||
throws Exception | |||
{ | |||
ManagedRepository repo = BasicManagedRepository.newFilesystemInstance(id, name, Paths.get(path)); | |||
ManagedRepository repo = BasicManagedRepository.newFilesystemInstance(id, name, Paths.get(path).resolve(id)); | |||
repositoryRegistry.putRepository(repo); | |||
return repositoryRegistry.getManagedRepository(id).getContent(); | |||
} |
@@ -114,10 +114,11 @@ public class ManagedDefaultTransferTest | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path ); | |||
Path sourceFile = Paths.get(REPOPATH_PROXIED1, path); | |||
assertNotNull(downloadedFile); | |||
assertFileEquals( expectedFile, downloadedFile.getFilePath(), sourceFile ); | |||
assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".sha1" )) ); | |||
assertFalse( Files.exists(downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".md5" ) )); | |||
assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".asc" ) )); | |||
assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".sha256" ) )); | |||
assertNoTempFiles( expectedFile ); | |||
} | |||
@@ -182,7 +183,7 @@ public class ManagedDefaultTransferTest | |||
// Attempt the proxy fetch. | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertNotModified( expectedFile, originalModificationTime ); | |||
assertNoTempFiles( expectedFile ); | |||
} | |||
@@ -228,7 +229,7 @@ public class ManagedDefaultTransferTest | |||
// Attempt the proxy fetch. | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertNotModified( expectedFile, originalModificationTime ); | |||
assertNoTempFiles( expectedFile ); | |||
} | |||
@@ -454,7 +455,7 @@ public class ManagedDefaultTransferTest | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
wagonMockControl.verify(); | |||
assertNoTempFiles( expectedFile ); |
@@ -61,7 +61,7 @@ public class SnapshotTransferTest | |||
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false); | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertNoTempFiles( expectedFile ); | |||
} | |||
@@ -132,7 +132,7 @@ public class SnapshotTransferTest | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
// Should not have downloaded as managed is newer than remote. | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertNoTempFiles( expectedFile ); | |||
} | |||
@@ -247,7 +247,7 @@ public class SnapshotTransferTest | |||
StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact ); | |||
assertNotDownloaded( downloadedFile.getFilePath() ); | |||
assertNotDownloaded( downloadedFile ); | |||
assertNotModified( expectedFile, expectedTimestamp ); | |||
assertNoTempFiles( expectedFile ); | |||
} |
@@ -29,6 +29,7 @@ import org.apache.archiva.model.ProjectReference; | |||
import org.apache.archiva.model.VersionedReference; | |||
import org.apache.archiva.repository.*; | |||
import org.apache.archiva.repository.storage.FilesystemStorage; | |||
import org.apache.archiva.repository.storage.RepositoryStorage; | |||
import org.apache.archiva.repository.storage.StorageAsset; | |||
import org.apache.commons.lang.StringUtils; | |||
import org.springframework.stereotype.Service; | |||
@@ -53,10 +54,11 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent | |||
private ManagedRepository repository; | |||
private FilesystemStorage fsStorage; | |||
private RepositoryStorage fsStorage; | |||
ManagedRepositoryContentMock(ManagedRepository repo) { | |||
this.repository = repo; | |||
this.fsStorage = repo; | |||
} | |||
@Override |
@@ -74,7 +74,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
@Override | |||
public EditableManagedRepository createManagedInstance( String id, String name ) throws IOException { | |||
return BasicManagedRepository.newFilesystemInstance( id, name , Paths.get("target/repositories")); | |||
return BasicManagedRepository.newFilesystemInstance(id, name, Paths.get("target/repositories").resolve(id)); | |||
} | |||
@Override | |||
@@ -98,7 +98,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
{ | |||
BasicManagedRepository managedRepository = null; | |||
try { | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( configuration.getId( ), configuration.getName( ) , Paths.get("target/repositories")); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( configuration.getId( ), configuration.getName( ) , Paths.get("target/repositories").resolve(configuration.getId())); | |||
} catch (IOException e) { | |||
throw new RepositoryException(e); | |||
} | |||
@@ -150,7 +150,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
String id = configuration.getId( ) + StagingRepositoryFeature.STAGING_REPO_POSTFIX; | |||
BasicManagedRepository managedRepository = null; | |||
try { | |||
managedRepository = BasicManagedRepository.newFilesystemInstance( id, configuration.getName( ), Paths.get("target/repositories") ); | |||
managedRepository = BasicManagedRepository.newFilesystemInstance(id, configuration.getName(), Paths.get("target/repositories").resolve(id)); | |||
} catch (IOException e) { | |||
throw new RepositoryException(e); | |||
} | |||
@@ -200,7 +200,7 @@ public class RepositoryProviderMock implements RepositoryProvider | |||
} | |||
catch ( Exception e ) | |||
{ | |||
throw new RepositoryException( "Error", e ); | |||
throw new RepositoryException( "Error while updating remote instance: "+e.getMessage(), e ); | |||
} | |||
} |
@@ -23,19 +23,13 @@ import org.apache.archiva.checksum.ChecksumAlgorithm; | |||
import org.apache.archiva.checksum.ChecksummedFile; | |||
import org.apache.archiva.common.Try; | |||
import org.apache.archiva.common.utils.VersionUtil; | |||
import org.apache.archiva.filter.Filter; | |||
import org.apache.archiva.maven2.metadata.MavenMetadataReader; | |||
import org.apache.archiva.metadata.model.ArtifactMetadata; | |||
import org.apache.archiva.metadata.model.ProjectMetadata; | |||
import org.apache.archiva.metadata.model.ProjectVersionMetadata; | |||
import org.apache.archiva.metadata.model.facets.RepositoryProblemFacet; | |||
import org.apache.archiva.filter.Filter; | |||
import org.apache.archiva.metadata.repository.storage.ReadMetadataRequest; | |||
import org.apache.archiva.metadata.repository.storage.RelocationException; | |||
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator; | |||
import org.apache.archiva.metadata.repository.storage.RepositoryStorage; | |||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException; | |||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException; | |||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageRuntimeException; | |||
import org.apache.archiva.metadata.repository.storage.*; | |||
import org.apache.archiva.model.ArchivaRepositoryMetadata; | |||
import org.apache.archiva.model.ArtifactReference; | |||
import org.apache.archiva.model.SnapshotVersion; | |||
@@ -52,22 +46,8 @@ import org.apache.archiva.repository.storage.StorageAsset; | |||
import org.apache.archiva.xml.XMLException; | |||
import org.apache.commons.lang.ArrayUtils; | |||
import org.apache.commons.lang.StringUtils; | |||
import org.apache.maven.model.CiManagement; | |||
import org.apache.maven.model.Dependency; | |||
import org.apache.maven.model.DistributionManagement; | |||
import org.apache.maven.model.IssueManagement; | |||
import org.apache.maven.model.License; | |||
import org.apache.maven.model.MailingList; | |||
import org.apache.maven.model.Model; | |||
import org.apache.maven.model.Organization; | |||
import org.apache.maven.model.Relocation; | |||
import org.apache.maven.model.Scm; | |||
import org.apache.maven.model.building.DefaultModelBuilderFactory; | |||
import org.apache.maven.model.building.DefaultModelBuildingRequest; | |||
import org.apache.maven.model.building.ModelBuilder; | |||
import org.apache.maven.model.building.ModelBuildingException; | |||
import org.apache.maven.model.building.ModelBuildingRequest; | |||
import org.apache.maven.model.building.ModelProblem; | |||
import org.apache.maven.model.*; | |||
import org.apache.maven.model.building.*; | |||
import org.apache.maven.model.io.xpp3.MavenXpp3Reader; | |||
import org.codehaus.plexus.util.xml.pull.XmlPullParserException; | |||
import org.slf4j.Logger; | |||
@@ -84,14 +64,7 @@ import java.io.Reader; | |||
import java.nio.channels.Channels; | |||
import java.nio.charset.Charset; | |||
import java.nio.file.NoSuchFileException; | |||
import java.util.ArrayList; | |||
import java.util.Arrays; | |||
import java.util.Collection; | |||
import java.util.Collections; | |||
import java.util.Date; | |||
import java.util.HashMap; | |||
import java.util.List; | |||
import java.util.Map; | |||
import java.util.*; | |||
import java.util.function.Predicate; | |||
import java.util.stream.Collectors; | |||
@@ -465,11 +438,11 @@ public class Maven2RepositoryStorage | |||
private static Collection<String> getSortedFiles(StorageAsset dir, Filter<String> filter) { | |||
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter); | |||
return dir.list().stream().filter(f -> f.isContainer()) | |||
.filter(dFilter) | |||
.map(path -> path.getName().toString()) | |||
.sorted().collect(Collectors.toList()); | |||
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter); | |||
return dir.list().stream().filter(f -> f.isContainer()) | |||
.filter(dFilter) | |||
.map(path -> path.getName().toString()) | |||
.sorted().collect(Collectors.toList()); | |||
} | |||
@@ -489,8 +462,8 @@ public class Maven2RepositoryStorage | |||
} | |||
// scan all the directories which are potential namespaces. Any directories known to be projects are excluded | |||
Predicate<StorageAsset> dFilter = new DirectoryFilter(filter); | |||
return dir.list().stream().filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getName().toString()) | |||
.sorted().collect(Collectors.toList()); | |||
return dir.list().stream().filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getName().toString()) | |||
.sorted().collect(Collectors.toList()); | |||
} | |||
@Override | |||
@@ -502,8 +475,8 @@ public class Maven2RepositoryStorage | |||
} | |||
// scan all directories in the namespace, and only include those that are known to be projects | |||
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter); | |||
return dir.list().stream().filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getName().toString()) | |||
.sorted().collect(Collectors.toList()); | |||
return dir.list().stream().filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getName().toString()) | |||
.sorted().collect(Collectors.toList()); | |||
} | |||
@@ -532,29 +505,29 @@ public class Maven2RepositoryStorage | |||
// all files that are not metadata and not a checksum / signature are considered artifacts | |||
final Predicate<StorageAsset> dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter()); | |||
// Returns a map TRUE -> (success values), FALSE -> (Exceptions) | |||
Map<Boolean, List<Try<ArtifactMetadata>>> result = dir.list().stream().filter(dFilter).map(path -> { | |||
try { | |||
return Try.success(getArtifactFromFile(readMetadataRequest.getRepositoryId(), readMetadataRequest.getNamespace(), | |||
readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion(), | |||
path)); | |||
} catch (Exception e) { | |||
LOGGER.debug("Could not create metadata for {}: {}", path, e.getMessage(), e); | |||
return Try.<ArtifactMetadata>failure(e); | |||
} | |||
// Returns a map TRUE -> (success values), FALSE -> (Exceptions) | |||
Map<Boolean, List<Try<ArtifactMetadata>>> result = dir.list().stream().filter(dFilter).map(path -> { | |||
try { | |||
return Try.success(getArtifactFromFile(readMetadataRequest.getRepositoryId(), readMetadataRequest.getNamespace(), | |||
readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion(), | |||
path)); | |||
} catch (Exception e) { | |||
LOGGER.debug("Could not create metadata for {}: {}", path, e.getMessage(), e); | |||
return Try.<ArtifactMetadata>failure(e); | |||
} | |||
).collect(Collectors.groupingBy(Try::isSuccess)); | |||
if (result.containsKey(Boolean.FALSE) && result.get(Boolean.FALSE).size() > 0 && (!result.containsKey(Boolean.TRUE) || result.get(Boolean.TRUE).size() == 0)) { | |||
LOGGER.error("Could not get artifact metadata. Directory: {}. Number of errors {}.", dir, result.get(Boolean.FALSE).size()); | |||
Try<ArtifactMetadata> failure = result.get(Boolean.FALSE).get(0); | |||
LOGGER.error("Sample exception {}", failure.getError().getMessage(), failure.getError()); | |||
throw new RepositoryStorageRuntimeException(readMetadataRequest.getRepositoryId(), "Could not retrieve metadata of the files"); | |||
} else { | |||
if (!result.containsKey(Boolean.TRUE) || result.get(Boolean.TRUE) == null) { | |||
return Collections.emptyList(); | |||
} | |||
return result.get(Boolean.TRUE).stream().map(tr -> tr.get()).collect(Collectors.toList()); | |||
).collect(Collectors.groupingBy(Try::isSuccess)); | |||
if (result.containsKey(Boolean.FALSE) && result.get(Boolean.FALSE).size() > 0 && (!result.containsKey(Boolean.TRUE) || result.get(Boolean.TRUE).size() == 0)) { | |||
LOGGER.error("Could not get artifact metadata. Directory: {}. Number of errors {}.", dir, result.get(Boolean.FALSE).size()); | |||
Try<ArtifactMetadata> failure = result.get(Boolean.FALSE).get(0); | |||
LOGGER.error("Sample exception {}", failure.getError().getMessage(), failure.getError()); | |||
throw new RepositoryStorageRuntimeException(readMetadataRequest.getRepositoryId(), "Could not retrieve metadata of the files"); | |||
} else { | |||
if (!result.containsKey(Boolean.TRUE) || result.get(Boolean.TRUE) == null) { | |||
return Collections.emptyList(); | |||
} | |||
return result.get(Boolean.TRUE).stream().map(tr -> tr.get()).collect(Collectors.toList()); | |||
} | |||
} | |||
@@ -598,7 +571,7 @@ public class Maven2RepositoryStorage | |||
RepositoryType repositoryType = managedRepository.getRepository().getType(); | |||
if (!proxyRegistry.hasHandler(repositoryType)) { | |||
throw new ProxyDownloadException("No proxy handler found for repository type "+repositoryType, new HashMap<>()); | |||
throw new ProxyDownloadException("No proxy handler found for repository type " + repositoryType, new HashMap<>()); | |||
} | |||
RepositoryProxyHandler proxyHandler = proxyRegistry.getHandler(repositoryType).get(0); | |||
@@ -703,7 +676,7 @@ public class Maven2RepositoryStorage | |||
if (StringUtils.endsWith(artifactReference.getVersion(), VersionUtil.SNAPSHOT)) { | |||
// read maven metadata to get last timestamp | |||
StorageAsset metadataDir = managedRepositoryContent.getRepository().getAsset( filePath).getParent(); | |||
StorageAsset metadataDir = managedRepositoryContent.getRepository().getAsset(filePath).getParent(); | |||
if (!metadataDir.exists()) { | |||
return filePath; | |||
} | |||
@@ -786,11 +759,11 @@ public class Maven2RepositoryStorage | |||
private boolean isProject(StorageAsset dir, Filter<String> filter) { | |||
// scan directories for a valid project version subdirectory, meaning this must be a project directory | |||
final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter); | |||
boolean projFound = dir.list().stream().filter(dFilter) | |||
.anyMatch(path -> isProjectVersion(path)); | |||
if (projFound) { | |||
return true; | |||
} | |||
boolean projFound = dir.list().stream().filter(dFilter) | |||
.anyMatch(path -> isProjectVersion(path)); | |||
if (projFound) { | |||
return true; | |||
} | |||
// if a metadata file is present, check if this is the "artifactId" directory, marking it as a project | |||
ArchivaRepositoryMetadata metadata = readMetadata(dir); | |||
@@ -814,9 +787,9 @@ public class Maven2RepositoryStorage | |||
final String pomFile = artifactId + "-" + projectVersion + ".pom"; | |||
filter = new PomFileFilter(pomFile); | |||
} | |||
if (dir.list().stream().filter(f -> !f.isContainer()).anyMatch(filter)) { | |||
return true; | |||
} | |||
if (dir.list().stream().filter(f -> !f.isContainer()).anyMatch(filter)) { | |||
return true; | |||
} | |||
// if a metadata file is present, check if this is the "version" directory, marking it as a project version | |||
ArchivaRepositoryMetadata metadata = readMetadata(dir); | |||
if (metadata != null && projectVersion.equals(metadata.getVersion())) { | |||
@@ -870,18 +843,21 @@ public class Maven2RepositoryStorage | |||
} | |||
@Override | |||
public boolean test(StorageAsset dir) { | |||
final String name = dir.getName().toString(); | |||
public boolean test(StorageAsset file) { | |||
final Set<String> checksumExts = ChecksumAlgorithm.getAllExtensions(); | |||
final String path = file.getPath(); | |||
final String name = file.getName(); | |||
final String extension = StringUtils.substringAfterLast(name, ".").toLowerCase(); | |||
// TODO compare to logic in maven-repository-layer | |||
if (!filter.accept(name)) { | |||
if (file.isContainer()) { | |||
return false; | |||
} else if (name.startsWith(".")) { | |||
} else if (!filter.accept(name)) { | |||
return false; | |||
} else if (name.endsWith(".md5") || name.endsWith(".sha1") || name.endsWith(".asc")) { | |||
} else if (name.startsWith(".") || path.contains("/.") ) { | |||
return false; | |||
} else if (Arrays.binarySearch(IGNORED_FILES, name) >= 0) { | |||
} else if (checksumExts.contains(extension)) { | |||
return false; | |||
} else if (dir.isContainer()) { | |||
} else if (Arrays.binarySearch(IGNORED_FILES, name) >= 0) { | |||
return false; | |||
} | |||
// some files from remote repositories can have name like maven-metadata-archiva-vm-all-public.xml |
@@ -149,7 +149,7 @@ public class RepositoryModelResolver | |||
// is a SNAPSHOT ? so we can try to find locally before asking remote repositories. | |||
if ( StringUtils.contains( version, VersionUtil.SNAPSHOT ) ) | |||
{ | |||
Path localSnapshotModel = findTimeStampedSnapshotPom( groupId, artifactId, version, model.getParent().toString() ); | |||
Path localSnapshotModel = findTimeStampedSnapshotPom( groupId, artifactId, version, model.getParent().getFilePath() ); | |||
if ( localSnapshotModel != null ) | |||
{ | |||
return new FileModelSource( localSnapshotModel.toFile() ); | |||
@@ -227,11 +227,11 @@ public class RepositoryModelResolver | |||
} | |||
protected Path findTimeStampedSnapshotPom( String groupId, String artifactId, String version, | |||
String parentDirectory ) | |||
Path parentDirectory ) | |||
{ | |||
// reading metadata if there | |||
Path mavenMetadata = Paths.get( parentDirectory, METADATA_FILENAME ); | |||
Path mavenMetadata = parentDirectory.resolve( METADATA_FILENAME ); | |||
if ( Files.exists(mavenMetadata) ) | |||
{ | |||
try | |||
@@ -258,7 +258,7 @@ public class RepositoryModelResolver | |||
} | |||
} | |||
} | |||
catch (XMLException | IOException e ) | |||
catch (XMLException e ) | |||
{ | |||
log.warn( "fail to read {}, {}", mavenMetadata.toAbsolutePath(), e.getCause() ); | |||
} |
@@ -425,9 +425,15 @@ public class ManagedDefaultRepositoryContent | |||
if ( ( path != null ) && path.startsWith( repoPath ) && repoPath.length() > 0 ) | |||
{ | |||
return super.toArtifactReference( path.substring( repoPath.length() + 1 ) ); | |||
} else { | |||
repoPath = path; | |||
if (repoPath!=null) { | |||
while (repoPath.startsWith("/")) { | |||
repoPath = repoPath.substring(1); | |||
} | |||
} | |||
return super.toArtifactReference( repoPath ); | |||
} | |||
return super.toArtifactReference( path ); | |||
} | |||
// The variant with runtime exception for stream usage |
@@ -121,39 +121,6 @@ public class MavenManagedRepository extends AbstractManagedRepository | |||
return indexCreationFeature.hasIndex(); | |||
} | |||
@Override | |||
public void setLocation( URI location ) | |||
{ | |||
URI previousLocation = super.getLocation(); | |||
Path previousLoc = PathUtil.getPathFromUri(previousLocation); | |||
Path newLoc = PathUtil.getPathFromUri( location ); | |||
if (!newLoc.toAbsolutePath().equals(previousLoc.toAbsolutePath())) { | |||
super.setLocation(location); | |||
if (!Files.exists(newLoc)) { | |||
try { | |||
Files.createDirectories(newLoc); | |||
} catch (IOException e) { | |||
log.error("Could not create directory {}", location, e); | |||
} | |||
} | |||
FilesystemStorage previous = (FilesystemStorage) getStorage(); | |||
try { | |||
FilesystemStorage fs = new FilesystemStorage(newLoc, previous.getFileLockManager()); | |||
setStorage(fs); | |||
} catch (IOException e) { | |||
log.error("Could not create new filesystem storage at {}", newLoc); | |||
try { | |||
Path tmpDir = Files.createTempDirectory("tmp-repo-"+getId()); | |||
FilesystemStorage fs = new FilesystemStorage(tmpDir, previous.getFileLockManager()); | |||
setStorage(fs); | |||
} catch (IOException ex) { | |||
throw new RuntimeException("Could not setup storage for repository "+getId()); | |||
} | |||
} | |||
} | |||
} | |||
@Override | |||
public RepositoryRequestInfo getRequestInfo() { | |||
return new MavenRepositoryRequestInfo(this); |
@@ -26,6 +26,7 @@ import org.apache.archiva.configuration.Configuration; | |||
import org.apache.archiva.configuration.ManagedRepositoryConfiguration; | |||
import org.apache.archiva.maven2.model.Artifact; | |||
import org.apache.archiva.maven2.model.TreeEntry; | |||
import org.apache.archiva.repository.RepositoryRegistry; | |||
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner; | |||
import org.junit.Before; | |||
import org.junit.Test; | |||
@@ -65,6 +66,9 @@ public class DependencyTreeBuilderTestMaven3 | |||
@Named( "archivaConfiguration#test" ) | |||
ArchivaConfiguration config; | |||
@Inject | |||
RepositoryRegistry repositoryRegistry; | |||
@Before | |||
@Override | |||
public void setUp() | |||
@@ -79,6 +83,8 @@ public class DependencyTreeBuilderTestMaven3 | |||
configuration.addManagedRepository( repoConfig ); | |||
config.save( configuration ); | |||
repositoryRegistry.reload(); | |||
//artifactFactory = ((DefaultDependencyTreeBuilder)this.builder).getFactory(); | |||
} | |||
@@ -20,6 +20,7 @@ package org.apache.archiva.metadata.repository.storage.maven2.conf; | |||
*/ | |||
import org.apache.archiva.configuration.ArchivaConfiguration; | |||
import org.apache.archiva.configuration.ArchivaRuntimeConfiguration; | |||
import org.apache.archiva.configuration.Configuration; | |||
import org.apache.archiva.configuration.ConfigurationListener; | |||
import org.apache.archiva.redback.components.registry.Registry; | |||
@@ -61,6 +62,10 @@ public class MockConfiguration | |||
{ | |||
registryControl = createNiceControl(); | |||
registryMock = registryControl.createMock( Registry.class ); | |||
configuration.setArchivaRuntimeConfiguration(new ArchivaRuntimeConfiguration()); | |||
configuration.getArchivaRuntimeConfiguration().addChecksumType("sha1"); | |||
configuration.getArchivaRuntimeConfiguration().addChecksumType("sha256"); | |||
configuration.getArchivaRuntimeConfiguration().addChecksumType("md5"); | |||
} | |||
@Override | |||
@@ -174,4 +179,6 @@ public class MockConfiguration | |||
} | |||
} |
@@ -220,7 +220,7 @@ public class MavenRepositoryProviderTest | |||
public void getManagedConfiguration() throws Exception { | |||
MavenManagedRepository repo = MavenManagedRepository.newLocalInstance( "test01", "My Test repo", Paths.get("target/repositories") ); | |||
repo.setLocation( new URI("file:///this.is/a/test") ); | |||
repo.setLocation( new URI("target/this.is/a/test") ); | |||
repo.setScanned( true ); | |||
repo.setDescription( repo.getPrimaryLocale(), "This is a description" ); | |||
repo.setLayout( "maven2" ); | |||
@@ -240,7 +240,7 @@ public class MavenRepositoryProviderTest | |||
artifactCleanupFeature.setDeleteReleasedSnapshots( true ); | |||
ManagedRepositoryConfiguration cfg = provider.getManagedConfiguration( repo ); | |||
assertEquals("/this.is/a/test", cfg.getLocation()); | |||
assertEquals("target/this.is/a/test", cfg.getLocation()); | |||
assertTrue(cfg.isScanned()); | |||
assertEquals( "This is a description", cfg.getDescription() ); | |||
assertEquals("maven2", cfg.getLayout()); |
@@ -89,7 +89,7 @@ public class ArchivaIndexingTaskExecutorTest | |||
super.setUp(); | |||
Path baseDir = Paths.get(System.getProperty("basedir"), "target/test-classes").toAbsolutePath(); | |||
BasicManagedRepository repositoryConfig = BasicManagedRepository.newFilesystemInstance( "test-repo", "Test Repository", baseDir); | |||
BasicManagedRepository repositoryConfig = BasicManagedRepository.newFilesystemInstance("test-repo", "Test Repository", baseDir.resolve("test-repo")); | |||
Path repoLocation = baseDir.resolve("test-repo" ); | |||
repositoryConfig.setLocation(repoLocation.toUri() ); | |||
repositoryConfig.setLayout( "default" ); |
@@ -89,9 +89,7 @@ import javax.inject.Inject; | |||
import javax.inject.Named; | |||
import javax.ws.rs.core.Response; | |||
import java.io.IOException; | |||
import java.io.OutputStream; | |||
import java.io.OutputStreamWriter; | |||
import java.nio.file.Files; | |||
import java.nio.file.Path; | |||
import java.text.DateFormat; | |||
import java.text.SimpleDateFormat; | |||
@@ -403,7 +401,7 @@ public class DefaultRepositoriesService | |||
int lastIndex = artifactPath.lastIndexOf( '/' ); | |||
String path = artifactPath.substring( 0, lastIndex ); | |||
StorageAsset targetPath = target.getAsset( path ); | |||
StorageAsset targetDir = target.getAsset( path ); | |||
Date lastUpdatedTimestamp = Calendar.getInstance().getTime(); | |||
int newBuildNumber = 1; | |||
@@ -412,10 +410,10 @@ public class DefaultRepositoriesService | |||
StorageAsset versionMetadataFile = target.getAsset(path + "/" + MetadataTools.MAVEN_METADATA ); | |||
/* unused */ getMetadata( versionMetadataFile ); | |||
if ( !targetPath.exists() ) | |||
if ( !targetDir.exists() ) | |||
{ | |||
targetPath = target.addAsset(targetPath.getPath(), true); | |||
targetPath.create(); | |||
targetDir = target.addAsset(targetDir.getPath(), true); | |||
targetDir.create(); | |||
} | |||
String filename = artifactPath.substring( lastIndex + 1 ); | |||
@@ -423,7 +421,7 @@ public class DefaultRepositoriesService | |||
boolean fixChecksums = | |||
!( archivaAdministration.getKnownContentConsumers().contains( "create-missing-checksums" ) ); | |||
StorageAsset targetFile = target.getAsset(targetPath + "/" + filename ); | |||
StorageAsset targetFile = target.getAsset(targetDir.getPath() + "/" + filename ); | |||
if ( targetFile.exists() && target.blocksRedeployments()) | |||
{ | |||
throw new ArchivaRestServiceException( | |||
@@ -433,7 +431,7 @@ public class DefaultRepositoriesService | |||
} | |||
else | |||
{ | |||
copyFile( source, artifactFile, target, targetFile, fixChecksums ); | |||
copyFile(artifactFile, targetFile, fixChecksums ); | |||
queueRepositoryTask( target.getId(), targetFile ); | |||
} | |||
@@ -450,8 +448,8 @@ public class DefaultRepositoriesService | |||
if ( pomFile != null && pomFile.exists() ) | |||
{ | |||
StorageAsset targetPomFile = target.getAsset( targetPath.getPath() + "/" + pomFilename ); | |||
copyFile( source, pomFile, target, targetPomFile, fixChecksums ); | |||
StorageAsset targetPomFile = target.getAsset( targetDir.getPath() + "/" + pomFilename ); | |||
copyFile(pomFile, targetPomFile, fixChecksums ); | |||
queueRepositoryTask( target.getId(), targetPomFile ); | |||
@@ -460,7 +458,7 @@ public class DefaultRepositoriesService | |||
// explicitly update only if metadata-updater consumer is not enabled! | |||
if ( !archivaAdministration.getKnownContentConsumers().contains( "metadata-updater" ) ) | |||
{ | |||
updateProjectMetadata( target, targetPath, lastUpdatedTimestamp, timestamp, newBuildNumber, | |||
updateProjectMetadata( target, targetDir, lastUpdatedTimestamp, timestamp, newBuildNumber, | |||
fixChecksums, artifactTransferRequest ); | |||
@@ -521,7 +519,7 @@ public class DefaultRepositoriesService | |||
{ | |||
metadata = MavenMetadataReader.read( metadataFile.getFilePath() ); | |||
} | |||
catch (XMLException | IOException e ) | |||
catch (XMLException e ) | |||
{ | |||
throw new RepositoryMetadataException( e.getMessage(), e ); | |||
} | |||
@@ -538,7 +536,7 @@ public class DefaultRepositoriesService | |||
/* | |||
* Copies the asset to the new target. | |||
*/ | |||
private void copyFile( RepositoryStorage sourceStorage, StorageAsset sourceFile, RepositoryStorage targetStorage, StorageAsset targetPath, boolean fixChecksums ) | |||
private void copyFile(StorageAsset sourceFile, StorageAsset targetPath, boolean fixChecksums) | |||
throws IOException | |||
{ | |||
@@ -817,11 +815,12 @@ public class DefaultRepositoriesService | |||
path = path.substring( 0, index ); | |||
StorageAsset targetPath = repo.getAsset( path ); | |||
if ( targetPath.exists() ) | |||
if ( !targetPath.exists() ) | |||
{ | |||
//throw new ContentNotFoundException( | |||
// artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion() ); | |||
log.warn( "targetPath {} not found skip file deletion", targetPath ); | |||
return false; | |||
} | |||
// TODO: this should be in the storage mechanism so that it is all tied together | |||
@@ -838,11 +837,11 @@ public class DefaultRepositoriesService | |||
{ | |||
repository.deleteArtifact( artifactRef ); | |||
} | |||
} | |||
StorageAsset metadataFile = getMetadata( repo, targetPath.getPath() ); | |||
ArchivaRepositoryMetadata metadata = getMetadata( metadataFile ); | |||
StorageAsset metadataFile = getMetadata( repo, targetPath.getPath() ); | |||
ArchivaRepositoryMetadata metadata = getMetadata( metadataFile ); | |||
updateMetadata( metadata, metadataFile, lastUpdatedTimestamp, artifact ); | |||
updateMetadata( metadata, metadataFile, lastUpdatedTimestamp, artifact ); | |||
} | |||
} | |||
Collection<ArtifactMetadata> artifacts = Collections.emptyList(); | |||
@@ -21,6 +21,7 @@ package org.apache.archiva.rest.services; | |||
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; | |||
import org.apache.archiva.admin.model.beans.ManagedRepository; | |||
import org.apache.archiva.common.utils.PathUtil; | |||
import org.apache.archiva.redback.rest.api.services.RedbackServiceException; | |||
import org.apache.archiva.redback.rest.services.AbstractRestServicesTest; | |||
import org.apache.archiva.rest.api.services.ArchivaAdministrationService; | |||
@@ -53,6 +54,7 @@ import org.junit.runner.RunWith; | |||
import org.slf4j.LoggerFactory; | |||
import javax.ws.rs.core.MediaType; | |||
import java.io.File; | |||
import java.io.IOException; | |||
import java.nio.file.Files; | |||
import java.nio.file.Path; | |||
@@ -368,7 +370,7 @@ public abstract class AbstractArchivaRestTest | |||
protected ManagedRepository getTestManagedRepository() | |||
{ | |||
String location = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repo" ).toAbsolutePath().toString(); | |||
String location = getBasedir().resolve( "target/repositories/test-repo" ).toAbsolutePath().toString(); | |||
return new ManagedRepository( Locale.getDefault(), "TEST", "test", location, "default", true, true, false, "2 * * * * ?", null, | |||
false, 2, 3, true, false, "my nice repo", false ); | |||
@@ -405,7 +407,7 @@ public abstract class AbstractArchivaRestTest | |||
protected void initSourceTargetRepo() | |||
throws Exception | |||
{ | |||
Path targetRepo = Paths.get( "target/test-repo-copy" ); | |||
Path targetRepo = Paths.get( "target/repositories/test-repo-copy" ); | |||
if ( Files.exists(targetRepo) ) | |||
{ | |||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( targetRepo ); | |||
@@ -425,7 +427,7 @@ public abstract class AbstractArchivaRestTest | |||
getManagedRepositoriesService( authorizationHeader ).addManagedRepository( managedRepository ); | |||
assertNotNull( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( TARGET_REPO_ID ) ); | |||
Path originRepo = Paths.get( "target/test-origin-repo" ); | |||
Path originRepo = Paths.get( "target/repositories/test-origin-repo" ); | |||
if ( Files.exists(originRepo) ) | |||
{ | |||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( originRepo ); | |||
@@ -484,7 +486,7 @@ public abstract class AbstractArchivaRestTest | |||
} | |||
protected void createAndIndexRepo( String testRepoId, String repoPath, boolean stageNeeded ) | |||
protected void createAndIndexRepo( String testRepoId, String srcRepoPath, boolean stageNeeded ) | |||
throws ArchivaRestServiceException, IOException, RedbackServiceException | |||
{ | |||
if ( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( testRepoId ) != null ) | |||
@@ -496,19 +498,18 @@ public abstract class AbstractArchivaRestTest | |||
managedRepository.setId( testRepoId ); | |||
managedRepository.setName( "test repo" ); | |||
Path badContent = Paths.get( repoPath, "target" ); | |||
Path badContent = Paths.get( srcRepoPath, "target" ); | |||
if ( Files.exists(badContent) ) | |||
{ | |||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( badContent ); | |||
} | |||
Path file = Paths.get( repoPath ); | |||
if ( !file.isAbsolute() ) | |||
{ | |||
repoPath = getBasedir() + "/" + repoPath; | |||
} | |||
Path repoPath = getBasedir().resolve( "target" ).resolve( "repositories" ).resolve( testRepoId); | |||
FileUtils.deleteQuietly(repoPath.toFile()); | |||
FileUtils.copyDirectory(Paths.get(srcRepoPath).toFile(), repoPath.toFile()); | |||
managedRepository.setLocation( Paths.get( repoPath ).toString() ); | |||
managedRepository.setLocation( repoPath.toAbsolutePath().toString() ); | |||
String suffix = Long.toString( new Date().getTime() ); | |||
String baseDir = System.getProperty("java.io.tmpdir"); | |||
managedRepository.setIndexDirectory( |
@@ -19,6 +19,7 @@ package org.apache.archiva.rest.services; | |||
* under the License. | |||
*/ | |||
import org.apache.archiva.admin.model.beans.FileType; | |||
import org.apache.archiva.admin.model.beans.ManagedRepository; | |||
import org.apache.archiva.rest.api.model.ArchivaRepositoryStatistics; | |||
import org.apache.archiva.rest.api.services.ManagedRepositoriesService; | |||
@@ -134,6 +135,8 @@ public class ManagedRepositoriesServiceTest | |||
throws Exception | |||
{ | |||
getArchivaAdministrationService().addFileTypePattern("ignored", ".index-*/**"); | |||
getArchivaAdministrationService().addFileTypePattern("ignored", ".indexer-*/**"); | |||
String testRepoId = "test-repo"; | |||
// force guest user creation if not exists | |||
if ( getUserService( authorizationHeader ).getGuestUser() == null ) |
@@ -41,9 +41,9 @@ public class MergeRepositoriesServiceTest | |||
private static final String TEST_REPOSITORY = "test-repository"; | |||
private Path repo = Paths.get( System.getProperty( "builddir" ), "test-repository" ); | |||
private Path repo = Paths.get( System.getProperty( "basedir" ),"target","repositories-merge", "test-repository" ); | |||
private Path repoStage = Paths.get( System.getProperty( "builddir" ), "test-repository-stage" ); | |||
private Path repoStage = Paths.get( System.getProperty( "basedir" ),"target","repositories-merge", "test-repository-stage" ); | |||
@Test | |||
public void getMergeConflictedArtifacts() | |||
@@ -95,10 +95,11 @@ public class MergeRepositoriesServiceTest | |||
public void createStageRepo() | |||
throws Exception | |||
{ | |||
FileUtils.copyDirectory( Paths.get( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ).toFile(), repo.toFile() ); | |||
FileUtils.copyDirectory( Paths.get( System.getProperty( "basedir" ), "src/test/repo-with-osgi-stage" ).toFile(), | |||
repoStage.toFile() ); | |||
// FileUtils.copyDirectory( Paths.get( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ).toFile(), repo.toFile() ); | |||
createStagedNeededRepo( TEST_REPOSITORY, repo.toAbsolutePath().toString(), true ); | |||
Path srcRepo = Paths.get( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ); | |||
createStagedNeededRepo( TEST_REPOSITORY, srcRepo.toAbsolutePath().toString(), true ); | |||
FileUtils.copyDirectory( getBasedir().resolve("src/test/repo-with-osgi-stage" ).toFile(), | |||
repoStage.toFile() ); | |||
} | |||
} |
@@ -165,8 +165,8 @@ public class RepositoriesServiceTest | |||
try | |||
{ | |||
Path artifactFile = Paths.get( | |||
"target/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.core/2.2.2/org.apache.karaf.features.core-2.2.2.jar" ); | |||
Path artifactFile = getBasedir().resolve( | |||
"target/repositories/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.core/2.2.2/org.apache.karaf.features.core-2.2.2.jar" ); | |||
assertTrue( "artifact not exists:" + artifactFile, Files.exists(artifactFile) ); | |||
@@ -226,8 +226,8 @@ public class RepositoriesServiceTest | |||
try | |||
{ | |||
Path artifactFile = Paths.get( | |||
"target/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.core/2.2.2/org.apache.karaf.features.core-2.2.2.jar" ); | |||
Path artifactFile = getBasedir().resolve( | |||
"target/repositories/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.core/2.2.2/org.apache.karaf.features.core-2.2.2.jar" ); | |||
assertTrue( "artifact not exists:" + artifactFile.toString(), Files.exists(artifactFile) ); | |||
@@ -283,14 +283,14 @@ public class RepositoriesServiceTest | |||
try | |||
{ | |||
Path artifactFile = Paths.get( | |||
"target/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar" ); | |||
Path artifactFile = getBasedir().resolve( | |||
"target/repositories/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar" ); | |||
Path artifactFilemd5 = Paths.get( | |||
"target/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar.md5" ); | |||
Path artifactFilemd5 = getBasedir().resolve( | |||
"target/repositories/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar.md5" ); | |||
Path artifactFilesha1 = Paths.get( | |||
"target/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar.sha1" ); | |||
Path artifactFilesha1 = getBasedir().resolve( | |||
"target/repositories/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar.sha1" ); | |||
assertTrue( "artifact not exists:" + artifactFile, Files.exists(artifactFile) ); | |||
@@ -354,7 +354,7 @@ public class RepositoriesServiceTest | |||
new BrowseResultEntry( "org.apache.karaf.features.org.apache.karaf.features.core", true ) ); | |||
Path directory = | |||
Paths.get( "target/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.command" ); | |||
getBasedir().resolve( "target/repositories/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.command" ); | |||
assertTrue( "directory not exists", Files.exists(directory) ); | |||
@@ -497,7 +497,7 @@ public class RepositoriesServiceTest | |||
protected Path initSnapshotRepo() | |||
throws Exception | |||
{ | |||
Path targetRepo = getBasedir().resolve( "target/repo-with-snapshots" ); | |||
Path targetRepo = getBasedir().resolve( "target/repositories/repo-with-snapshots" ); | |||
if ( Files.exists(targetRepo) ) | |||
{ | |||
org.apache.archiva.common.utils.FileUtils.deleteDirectory( targetRepo ); | |||
@@ -544,7 +544,7 @@ public class RepositoriesServiceTest | |||
protected ManagedRepository getTestManagedRepository( String id, String path ) | |||
{ | |||
String location = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/" + path ).toAbsolutePath().toString(); | |||
String location = getBasedir().resolve("target/repositories/" + path ).toAbsolutePath().toString(); | |||
return new ManagedRepository( Locale.getDefault(), id, id, location, "default", true, true, true, "2 * * * * ?", null, false, 80, 80, | |||
true, false ); | |||
} |
@@ -43,6 +43,7 @@ | |||
<logger name="org.apache.archiva.rest.services" level="info"/> | |||
<logger name="org.springframework" level="info"/> | |||
<logger name="org.apache.commons.configuration" level="info"/> | |||
<logger name="org.apache.archiva.metadata.repository.storage.maven2" level="DEBUG" /> | |||
<root level="info"> | |||
<appender-ref ref="console"/> |
@@ -344,7 +344,7 @@ public class ArchivaDavResourceFactory | |||
ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read( metadataFile ); | |||
mergedMetadata = RepositoryMetadataMerge.merge( mergedMetadata, repoMetadata ); | |||
} | |||
catch (XMLException | IOException e ) | |||
catch (XMLException e ) | |||
{ | |||
throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, | |||
"Error occurred while reading metadata file." ); |
@@ -67,7 +67,7 @@ public class RepositoryServletBrowseTest | |||
// dumpResponse( response ); | |||
List<String> expectedLinks = Arrays.asList( ".index/", ".indexer/", "commons-lang/", "net/", "org/" ); | |||
List<String> expectedLinks = Arrays.asList( ".indexer/", "commons-lang/", "net/", "org/" ); | |||
Document document = Jsoup.parse( response.getContentAsString() ); | |||
Elements elements = document.getElementsByTag( "a" ); |
@@ -253,10 +253,13 @@ public class ArtifactMetadata | |||
{ | |||
return false; | |||
} | |||
/** | |||
    * Repository id is intentionally excluded from equality: artifacts that live in
    * different repositories could never compare equal if this check remained active.
if ( !repositoryId.equals( that.repositoryId ) ) | |||
{ | |||
return false; | |||
} | |||
**/ | |||
if ( sha1 != null ? !sha1.equals( that.sha1 ) : that.sha1 != null ) | |||
{ | |||
return false; |
@@ -95,7 +95,7 @@ public class DuplicateArtifactsConsumerTest | |||
assertNotNull( consumer ); | |||
config = BasicManagedRepository.newFilesystemInstance(TEST_REPO, TEST_REPO, Paths.get("target")); | |||
config = BasicManagedRepository.newFilesystemInstance(TEST_REPO, TEST_REPO, Paths.get("target").resolve(TEST_REPO)); | |||
config.setLocation( Paths.get( "target/test-repository" ).toAbsolutePath().toUri() ); | |||
metadataRepository = mock( MetadataRepository.class ); |
@@ -48,20 +48,12 @@ import javax.inject.Inject; | |||
import javax.inject.Named; | |||
import java.io.BufferedWriter; | |||
import java.io.IOException; | |||
import java.io.OutputStreamWriter; | |||
import java.nio.Buffer; | |||
import java.nio.file.Files; | |||
import java.nio.file.Path; | |||
import java.nio.file.Paths; | |||
import java.text.DateFormat; | |||
import java.text.SimpleDateFormat; | |||
import java.util.ArrayList; | |||
import java.util.Calendar; | |||
import java.util.Collections; | |||
import java.util.Date; | |||
import java.util.List; | |||
import java.util.TimeZone; | |||
import java.util.regex.Pattern; | |||
import java.util.*; | |||
/** | |||
* | |||
@@ -73,6 +65,11 @@ public class Maven2RepositoryMerger | |||
private Logger log = LoggerFactory.getLogger( getClass() ); | |||
private static final Comparator<ArtifactMetadata> META_COMPARATOR = Comparator.comparing(ArtifactMetadata::getNamespace) | |||
.thenComparing(ArtifactMetadata::getProject) | |||
.thenComparing(ArtifactMetadata::getId) | |||
.thenComparing(ArtifactMetadata::getVersion); | |||
/** | |||
* | |||
*/ | |||
@@ -244,7 +241,7 @@ public class Maven2RepositoryMerger | |||
if ( versionMetaDataFileInSourceRepo.exists() ) | |||
{//Pattern quote for windows path | |||
String relativePathToVersionMetadataFile = | |||
versionMetaDataFileInSourceRepo.getPath().toString().split( Pattern.quote( sourceRepoPath ) )[1]; | |||
getRelativeAssetPath(versionMetaDataFileInSourceRepo); | |||
Path versionMetaDataFileInTargetRepo = Paths.get( targetRepoPath, relativePathToVersionMetadataFile ); | |||
if ( !Files.exists(versionMetaDataFileInTargetRepo) ) | |||
@@ -265,7 +262,7 @@ public class Maven2RepositoryMerger | |||
if ( projectMetadataFileInSourceRepo.exists() ) | |||
{ | |||
String relativePathToProjectMetadataFile = | |||
projectMetadataFileInSourceRepo.getPath().split( Pattern.quote( sourceRepoPath ) )[1]; | |||
getRelativeAssetPath(projectMetadataFileInSourceRepo); | |||
Path projectMetadataFileInTargetRepo = Paths.get( targetRepoPath, relativePathToProjectMetadataFile ); | |||
if ( !Files.exists(projectMetadataFileInTargetRepo) ) | |||
@@ -283,6 +280,14 @@ public class Maven2RepositoryMerger | |||
} | |||
private String getRelativeAssetPath(final StorageAsset asset) { | |||
String relPath = asset.getPath(); | |||
while(relPath.startsWith("/")) { | |||
relPath = relPath.substring(1); | |||
} | |||
return relPath; | |||
} | |||
private void copyFile( Path sourceFile, Path targetFile ) | |||
throws IOException | |||
{ | |||
@@ -378,7 +383,7 @@ public class Maven2RepositoryMerger | |||
{ | |||
metadata = MavenMetadataReader.read( metadataFile ); | |||
} | |||
catch (XMLException | IOException e ) | |||
catch (XMLException e ) | |||
{ | |||
throw new RepositoryMetadataException( e.getMessage(), e ); | |||
} | |||
@@ -393,27 +398,13 @@ public class Maven2RepositoryMerger | |||
{ | |||
try | |||
{ | |||
List<ArtifactMetadata> targetArtifacts = metadataRepository.getArtifacts( targetRepo ); | |||
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepo ); | |||
List<ArtifactMetadata> conflictsArtifacts = new ArrayList<>(); | |||
for ( ArtifactMetadata targetArtifact : targetArtifacts ) | |||
{ | |||
for ( ArtifactMetadata sourceArtifact : sourceArtifacts ) | |||
{ | |||
if ( isEquals( targetArtifact, sourceArtifact ) ) | |||
{ | |||
if ( !conflictsArtifacts.contains( sourceArtifact ) ) | |||
{ | |||
conflictsArtifacts.add( sourceArtifact ); | |||
} | |||
} | |||
} | |||
} | |||
TreeSet<ArtifactMetadata> targetArtifacts = new TreeSet<>(META_COMPARATOR); | |||
targetArtifacts.addAll(metadataRepository.getArtifacts(targetRepo)); | |||
TreeSet<ArtifactMetadata> sourceArtifacts = new TreeSet<>(META_COMPARATOR); | |||
sourceArtifacts.addAll(metadataRepository.getArtifacts(sourceRepo)); | |||
sourceArtifacts.retainAll(targetArtifacts); | |||
sourceArtifacts.removeAll( conflictsArtifacts ); | |||
return conflictsArtifacts; | |||
return new ArrayList<>(sourceArtifacts); | |||
} | |||
catch ( MetadataRepositoryException e ) | |||
{ | |||
@@ -421,20 +412,5 @@ public class Maven2RepositoryMerger | |||
} | |||
} | |||
private boolean isEquals( ArtifactMetadata sourceArtifact, ArtifactMetadata targetArtifact ) | |||
{ | |||
boolean isSame = false; | |||
if ( ( sourceArtifact.getNamespace().equals( targetArtifact.getNamespace() ) ) | |||
&& ( sourceArtifact.getProject().equals( targetArtifact.getProject() ) ) && ( sourceArtifact.getId().equals( | |||
targetArtifact.getId() ) ) && ( sourceArtifact.getProjectVersion().equals( | |||
targetArtifact.getProjectVersion() ) ) ) | |||
{ | |||
isSame = true; | |||
} | |||
return isSame; | |||
} | |||
} |