import org.apache.archiva.security.ArchivaSecurityException;
import org.apache.archiva.xml.XMLException;
import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import javax.inject.Named;
import javax.ws.rs.core.Response;
-import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import javax.inject.Inject;
import javax.ws.rs.core.Response;
-import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.List;
throws ArchivaRestServiceException
{
String location = repositoryCommonValidator.removeExpressions( fileLocation );
- return new File( location ).exists();
+ return Files.exists( Paths.get( location ));
}
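This is the recurring idiom of the whole change set: existence checks move from new File( location ).exists() to Files.exists( Paths.get( location ) ). A minimal, self-contained sketch of that idiom (class name, main method and the null guard are illustrative additions, not part of the patch):

import java.nio.file.Files;
import java.nio.file.Paths;

public class PathExistenceSketch
{
    // Mirrors the migrated fileLocationExists check above; the null guard is added
    // here because Paths.get( null ) throws a NullPointerException.
    static boolean fileLocationExists( String location )
    {
        return location != null && Files.exists( Paths.get( location ) );
    }

    public static void main( String[] args )
    {
        System.out.println( fileLocationExists( System.getProperty( "java.io.tmpdir" ) ) );
    }
}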
@Override
import javax.inject.Inject;
import javax.inject.Named;
import javax.ws.rs.core.Response;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.FileSystems;
import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
null );
}
- File artifactFile = new File( source.getLocation(), artifactSourcePath );
+ Path artifactFile = Paths.get( source.getLocation(), artifactSourcePath );
- if ( !artifactFile.exists() )
+ if ( !Files.exists(artifactFile) )
{
log.error( "cannot find artifact {}", artifactTransferRequest );
throw new ArchivaRestServiceException( "cannot find artifact " + artifactTransferRequest.toString(),
int lastIndex = artifactPath.lastIndexOf( '/' );
String path = artifactPath.substring( 0, lastIndex );
- File targetPath = new File( target.getLocation(), path );
+ Path targetPath = Paths.get( target.getLocation(), path );
Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
int newBuildNumber = 1;
String timestamp = null;
- File versionMetadataFile = new File( targetPath, MetadataTools.MAVEN_METADATA );
+ Path versionMetadataFile = targetPath.resolve( MetadataTools.MAVEN_METADATA );
/* unused */ getMetadata( versionMetadataFile );
- if ( !targetPath.exists() )
+ if ( !Files.exists(targetPath) )
{
- targetPath.mkdirs();
+ Files.createDirectories( targetPath );
}
String filename = artifactPath.substring( lastIndex + 1 );
boolean fixChecksums =
!( archivaAdministration.getKnownContentConsumers().contains( "create-missing-checksums" ) );
- File targetFile = new File( targetPath, filename );
- if ( targetFile.exists() && target.isBlockRedeployments() )
+ Path targetFile = targetPath.resolve( filename );
+ if ( Files.exists(targetFile) && target.isBlockRedeployments() )
{
throw new ArchivaRestServiceException(
"artifact already exists in target repo: " + artifactTransferRequest.getTargetRepositoryId()
}
pomFilename = FilenameUtils.removeExtension( pomFilename ) + ".pom";
- File pomFile = new File(
- new File( source.getLocation(), artifactSourcePath.substring( 0, artifactPath.lastIndexOf( '/' ) ) ),
+ Path pomFile = Paths.get(source.getLocation(),
+ artifactSourcePath.substring( 0, artifactPath.lastIndexOf( '/' ) ) ,
pomFilename );
- if ( pomFile != null && pomFile.length() > 0 )
+ if ( Files.exists( pomFile ) && Files.size( pomFile ) > 0 )
{
copyFile( pomFile, targetPath, pomFilename, fixChecksums );
- queueRepositoryTask( target.getId(), new File( targetPath, pomFilename ) );
+ queueRepositoryTask( target.getId(), targetPath.resolve( pomFilename ) );
}
// explicitly update only if metadata-updater consumer is not enabled!
if ( !archivaAdministration.getKnownContentConsumers().contains( "metadata-updater" ) )
{
- updateProjectMetadata( targetPath.getAbsolutePath(), lastUpdatedTimestamp, timestamp, newBuildNumber,
+ updateProjectMetadata( targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber,
fixChecksums, artifactTransferRequest );
return true;
}
- private void queueRepositoryTask( String repositoryId, File localFile )
+ private void queueRepositoryTask( String repositoryId, Path localFile )
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
- task.setResourceFile( localFile.toPath() );
+ task.setResourceFile( localFile );
task.setUpdateRelatedArtifacts( true );
//task.setScanAll( true );
catch ( TaskQueueException e )
{
log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
- + "'].", localFile.getName() );
+ + "'].", localFile.getFileName());
}
}
- private ArchivaRepositoryMetadata getMetadata( File metadataFile )
+ private ArchivaRepositoryMetadata getMetadata( Path metadataFile )
throws RepositoryMetadataException
{
ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
- if ( metadataFile.exists() )
+ if ( Files.exists(metadataFile) )
{
try
{
- metadata = MavenMetadataReader.read( metadataFile.toPath() );
+ metadata = MavenMetadataReader.read( metadataFile );
}
catch ( XMLException e )
{
return metadata;
}
- private File getMetadata( String targetPath )
+ private Path getMetadata( String targetPath )
{
- String artifactPath = targetPath.substring( 0, targetPath.lastIndexOf( File.separatorChar ) );
+ String artifactPath = targetPath.substring( 0, targetPath.lastIndexOf( FileSystems.getDefault().getSeparator() ));
- return new File( artifactPath, MetadataTools.MAVEN_METADATA );
+ return Paths.get( artifactPath, MetadataTools.MAVEN_METADATA );
}
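The separator arithmetic above is a direct port of the old File.separatorChar logic. A Path-native alternative would avoid the manual substring entirely; this is only a sketch of that option, not what the patch does, and MAVEN_METADATA stands in for the MetadataTools constant ("maven-metadata.xml"):

import java.nio.file.Path;
import java.nio.file.Paths;

class MetadataPathSketch
{
    static final String MAVEN_METADATA = "maven-metadata.xml";

    // Resolve the metadata file in the parent directory of the given artifact path
    // without any separator handling.
    static Path metadataFor( String targetPath )
    {
        return Paths.get( targetPath ).getParent().resolve( MAVEN_METADATA );
    }
}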
- private void copyFile( File sourceFile, File targetPath, String targetFilename, boolean fixChecksums )
+ private void copyFile( Path sourceFile, Path targetPath, String targetFilename, boolean fixChecksums )
throws IOException
{
- Files.copy( sourceFile.toPath(), new File( targetPath, targetFilename ).toPath(), StandardCopyOption.REPLACE_EXISTING,
+ Files.copy( sourceFile, targetPath.resolve( targetFilename ), StandardCopyOption.REPLACE_EXISTING,
StandardCopyOption.COPY_ATTRIBUTES );
if ( fixChecksums )
{
- fixChecksums( new File( targetPath, targetFilename ) );
+ fixChecksums( targetPath.resolve( targetFilename ) );
}
}
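copyFile now delegates to Files.copy, replacing the target and preserving attributes in one call. A compact sketch of the same idiom, under the assumption that the target directory may not exist yet (Files.createDirectories is a no-op when it does):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

class CopySketch
{
    // Copy sourceFile into targetPath under targetFilename, overwriting any previous
    // copy and keeping timestamps/permissions where the file store supports it.
    static void copyInto( Path sourceFile, Path targetPath, String targetFilename )
        throws IOException
    {
        Files.createDirectories( targetPath );
        Files.copy( sourceFile, targetPath.resolve( targetFilename ),
                    StandardCopyOption.REPLACE_EXISTING,
                    StandardCopyOption.COPY_ATTRIBUTES );
    }
}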
- private void fixChecksums( File file )
+ private void fixChecksums( Path file )
{
- ChecksummedFile checksum = new ChecksummedFile( file.toPath() );
+ ChecksummedFile checksum = new ChecksummedFile( file );
checksum.fixChecksums( algorithms );
}
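fixChecksums delegates to Archiva's ChecksummedFile, which (re)writes the checksum side files for the given path. A hypothetical, library-free sketch of one such step, shown only to illustrate what "fixing" a checksum amounts to (algorithm and extension are parameters here, e.g. "SHA-1" and ".sha1"):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

class ChecksumSketch
{
    // Compute the digest of a file and write it next to it as <file><extension>.
    static void writeChecksum( Path file, String algorithm, String extension )
        throws IOException, NoSuchAlgorithmException
    {
        MessageDigest digest = MessageDigest.getInstance( algorithm );
        byte[] hash = digest.digest( Files.readAllBytes( file ) );
        StringBuilder hex = new StringBuilder();
        for ( byte b : hash )
        {
            hex.append( String.format( "%02x", b ) );
        }
        Files.write( file.resolveSibling( file.getFileName() + extension ),
                     hex.toString().getBytes() );
    }
}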
List<String> availableVersions = new ArrayList<>();
String latestVersion = artifactTransferRequest.getVersion();
- File projectDir = new File( targetPath ).getParentFile();
- File projectMetadataFile = new File( projectDir, MetadataTools.MAVEN_METADATA );
+ Path projectDir = Paths.get( targetPath ).getParent();
+ Path projectMetadataFile = projectDir.resolve( MetadataTools.MAVEN_METADATA );
ArchivaRepositoryMetadata projectMetadata = getMetadata( projectMetadataFile );
- if ( projectMetadataFile.exists() )
+ if ( Files.exists(projectMetadataFile) )
{
availableVersions = projectMetadata.getAvailableVersions();
projectMetadata.setReleasedVersion( latestVersion );
}
- RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile.toPath() );
+ RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile );
if ( fixChecksums )
{
int index = path.lastIndexOf( '/' );
path = path.substring( 0, index );
- File targetPath = new File( repoConfig.getLocation(), path );
+ Path targetPath = Paths.get( repoConfig.getLocation(), path );
- if ( !targetPath.exists() )
+ if ( !Files.exists(targetPath) )
{
//throw new ContentNotFoundException(
// artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion() );
repository.deleteArtifact( artifactRef );
}
}
- File metadataFile = getMetadata( targetPath.getAbsolutePath() );
+ Path metadataFile = getMetadata( targetPath.toAbsolutePath().toString() );
ArchivaRepositoryMetadata metadata = getMetadata( metadataFile );
updateMetadata( metadata, metadataFile, lastUpdatedTimestamp, artifact );
*
* @param metadata
*/
- private void updateMetadata( ArchivaRepositoryMetadata metadata, File metadataFile, Date lastUpdatedTimestamp,
+ private void updateMetadata( ArchivaRepositoryMetadata metadata, Path metadataFile, Date lastUpdatedTimestamp,
Artifact artifact )
throws RepositoryMetadataException
{
List<String> availableVersions = new ArrayList<>();
String latestVersion = "";
- if ( metadataFile.exists() )
+ if ( Files.exists(metadataFile) )
{
if ( metadata.getAvailableVersions() != null )
{
metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp );
metadata.setAvailableVersions( availableVersions );
- RepositoryMetadataWriter.write( metadata, metadataFile.toPath() );
- ChecksummedFile checksum = new ChecksummedFile( metadataFile.toPath() );
+ RepositoryMetadataWriter.write( metadata, metadataFile );
+ ChecksummedFile checksum = new ChecksummedFile( metadataFile );
checksum.fixChecksums( algorithms );
}
* under the License.
*/
+import org.apache.archiva.maven2.model.Artifact;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.repository.ManagedRepositoryContent;
-import org.apache.archiva.maven2.model.Artifact;
import org.apache.commons.io.FilenameUtils;
-import java.io.File;
import java.nio.file.Path;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import org.apache.archiva.redback.rest.services.AbstractRestServicesTest;
import org.apache.archiva.rest.api.services.ArchivaAdministrationService;
import org.apache.archiva.rest.api.services.ArchivaRestServiceException;
-import org.apache.archiva.rest.api.services.RedbackRuntimeConfigurationService;
import org.apache.archiva.rest.api.services.BrowseService;
import org.apache.archiva.rest.api.services.CommonServices;
import org.apache.archiva.rest.api.services.ManagedRepositoriesService;
import org.apache.archiva.rest.api.services.MergeRepositoriesService;
import org.apache.archiva.rest.api.services.NetworkProxyService;
import org.apache.archiva.rest.api.services.PingService;
+import org.apache.archiva.rest.api.services.PluginsService;
import org.apache.archiva.rest.api.services.ProxyConnectorRuleService;
import org.apache.archiva.rest.api.services.ProxyConnectorService;
+import org.apache.archiva.rest.api.services.RedbackRuntimeConfigurationService;
import org.apache.archiva.rest.api.services.RemoteRepositoriesService;
import org.apache.archiva.rest.api.services.RepositoriesService;
import org.apache.archiva.rest.api.services.RepositoryGroupService;
import org.slf4j.LoggerFactory;
import javax.ws.rs.core.MediaType;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Collections;
import java.util.Date;
-import org.apache.archiva.rest.api.services.PluginsService;
/**
* @author Olivier Lamy
public void startServer()
throws Exception
{
- File appServerBase = new File( System.getProperty( "appserver.base" ) );
+ Path appServerBase = Paths.get( System.getProperty( "appserver.base" ) );
removeAppsubFolder( appServerBase, "jcr" );
removeAppsubFolder( appServerBase, "conf" );
}
- private void removeAppsubFolder( File appServerBase, String folder )
+ private void removeAppsubFolder( Path appServerBase, String folder )
throws Exception
{
- File directory = new File( appServerBase, folder );
- if ( directory.exists() )
+ Path directory = appServerBase.resolve( folder );
+ if ( Files.exists(directory) )
{
- FileUtils.deleteDirectory( directory );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( directory );
}
}
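The tests now call org.apache.archiva.common.utils.FileUtils.deleteDirectory( Path ) instead of the commons-io File-based variant. Its actual implementation is not shown in this diff; a hypothetical equivalent built only on java.nio.file would look roughly like this:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.stream.Stream;

final class DeleteDirectorySketch
{
    // Walk the tree depth-first (children before parents) and delete each entry.
    static void deleteDirectory( Path directory ) throws IOException
    {
        if ( !Files.exists( directory ) )
        {
            return;
        }
        try ( Stream<Path> paths = Files.walk( directory ) )
        {
            paths.sorted( Comparator.reverseOrder() )
                 .forEach( p -> p.toFile().delete() );
        }
    }
}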
protected ManagedRepository getTestManagedRepository()
{
- String location = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repo" ).getAbsolutePath();
+ String location = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repo" ).toAbsolutePath().toString();
return new ManagedRepository( "TEST", "test", location, "default", true, true, false, "2 * * * * ?", null,
false, 2, 3, true, false, "my nice repo", false );
protected void initSourceTargetRepo()
throws Exception
{
- File targetRepo = new File( "target/test-repo-copy" );
- if ( targetRepo.exists() )
+ Path targetRepo = Paths.get( "target/test-repo-copy" );
+ if ( Files.exists(targetRepo) )
{
- FileUtils.deleteDirectory( targetRepo );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( targetRepo );
}
- assertFalse( targetRepo.exists() );
- targetRepo.mkdirs();
+ assertFalse( Files.exists(targetRepo) );
+ Files.createDirectories( targetRepo );
if ( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( TARGET_REPO_ID ) != null )
{
}
ManagedRepository managedRepository = getTestManagedRepository();
managedRepository.setId( TARGET_REPO_ID );
- managedRepository.setLocation( targetRepo.getCanonicalPath() );
+ managedRepository.setLocation( targetRepo.toAbsolutePath().toString() );
managedRepository.setCronExpression( "* * * * * ?" );
getManagedRepositoriesService( authorizationHeader ).addManagedRepository( managedRepository );
assertNotNull( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( TARGET_REPO_ID ) );
- File originRepo = new File( "target/test-origin-repo" );
- if ( originRepo.exists() )
+ Path originRepo = Paths.get( "target/test-origin-repo" );
+ if ( Files.exists(originRepo) )
{
- FileUtils.deleteDirectory( originRepo );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( originRepo );
}
- assertFalse( originRepo.exists() );
- FileUtils.copyDirectory( new File( "src/test/repo-with-osgi" ), originRepo );
+ assertFalse( Files.exists(originRepo) );
+ FileUtils.copyDirectory( Paths.get( "src/test/repo-with-osgi" ).toAbsolutePath().toFile(), originRepo.toAbsolutePath().toFile() );
if ( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( SOURCE_REPO_ID ) != null )
{
managedRepository = getTestManagedRepository();
managedRepository.setId( SOURCE_REPO_ID );
- managedRepository.setLocation( originRepo.getCanonicalPath() );
+ managedRepository.setLocation( originRepo.toAbsolutePath().toString() );
getManagedRepositoriesService( authorizationHeader ).addManagedRepository( managedRepository );
assertNotNull( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( SOURCE_REPO_ID ) );
managedRepository.setId( testRepoId );
managedRepository.setName( "test repo" );
- File badContent = new File( repoPath, "target" );
- if ( badContent.exists() )
+ Path badContent = Paths.get( repoPath, "target" );
+ if ( Files.exists(badContent) )
{
- FileUtils.deleteDirectory( badContent );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( badContent );
}
- File file = new File( repoPath );
+ Path file = Paths.get( repoPath );
if ( !file.isAbsolute() )
{
repoPath = getBasedir() + "/" + repoPath;
}
- managedRepository.setLocation( new File( repoPath ).getPath() );
+ managedRepository.setLocation( Paths.get( repoPath ).toString() );
managedRepository.setIndexDirectory(
System.getProperty( "java.io.tmpdir" ) + "/target/.index-" + Long.toString( new Date().getTime() ) );
}
}
- public String getBasedir()
+ public Path getBasedir()
{
- return System.getProperty( "basedir" );
+ return Paths.get(System.getProperty( "basedir" ));
}
protected void waitForScanToComplete( String repoId )
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import org.junit.Test;
import javax.ws.rs.core.MediaType;
-import java.io.File;
import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
assertNotNull( getUserService( authorizationHeader ).createGuestUser() );
}
- createAndIndexRepo( TEST_REPO_ID, new File( getBasedir(), "src/test/repo-with-osgi" ).getAbsolutePath(),
+ createAndIndexRepo( TEST_REPO_ID, getBasedir().resolve( "src/test/repo-with-osgi" ).toAbsolutePath().toString(),
false );
waitForScanToComplete( TEST_REPO_ID );
import org.apache.archiva.rest.api.model.ArtifactTransferRequest;
import org.apache.archiva.rest.api.services.RepositoriesService;
-import org.assertj.core.api.Assertions;
import org.junit.Ignore;
import org.junit.Test;
import javax.ws.rs.InternalServerErrorException;
-import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
/**
* @author Olivier Lamy
String targetRepoPath = getManagedRepositoriesService( authorizationHeader ).getManagedRepository(
TARGET_REPO_ID ).getLocation();
- File artifact = new File( targetRepoPath,
+ Path artifact = Paths.get( targetRepoPath,
"/org/apache/karaf/features/org.apache.karaf.features.core/2.2.2/org.apache.karaf.features.core-2.2.2.jar" );
- assertTrue( artifact.exists() );
- File pom = new File( targetRepoPath,
+ assertTrue( Files.exists(artifact) );
+ Path pom = Paths.get( targetRepoPath,
"/org/apache/karaf/features/org.apache.karaf.features.core/2.2.2/org.apache.karaf.features.core-2.2.2.pom" );
- assertTrue( "not exists " + pom.getPath(), pom.exists() );
+ assertTrue( "not exists " + pom, Files.exists(pom) );
// TODO find a way to force metadata generation and test it !!
}
finally
import org.apache.archiva.rest.api.services.RepositoriesService;
import org.junit.Test;
-import java.io.File;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
/**
* @author Olivier Lamy
throws Exception
{
ManagedRepositoriesService service = getManagedRepositoriesService( authorizationHeader );
- File target = new File( "target" );
+ Path target = Paths.get( "target" );
- assertTrue( service.fileLocationExists( target.getCanonicalPath() ) );
+ assertTrue( service.fileLocationExists( target.toAbsolutePath().toString() ) );
// normally should not exists :-)
assertFalse( service.fileLocationExists( "/fooofofof/foddfdofd/dedede/kdeo" ) );
RepositoriesService repositoriesService = getRepositoriesService( authorizationHeader );
createAndIndexRepo( testRepoId,
- new File( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ).getAbsolutePath() );
+ Paths.get( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ).toAbsolutePath().toString() );
repositoriesService.scanRepositoryDirectoriesNow( testRepoId );
import org.apache.archiva.maven2.model.Artifact;
import org.apache.archiva.rest.api.services.MergeRepositoriesService;
import org.apache.commons.io.FileUtils;
-import static org.assertj.core.api.Assertions.assertThat;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.List;
+import static org.assertj.core.api.Assertions.assertThat;
+
/**
* @author Olivier Lamy
*/
private static final String TEST_REPOSITORY = "test-repository";
- private File repo = new File( System.getProperty( "builddir" ), "test-repository" );
+ private Path repo = Paths.get( System.getProperty( "builddir" ), "test-repository" );
- private File repoStage = new File( System.getProperty( "builddir" ), "test-repository-stage" );
+ private Path repoStage = Paths.get( System.getProperty( "builddir" ), "test-repository-stage" );
@Test
public void getMergeConflictedArtifacts()
String mergedArtifactPomPath =
"org/apache/felix/org.apache.felix.bundlerepository/1.6.4/org.apache.felix.bundlerepository-1.6.4.pom";
- assertTrue( new File( repoStage, mergedArtifactPath ).exists() );
- assertTrue( new File( repoStage, mergedArtifactPomPath ).exists() );
+ assertTrue( Files.exists(repoStage.resolve(mergedArtifactPath)) );
+ assertTrue( Files.exists(repoStage.resolve(mergedArtifactPomPath)) );
MergeRepositoriesService service = getMergeRepositoriesService( authorizationHeader );
service.mergeRepositories( TEST_REPOSITORY + "-stage", TEST_REPOSITORY, true );
- assertTrue( new File( repo, mergedArtifactPath ).exists() );
- assertTrue( new File( repo, mergedArtifactPomPath ).exists() );
+ assertTrue( Files.exists(repo.resolve(mergedArtifactPath)) );
+ assertTrue( Files.exists(repo.resolve(mergedArtifactPomPath)) );
}
@After
deleteTestRepo( TEST_REPOSITORY );
- FileUtils.deleteDirectory( repo );
- FileUtils.deleteDirectory( repoStage );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( repo );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoStage );
}
@Before
public void createStageRepo()
throws Exception
{
- FileUtils.copyDirectory( new File( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ), repo );
- FileUtils.copyDirectory( new File( System.getProperty( "basedir" ), "src/test/repo-with-osgi-stage" ),
- repoStage );
+ FileUtils.copyDirectory( Paths.get( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ).toFile(), repo.toFile() );
+ FileUtils.copyDirectory( Paths.get( System.getProperty( "basedir" ), "src/test/repo-with-osgi-stage" ).toFile(),
+ repoStage.toFile() );
- createStagedNeededRepo( TEST_REPOSITORY, repo.getAbsolutePath(), true );
+ createStagedNeededRepo( TEST_REPOSITORY, repo.toAbsolutePath().toString(), true );
}
}
import javax.ws.rs.BadRequestException;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.core.Response;
-import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
try
{
- File artifactFile = new File(
+ Path artifactFile = Paths.get(
"target/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.core/2.2.2/org.apache.karaf.features.core-2.2.2.jar" );
- assertTrue( "artifact not exists:" + artifactFile.getPath(), artifactFile.exists() );
+ assertTrue( "artifact not exists:" + artifactFile, Files.exists(artifactFile) );
Artifact artifact = new Artifact();
artifact.setGroupId( "org.apache.karaf.features" );
repositoriesService.deleteArtifact( artifact );
- assertFalse( "artifact not deleted exists:" + artifactFile.getPath(), artifactFile.exists() );
+ assertFalse( "artifact not deleted exists:" + artifactFile, Files.exists(artifactFile) );
artifacts =
browseService.getArtifactDownloadInfos( "org.apache.karaf.features", "org.apache.karaf.features.core",
try
{
- File artifactFile = new File(
+ Path artifactFile = Paths.get(
"target/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.core/2.2.2/org.apache.karaf.features.core-2.2.2.jar" );
- assertTrue( "artifact not exists:" + artifactFile.getPath(), artifactFile.exists() );
+ assertTrue( "artifact not exists:" + artifactFile.toString(), Files.exists(artifactFile) );
Artifact artifact = new Artifact();
artifact.setGroupId( "org.apache.karaf.features" );
repositoriesService.deleteArtifact( artifact );
- assertFalse( "artifact not deleted exists:" + artifactFile.getPath(), artifactFile.exists() );
+ assertFalse( "artifact not deleted exists:" + artifactFile, Files.exists(artifactFile) );
artifacts =
browseService.getArtifactDownloadInfos( "org.apache.karaf.features", "org.apache.karaf.features.core",
try
{
- File artifactFile = new File(
+ Path artifactFile = Paths.get(
"target/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar" );
- File artifactFilemd5 = new File(
+ Path artifactFilemd5 = Paths.get(
"target/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar.md5" );
- File artifactFilesha1 = new File(
+ Path artifactFilesha1 = Paths.get(
"target/test-origin-repo/commons-logging/commons-logging/1.0.1/commons-logging-1.0.1-javadoc.jar.sha1" );
- assertTrue( "artifact not exists:" + artifactFile.getPath(), artifactFile.exists() );
+ assertTrue( "artifact not exists:" + artifactFile, Files.exists(artifactFile) );
- assertTrue( "md5 not exists:" + artifactFilemd5.getPath(), artifactFilemd5.exists() );
- assertTrue( "sha1 not exists:" + artifactFilesha1.getPath(), artifactFilesha1.exists() );
+ assertTrue( "md5 not exists:" + artifactFilemd5, Files.exists(artifactFilemd5) );
+ assertTrue( "sha1 not exists:" + artifactFilesha1, Files.exists(artifactFilesha1) );
Artifact artifact = new Artifact();
artifact.setGroupId( "commons-logging" );
repositoriesService.deleteArtifact( artifact );
- assertFalse( "artifact not deleted exists:" + artifactFile.getPath(), artifactFile.exists() );
- assertFalse( "md5 still exists:" + artifactFilemd5.getPath(), artifactFilemd5.exists() );
- assertFalse( "sha1 still exists:" + artifactFilesha1.getPath(), artifactFilesha1.exists() );
+ assertFalse( "artifact not deleted exists:" + artifactFile, Files.exists(artifactFile) );
+ assertFalse( "md5 still exists:" + artifactFilemd5, Files.exists(artifactFilemd5) );
+ assertFalse( "sha1 still exists:" + artifactFilesha1, Files.exists(artifactFilesha1) );
artifacts =
browseService.getArtifactDownloadInfos( "commons-logging", "commons-logging", "1.0.1", SOURCE_REPO_ID );
new BrowseResultEntry( "org.apache.karaf.features.org.apache.karaf.features.command", true ),
new BrowseResultEntry( "org.apache.karaf.features.org.apache.karaf.features.core", true ) );
- File directory =
- new File( "target/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.command" );
+ Path directory =
+ Paths.get( "target/test-origin-repo/org/apache/karaf/features/org.apache.karaf.features.command" );
- assertTrue( "directory not exists", directory.exists() );
+ assertTrue( "directory not exists", Files.exists(directory) );
RepositoriesService repositoriesService = getRepositoriesService( authorizationHeader );
repositoriesService.deleteGroupId( "org.apache.karaf", SOURCE_REPO_ID );
- assertFalse( "directory not exists", directory.exists() );
+ assertFalse( "directory not exists", Files.exists(directory) );
browseResult = browseService.browseGroupId( "org.apache.karaf.features", SOURCE_REPO_ID );
public void deleteSnapshot()
throws Exception
{
- File targetRepo = initSnapshotRepo();
+ Path targetRepo = initSnapshotRepo();
try
{
assertThat( artifacts ).isNotNull().isNotEmpty().hasSize( 10 );
- File artifactFile = new File( targetRepo,
+ Path artifactFile = targetRepo.resolve(
"org/apache/archiva/redback/components/spring-quartz/2.0-SNAPSHOT/spring-quartz-2.0-20120618.214127-1.jar" );
- File artifactFilemd5 = new File( targetRepo,
+ Path artifactFilemd5 = targetRepo.resolve(
"org/apache/archiva/redback/components/spring-quartz/2.0-SNAPSHOT/spring-quartz-2.0-20120618.214127-1.jar.md5" );
- File artifactFilepom = new File( targetRepo,
+ Path artifactFilepom = targetRepo.resolve(
"org/apache/archiva/redback/components/spring-quartz/2.0-SNAPSHOT/spring-quartz-2.0-20120618.214127-1.pom" );
- assertThat( artifactFile ).exists();
- assertThat( artifactFilemd5 ).exists();
- assertThat( artifactFilepom ).exists();
+ assertTrue( Files.exists(artifactFile) );
+ assertTrue( Files.exists(artifactFilemd5) );
+ assertTrue( Files.exists(artifactFilepom) );
// we delete only one snapshot
Artifact artifact =
assertThat( artifacts ).isNotNull().isNotEmpty().hasSize( 8 );
- assertThat( artifactFile ).doesNotExist();
- assertThat( artifactFilemd5 ).doesNotExist();
- assertThat( artifactFilepom ).doesNotExist();
+ assertFalse( Files.exists(artifactFile) );
+ assertFalse( Files.exists(artifactFilemd5) );
+ assertFalse( Files.exists(artifactFilepom) );
}
catch ( Exception e )
{
}
}
- protected File initSnapshotRepo()
+ protected Path initSnapshotRepo()
throws Exception
{
- File targetRepo = new File( getBasedir(), "target/repo-with-snapshots" );
- if ( targetRepo.exists() )
+ Path targetRepo = getBasedir().resolve( "target/repo-with-snapshots" );
+ if ( Files.exists(targetRepo) )
{
- FileUtils.deleteDirectory( targetRepo );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( targetRepo );
}
- assertFalse( targetRepo.exists() );
+ assertFalse( Files.exists(targetRepo) );
- FileUtils.copyDirectoryToDirectory( new File( getBasedir(), "src/test/repo-with-snapshots" ),
- targetRepo.getParentFile() );
+ FileUtils.copyDirectoryToDirectory( getBasedir().resolve( "src/test/repo-with-snapshots" ).toFile(),
+ targetRepo.getParent().toFile() );
if ( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( SNAPSHOT_REPO_ID ) != null )
{
protected ManagedRepository getTestManagedRepository( String id, String path )
{
- String location = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/" + path ).getAbsolutePath();
+ String location = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/" + path ).toAbsolutePath().toString();
return new ManagedRepository( id, id, location, "default", true, true, true, "2 * * * * ?", null, false, 80, 80,
true, false );
}
* under the License.
*/
-import static org.assertj.core.api.Assertions.*;
+import org.easymock.TestSubject;
+import org.junit.Test;
-import java.io.File;
import java.nio.file.Paths;
-import org.easymock.TestSubject;
-import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
public class ArtifactBuilderTest
{