* under the License.
*/
-import org.apache.commons.io.FileUtils;
+import org.apache.archiva.common.utils.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.Charset;
import java.nio.file.Files;
+import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
*/
public class ChecksummedFile
{
+
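+ // Checksum (.md5/.sha1) files are plain text and are always read and written as UTF-8.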
+ private static final Charset FILE_ENCODING = Charset.forName( "UTF-8" );
+
private final Logger log = LoggerFactory.getLogger( ChecksummedFile.class );
private static final Pattern METADATA_PATTERN = Pattern.compile( "maven-metadata-\\S*.xml" );
- private final File referenceFile;
+ private final Path referenceFile;
/**
* Construct a ChecksummedFile object.
*
* @param referenceFile
*/
- public ChecksummedFile( final File referenceFile )
+ public ChecksummedFile( final Path referenceFile )
{
this.referenceFile = referenceFile;
}
throws IOException
{
- try (InputStream fis = Files.newInputStream( referenceFile.toPath() ))
+ try (InputStream fis = Files.newInputStream( referenceFile ))
{
Checksum checksum = new Checksum( checksumAlgorithm );
checksum.update( fis );
* @return the checksum File that was created.
* @throws IOException if there was a problem either reading the referenceFile, or writing the checksum file.
*/
- public File createChecksum( ChecksumAlgorithm checksumAlgorithm )
+ public Path createChecksum( ChecksumAlgorithm checksumAlgorithm )
throws IOException
{
- File checksumFile = new File( referenceFile.getAbsolutePath() + "." + checksumAlgorithm.getExt() );
- Files.deleteIfExists( checksumFile.toPath() );
+ Path checksumFile = referenceFile.resolveSibling( referenceFile.getFileName() + "." + checksumAlgorithm.getExt() );
+ Files.deleteIfExists( checksumFile );
String checksum = calculateChecksum( checksumAlgorithm );
- Files.write( checksumFile.toPath(), //
- ( checksum + " " + referenceFile.getName() ).getBytes(), //
+ Files.write( checksumFile, //
+ ( checksum + " " + referenceFile.getFileName().toString() ).getBytes(), //
StandardOpenOption.CREATE_NEW );
return checksumFile;
}
* @param checksumAlgorithm the hash that we are interested in.
* @return the checksum file to return
*/
- public File getChecksumFile( ChecksumAlgorithm checksumAlgorithm )
+ public Path getChecksumFile( ChecksumAlgorithm checksumAlgorithm )
{
- return new File( referenceFile.getAbsolutePath() + "." + checksumAlgorithm.getExt() );
+ return referenceFile.resolveSibling( referenceFile.getFileName() + "." + checksumAlgorithm.getExt() );
}
/**
public boolean isValidChecksums( ChecksumAlgorithm algorithms[] )
{
- try (InputStream fis = Files.newInputStream( referenceFile.toPath() ))
+ try (InputStream fis = Files.newInputStream( referenceFile ))
{
List<Checksum> checksums = new ArrayList<>( algorithms.length );
// Create checksum object for each algorithm.
for ( ChecksumAlgorithm checksumAlgorithm : algorithms )
{
- File checksumFile = getChecksumFile( checksumAlgorithm );
+ Path checksumFile = getChecksumFile( checksumAlgorithm );
// Only add algorithm if checksum file exists.
- if ( checksumFile.exists() )
+ if ( Files.exists( checksumFile ) )
{
checksums.add( new Checksum( checksumAlgorithm ) );
}
for ( Checksum checksum : checksums )
{
ChecksumAlgorithm checksumAlgorithm = checksum.getAlgorithm();
- File checksumFile = getChecksumFile( checksumAlgorithm );
+ Path checksumFile = getChecksumFile( checksumAlgorithm );
- String rawChecksum = FileUtils.readFileToString( checksumFile );
- String expectedChecksum = parseChecksum( rawChecksum, checksumAlgorithm, referenceFile.getName() );
+ String rawChecksum = FileUtils.readFileToString( checksumFile, FILE_ENCODING );
+ String expectedChecksum = parseChecksum( rawChecksum, checksumAlgorithm, referenceFile.getFileName().toString() );
if ( !StringUtils.equalsIgnoreCase( expectedChecksum, checksum.getChecksum() ) )
{
return true;
}
- try (InputStream fis = Files.newInputStream( referenceFile.toPath() ))
+ try (InputStream fis = Files.newInputStream( referenceFile ))
{
// Parse file once, for all checksums.
Checksum.update( checksums, fis );
ChecksumAlgorithm checksumAlgorithm = checksum.getAlgorithm();
try
{
- File checksumFile = getChecksumFile( checksumAlgorithm );
+ Path checksumFile = getChecksumFile( checksumAlgorithm );
String actualChecksum = checksum.getChecksum();
- if ( checksumFile.exists() )
+ if ( Files.exists( checksumFile ) )
{
- String rawChecksum = FileUtils.readFileToString( checksumFile );
- String expectedChecksum = parseChecksum( rawChecksum, checksumAlgorithm, referenceFile.getName() );
+ String rawChecksum = FileUtils.readFileToString( checksumFile, FILE_ENCODING );
+ String expectedChecksum = parseChecksum( rawChecksum, checksumAlgorithm, referenceFile.getFileName().toString() );
if ( !StringUtils.equalsIgnoreCase( expectedChecksum, actualChecksum ) )
{
// create checksum (again)
- FileUtils.writeStringToFile( checksumFile, actualChecksum + " " + referenceFile.getName() );
+ FileUtils.writeStringToFile( checksumFile, FILE_ENCODING, actualChecksum + " " + referenceFile.getFileName().toString() );
}
}
else
{
- FileUtils.writeStringToFile( checksumFile, actualChecksum + " " + referenceFile.getName() );
+ FileUtils.writeStringToFile( checksumFile, FILE_ENCODING, actualChecksum + " " + referenceFile.getFileName().toString() );
}
}
catch ( IOException e )
*/
import junit.framework.TestCase;
-import org.apache.archiva.common.utils.FileUtil;
+import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.junit.runner.RunWith;
-import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
/**
* AbstractChecksumTestCase
public abstract class AbstractChecksumTestCase
extends TestCase
{
- public File getTestOutputDir()
+ public Path getTestOutputDir()
{
- File dir = new File( FileUtil.getBasedir(), "target/test-output/" + getName() );
- if ( dir.exists() == false )
+ Path dir = Paths.get( FileUtils.getBasedir(), "target/test-output/" + getName() );
+ if ( !Files.exists( dir ) )
{
- if ( dir.mkdirs() == false )
+ try
{
- fail( "Unable to create test output directory: " + dir.getAbsolutePath() );
+ Files.createDirectories( dir );
+ }
+ catch ( IOException e )
+ {
+ fail( "Unable to create test output directory: " + dir.toAbsolutePath() );
}
}
return dir;
}
- public File getTestResource( String filename )
+ public Path getTestResource( String filename )
{
- File dir = new File( FileUtil.getBasedir(), "src/test/resources" );
- File file = new File( dir, filename );
- if ( file.exists() == false )
+ Path dir = Paths.get( FileUtils.getBasedir(), "src/test/resources" );
+ Path file = dir.resolve( filename );
+ if ( !Files.exists( file ) )
{
- fail( "Test Resource does not exist: " + file.getAbsolutePath() );
+ fail( "Test Resource does not exist: " + file.toAbsolutePath() );
}
return file;
}
* under the License.
*/
-import java.io.File;
-import java.nio.file.Paths;
-
import junit.framework.TestCase;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
+import java.nio.file.Paths;
+
/**
* ChecksumAlgorithmTest
*
* under the License.
*/
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
import junit.framework.TestCase;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
/**
* ChecksumTest
*
* under the License.
*/
-import org.apache.commons.io.FileUtils;
+import org.apache.archiva.common.utils.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.LoggerFactory;
-import java.io.File;
import java.io.IOException;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
/**
* ChecksummedFileTest
private static final String REMOTE_METADATA_MD5 = "d41d8cd98f00b204e9800998ecf8427e";
+ private static final Charset FILE_ENCODING = Charset.forName( "UTF-8" );
+
@Before
public void cleanTestDir()
}
}
- private File createTestableJar( String filename )
+ private Path createTestableJar( String filename )
throws IOException
{
- File srcFile = getTestResource( filename );
- File destFile = new File( getTestOutputDir(), srcFile.getName() );
- FileUtils.copyFile( srcFile, destFile );
+ Path srcFile = getTestResource( filename );
+ Path destFile = getTestOutputDir().resolve( srcFile.getFileName() );
+ Files.copy( srcFile, destFile, StandardCopyOption.REPLACE_EXISTING );
return destFile;
}
- private File createTestableJar( String filename, boolean copySha1, boolean copyMd5 )
+ private Path createTestableJar( String filename, boolean copySha1, boolean copyMd5 )
throws IOException
{
- File srcFile = getTestResource( filename );
- File jarFile = new File( getTestOutputDir(), srcFile.getName() );
- FileUtils.copyFile( srcFile, jarFile );
+ Path srcFile = getTestResource( filename );
+ Path jarFile = getTestOutputDir().resolve( srcFile.getFileName() );
+ Files.copy( srcFile, jarFile, StandardCopyOption.REPLACE_EXISTING );
if ( copySha1 )
{
- File srcSha1 = new File( srcFile.getAbsolutePath() + ".sha1" );
- File sha1File = new File( jarFile.getAbsolutePath() + ".sha1" );
+ Path srcSha1 = srcFile.resolveSibling( srcFile.getFileName() + ".sha1" );
+ Path sha1File = jarFile.resolveSibling( jarFile.getFileName() + ".sha1" );
- FileUtils.copyFile( srcSha1, sha1File );
+ Files.copy( srcSha1, sha1File, StandardCopyOption.REPLACE_EXISTING );
}
if ( copyMd5 )
{
- File srcMd5 = new File( srcFile.getAbsolutePath() + ".md5" );
- File md5File = new File( jarFile.getAbsolutePath() + ".md5" );
+ Path srcMd5 = srcFile.resolveSibling( srcFile.getFileName() + ".md5" );
+ Path md5File = jarFile.resolveSibling( jarFile.getFileName() + ".md5" );
- FileUtils.copyFile( srcMd5, md5File );
+ Files.copy( srcMd5, md5File, StandardCopyOption.REPLACE_EXISTING );
}
return jarFile;
public void testCalculateChecksumMd5()
throws IOException
{
- File testfile = getTestResource( "examples/redback-authz-open.jar" );
+ Path testfile = getTestResource( "examples/redback-authz-open.jar" );
ChecksummedFile checksummedFile = new ChecksummedFile( testfile );
String expectedChecksum = "f42047fe2e177ac04d0df7aa44d408be";
String actualChecksum = checksummedFile.calculateChecksum( ChecksumAlgorithm.MD5 );
public void testCalculateChecksumSha1()
throws IOException
{
- File testfile = getTestResource( "examples/redback-authz-open.jar" );
+ Path testfile = getTestResource( "examples/redback-authz-open.jar" );
ChecksummedFile checksummedFile = new ChecksummedFile( testfile );
String expectedChecksum = "2bb14b388973351b0a4dfe11d171965f59cc61a1";
String actualChecksum = checksummedFile.calculateChecksum( ChecksumAlgorithm.SHA1 );
public void testCreateChecksum()
throws IOException
{
- File testableJar = createTestableJar( "examples/redback-authz-open.jar" );
+ Path testableJar = createTestableJar( "examples/redback-authz-open.jar" );
ChecksummedFile checksummedFile = new ChecksummedFile( testableJar );
checksummedFile.createChecksum( ChecksumAlgorithm.SHA1 );
- File hashFile = checksummedFile.getChecksumFile( ChecksumAlgorithm.SHA1 );
- assertTrue( "ChecksumAlgorithm file should exist.", hashFile.exists() );
- String hashContents = FileUtils.readFileToString( hashFile );
+ Path hashFile = checksummedFile.getChecksumFile( ChecksumAlgorithm.SHA1 );
+ assertTrue( "ChecksumAlgorithm file should exist.", Files.exists( hashFile ) );
+ String hashContents = FileUtils.readFileToString( hashFile, FILE_ENCODING );
hashContents = StringUtils.trim( hashContents );
assertEquals( "2bb14b388973351b0a4dfe11d171965f59cc61a1 redback-authz-open.jar", hashContents );
}
public void testFixChecksum()
throws IOException
{
- File jarFile = createTestableJar( "examples/redback-authz-open.jar" );
- File sha1File = new File( jarFile.getAbsolutePath() + ".sha1" );
+ Path jarFile = createTestableJar( "examples/redback-authz-open.jar" );
+ Path sha1File = jarFile.resolveSibling( jarFile.getFileName() + ".sha1" );
// A typical scenario seen in the wild.
- FileUtils.writeStringToFile( sha1File, "sha1sum: redback-authz-open.jar: No such file or directory" );
+ FileUtils.writeStringToFile( sha1File, FILE_ENCODING, "sha1sum: redback-authz-open.jar: No such file or directory" );
ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );
assertFalse( "ChecksummedFile.isValid(SHA1) == false",
@Test
public void testGetChecksumFile()
{
- ChecksummedFile checksummedFile = new ChecksummedFile( new File( "test.jar" ) );
- assertEquals( "test.jar.sha1", checksummedFile.getChecksumFile( ChecksumAlgorithm.SHA1 ).getName() );
+ ChecksummedFile checksummedFile = new ChecksummedFile( Paths.get( "test.jar" ) );
+ assertEquals( "test.jar.sha1", checksummedFile.getChecksumFile( ChecksumAlgorithm.SHA1 ).getFileName().toString() );
}
@Test
public void testIsValidChecksum()
throws IOException
{
- File jarFile = createTestableJar( "examples/redback-authz-open.jar", true, false );
+ Path jarFile = createTestableJar( "examples/redback-authz-open.jar", true, false );
ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );
assertTrue( "ChecksummedFile.isValid(SHA1)", checksummedFile.isValidChecksum( ChecksumAlgorithm.SHA1 ) );
public void testIsValidChecksumInvalidSha1Format()
throws IOException
{
- File jarFile = createTestableJar( "examples/redback-authz-open.jar" );
- File sha1File = new File( jarFile.getAbsolutePath() + ".sha1" );
+ Path jarFile = createTestableJar( "examples/redback-authz-open.jar" );
+ Path sha1File = jarFile.resolveSibling( jarFile.getFileName() + ".sha1" );
// A typical scenario seen in the wild.
- FileUtils.writeStringToFile( sha1File, "sha1sum: redback-authz-open.jar: No such file or directory" );
+ FileUtils.writeStringToFile( sha1File, FILE_ENCODING, "sha1sum: redback-authz-open.jar: No such file or directory" );
ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );
assertFalse( "ChecksummedFile.isValid(SHA1)", checksummedFile.isValidChecksum( ChecksumAlgorithm.SHA1 ) );
public void testIsValidChecksumNoChecksumFiles()
throws IOException
{
- File jarFile = createTestableJar( "examples/redback-authz-open.jar", false, false );
+ Path jarFile = createTestableJar( "examples/redback-authz-open.jar", false, false );
ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );
assertFalse( "ChecksummedFile.isValid(SHA1,MD5)", checksummedFile.isValidChecksums(
public void testIsValidChecksumSha1AndMd5()
throws IOException
{
- File jarFile = createTestableJar( "examples/redback-authz-open.jar", true, true );
+ Path jarFile = createTestableJar( "examples/redback-authz-open.jar", true, true );
ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );
assertTrue( "ChecksummedFile.isValid(SHA1,MD5)", checksummedFile.isValidChecksums(
public void testIsValidChecksumSha1NoMd5()
throws IOException
{
- File jarFile = createTestableJar( "examples/redback-authz-open.jar", true, false );
+ Path jarFile = createTestableJar( "examples/redback-authz-open.jar", true, false );
ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );
assertTrue( "ChecksummedFile.isValid(SHA1)", checksummedFile.isValidChecksums(
String expected = SERVLETAPI_SHA1
+ " /home/projects/maven/repository-staging/to-ibiblio/maven2/servletapi/servletapi/2.4/servletapi-2.4.pom";
- File testfile = getTestResource( "examples/redback-authz-open.jar" );
+ Path testfile = getTestResource( "examples/redback-authz-open.jar" );
ChecksummedFile checksummedFile = new ChecksummedFile( testfile );
String s = checksummedFile.parseChecksum( expected, ChecksumAlgorithm.SHA1,
"servletapi/servletapi/2.4/servletapi-2.4.pom" );
throws IOException
{
String expected = SERVLETAPI_SHA1 + " -";
- File testfile = getTestResource( "examples/redback-authz-open.jar" );
+ Path testfile = getTestResource( "examples/redback-authz-open.jar" );
ChecksummedFile checksummedFile = new ChecksummedFile( testfile );
String s = checksummedFile.parseChecksum( expected, ChecksumAlgorithm.SHA1,
"servletapi/servletapi/2.4/servletapi-2.4.pom" );
throws IOException
{
String expected = "SHA1(-)=" + SERVLETAPI_SHA1;
- File testfile = getTestResource( "examples/redback-authz-open.jar" );
+ Path testfile = getTestResource( "examples/redback-authz-open.jar" );
ChecksummedFile checksummedFile = new ChecksummedFile( testfile );
String s = checksummedFile.parseChecksum( expected, ChecksumAlgorithm.SHA1,
"servletapi/servletapi/2.4/servletapi-2.4.pom" );
throws IOException
{
String expected = REMOTE_METADATA_SHA1 + " /home/test/repository/examples/metadata/maven-metadata.xml";
- File testfile = getTestResource( "examples/metadata/maven-metadata-remote.xml" );
+ Path testfile = getTestResource( "examples/metadata/maven-metadata-remote.xml" );
ChecksummedFile checksummedFile = new ChecksummedFile( testfile );
try
throws IOException
{
String expected = REMOTE_METADATA_MD5 + " ./examples/metadata/maven-metadata.xml";
- File testfile = getTestResource( "examples/metadata/maven-metadata-remote.xml" );
+ Path testfile = getTestResource( "examples/metadata/maven-metadata-remote.xml" );
ChecksummedFile checksummedFile = new ChecksummedFile( testfile );
try
+++ /dev/null
-package org.apache.archiva.common.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-
-public class FileUtil
-{
- public static String getBasedir()
- {
- String basedir = System.getProperty( "basedir" );
- if ( basedir == null )
- {
- basedir = new File( "" ).getAbsolutePath();
- }
-
- return basedir;
- }
-}
\ No newline at end of file
* under the License.
*/
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.IOException;
+import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
import java.util.Comparator;
import java.util.Optional;
*/
public class FileUtils
{
+ private static final Logger log = LoggerFactory.getLogger( FileUtils.class );
/**
* Deletes the directory recursively and quietly.
*
throw new IOException("Error during recursive delete of "+dir.toAbsolutePath());
}
}
+
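+ /**
+ * Reads the whole file into a string using the given encoding.
+ * If the file cannot be read, the error is logged and an empty string is returned.
+ *
+ * @param file the file to read
+ * @param encoding the character encoding used for decoding
+ * @return the file content, or an empty string if reading failed
+ */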
+ public static String readFileToString( Path file, Charset encoding )
+ {
+ try
+ {
+ return new String(Files.readAllBytes( file ), encoding );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not read from file {}", file, e );
+ return "";
+ }
+ }
+
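+ /**
+ * Writes the given string to the file using the given encoding, creating the file
+ * if necessary and truncating any existing content. Errors are logged and otherwise ignored.
+ *
+ * @param file the file to write to
+ * @param encoding the character encoding used for encoding the string
+ * @param value the string to write
+ */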
+ public static void writeStringToFile( Path file, Charset encoding, String value )
+ {
+ try
+ {
+ Files.write( file, value.getBytes( encoding ), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not write to file {}", file, e );
+ }
+ }
+
+ /**
+ * Return the base directory
+ * @return the base directory path, taken from the basedir system property or falling back to the current working directory
+ */
+ public static String getBasedir()
+ {
+ String basedir = System.getProperty( "basedir" );
+ if ( basedir == null )
+ {
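+ // No basedir system property set; fall back to the current working directory.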
+ basedir = Paths.get("").toAbsolutePath().toString();
+ }
+
+ return basedir;
+ }
}
import java.nio.file.Paths;
import java.util.List;
import javax.inject.Inject;
-import org.apache.archiva.common.utils.FileUtil;
+
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
public static Path getTestFile( String path )
{
- return Paths.get( FileUtil.getBasedir(), path );
+ return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
}
protected <T> T lookup( Class<T> clazz, String hint )
public void testGetConfigurationFromDefaultsWithDefaultRepoLocationAlreadyExisting()
throws Exception
{
- Path repo = Paths.get( FileUtil.getBasedir(), "target/test-classes/existing_snapshots" );
+ Path repo = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-classes/existing_snapshots" );
Files.createDirectories(repo);
- repo = Paths.get( FileUtil.getBasedir(), "target/test-classes/existing_internal" );
+ repo = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-classes/existing_internal" );
Files.createDirectories(repo);
String existingTestDefaultArchivaConfigFile = FileUtils.readFileToString(
getTestFile( "target/test-classes/org/apache/archiva/configuration/test-default-archiva.xml" ).toFile(), FILE_ENCODING );
existingTestDefaultArchivaConfigFile =
- StringUtils.replace( existingTestDefaultArchivaConfigFile, "${appserver.base}", FileUtil.getBasedir() );
+ StringUtils.replace( existingTestDefaultArchivaConfigFile, "${appserver.base}", org.apache.archiva.common.utils.FileUtils.getBasedir() );
- Path generatedTestDefaultArchivaConfigFile = Paths.get( FileUtil.getBasedir(),
+ Path generatedTestDefaultArchivaConfigFile = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
"target/test-classes/org/apache/archiva/configuration/default-archiva.xml" );
FileUtils.writeStringToFile( generatedTestDefaultArchivaConfigFile.toFile(), existingTestDefaultArchivaConfigFile,
* under the License.
*/
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.redback.components.registry.RegistryException;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils;
public static Path getTestFile( String path )
{
- return Paths.get( FileUtil.getBasedir(), path );
+ return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
}
protected <T> T lookup( Class<T> clazz, String hint )
if ( checksumFile.exists( ) )
{
- checksum = new ChecksummedFile( artifactFile );
+ checksum = new ChecksummedFile( artifactFile.toPath() );
try
{
if ( !checksum.isValidChecksum( checksumAlgorithm ) )
}
else if ( !checksumFile.exists( ) )
{
- checksum = new ChecksummedFile( artifactFile );
+ checksum = new ChecksummedFile( artifactFile.toPath() );
try
{
checksum.createChecksum( checksumAlgorithm );
Path md5Path = Paths.get( repoConfig.getLocation(), path + ".md5" );
- ChecksummedFile checksum = new ChecksummedFile( new File( repoConfig.getLocation(), path ) );
+ ChecksummedFile checksum = new ChecksummedFile( Paths.get( repoConfig.getLocation(), path ) );
Assertions.assertThat( sha1Path.toFile() ).exists();
Assertions.assertThat( md5Path.toFile() ).exists();
import junit.framework.TestCase;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridge;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils;
import org.apache.maven.artifact.Artifact;
public static File getTestFile( String path )
{
- return new File( FileUtil.getBasedir(), path );
+ return new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
}
private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
Artifact artifact =
createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0", "maven-plugin" );
artifact.setFile(
- new File( FileUtil.getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
+ new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
artifactConverter.convert( artifact, targetRepository );
// There is a warning but I can't figure out how to look at it. Eyeballing the results it appears
// the plugin is being coverted correctly.
import org.apache.archiva.admin.repository.managed.DefaultManagedRepositoryAdmin;
import org.apache.archiva.admin.repository.proxyconnector.DefaultProxyConnectorAdmin;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridge;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
{
super.setUp();
- FileUtils.deleteDirectory( new File( FileUtil.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ) );
- assertFalse( new File( FileUtil.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ).exists() );
+ FileUtils.deleteDirectory( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ) );
+ assertFalse( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ).exists() );
- FileUtils.deleteDirectory( new File( FileUtil.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ) );
- assertFalse( new File( FileUtil.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ).exists() );
+ FileUtils.deleteDirectory( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ) );
+ assertFalse( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ).exists() );
archivaConfigControl = EasyMock.createControl();
nexusIndexer.removeIndexingContext( indexingContext, true );
}
- FileUtils.deleteDirectory( new File( FileUtil.getBasedir(), "/target/repos/" + TEST_REPO_1 ) );
- assertFalse( new File( FileUtil.getBasedir(), "/target/repos/" + TEST_REPO_1 ).exists() );
+ FileUtils.deleteDirectory( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 ) );
+ assertFalse( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 ).exists() );
- FileUtils.deleteDirectory( new File( FileUtil.getBasedir(), "/target/repos/" + TEST_REPO_2 ) );
- assertFalse( new File( FileUtil.getBasedir(), "/target/repos/" + TEST_REPO_2 ).exists() );
+ FileUtils.deleteDirectory( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 ) );
+ assertFalse( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 ).exists() );
super.tearDown();
}
{
ManagedRepositoryConfiguration repositoryConfig = new ManagedRepositoryConfiguration();
repositoryConfig.setId( repository );
- repositoryConfig.setLocation( FileUtil.getBasedir() + "/target/repos/" + repository );
+ repositoryConfig.setLocation( org.apache.archiva.common.utils.FileUtils.getBasedir() + "/target/repos/" + repository );
File f = new File( repositoryConfig.getLocation() );
if ( !f.exists() )
{
nexusIndexer.removeIndexingContext( context, true );
}
- File indexerDirectory = new File( FileUtil.getBasedir(), "/target/repos/" + repository + "/.indexer" );
+ File indexerDirectory = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + repository + "/.indexer" );
if ( indexerDirectory.exists() )
{
assertFalse( indexerDirectory.exists() );
- File lockFile = new File( FileUtil.getBasedir(), "/target/repos/" + repository + "/.indexer/write.lock" );
+ File lockFile = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + repository + "/.indexer/write.lock" );
if ( lockFile.exists() )
{
lockFile.delete();
assertFalse( lockFile.exists() );
- File repo = new File( FileUtil.getBasedir(), "src/test/" + repository );
+ File repo = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + repository );
assertTrue( repo.exists() );
File indexDirectory =
- new File( FileUtil.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
+ new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
indexDirectory.deleteOnExit();
FileUtils.deleteDirectory( indexDirectory );
* under the License.
*/
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.indexer.util.SearchUtil;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.maven.index_shaded.lucene.index.IndexUpgrader;
throws Exception
{
List<File> files = new ArrayList<>();
- files.add( Paths.get( FileUtil.getBasedir(), "src/test", TEST_REPO_1,
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test", TEST_REPO_1,
"/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ).toFile() );
- files.add( Paths.get( FileUtil.getBasedir(), "src/test", TEST_REPO_1,
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test", TEST_REPO_1,
"/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ).toFile() );
- files.add( Paths.get( FileUtil.getBasedir(), "src/test", TEST_REPO_1,
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test", TEST_REPO_1,
"org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ).toFile() );
createIndex( TEST_REPO_1, files, scan );
throws Exception
{
List<File> files = new ArrayList<>();
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/org/apache/archiva/archiva-webapp/1.0/archiva-webapp-1.0.war" ) );
- files.add( new File( FileUtil.getBasedir(),
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(),
"src/test/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(),
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(),
"src/test/" + TEST_REPO_1 + "/com/classname-search/1.0/classname-search-1.0.jar" ) );
createIndex( TEST_REPO_1, files, scan );
{
List<File> files = new ArrayList<>();
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.pom" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0-sources.jar" ) );
createIndex( TEST_REPO_1, files, scan );
createSimpleIndex( true );
List<File> files = new ArrayList<>();
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_2
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_2
+ "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_2
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_2
+ "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
createIndex( TEST_REPO_2, files, false );
throws Exception
{
List<File> files = new ArrayList<>();
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_2
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_2
+ "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_2
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_2
+ "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
createIndex( TEST_REPO_2, files, false );
throws Exception
{
List<File> files = new ArrayList<>();
- files.add( new File( FileUtil.getBasedir(),
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(),
"src/test/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
- files.add( new File( FileUtil.getBasedir(), "src/test/" + TEST_REPO_1
+ files.add( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + TEST_REPO_1
+ "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
createIndex( TEST_REPO_1, files, true );
if ( FAIL.equals( policySetting ) )
{
- ChecksummedFile checksum = new ChecksummedFile( localFile );
+ ChecksummedFile checksum = new ChecksummedFile( localFile.toPath() );
if ( checksum.isValidChecksums( algorithms ) )
{
return;
if ( FIX.equals( policySetting ) )
{
- ChecksummedFile checksum = new ChecksummedFile( localFile );
+ ChecksummedFile checksum = new ChecksummedFile( localFile.toPath() );
if ( checksum.fixChecksums( algorithms ) )
{
log.debug( "Checksum policy set to FIX, checksum files have been updated." );
*/
import org.apache.commons.io.FileUtils;
-import org.apache.archiva.common.utils.FileUtil;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
public static File getTestFile( String path )
{
- return new File( FileUtil.getBasedir(), path );
+ return new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
}
}
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ConfigurationNames;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.model.RepositoryTask;
import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.SystemUtils;
import javax.inject.Named;
import java.io.File;
import java.io.IOException;
-import java.lang.reflect.Proxy;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.archiva.admin.model.beans.RemoteRepository;
import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
import org.apache.archiva.admin.repository.AbstractRepositoryAdmin;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ProxyConnectorConfiguration;
import org.apache.archiva.configuration.RemoteRepositoryConfiguration;
import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
-import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
RepositoryMetadataWriter.write( metadata, metadataFile );
- ChecksummedFile checksum = new ChecksummedFile( metadataFile.toFile() );
+ ChecksummedFile checksum = new ChecksummedFile( metadataFile );
checksum.fixChecksums( algorithms );
}
// Save the metadata model to disk.
RepositoryMetadataWriter.write( metadata, metadataFile );
- ChecksummedFile checksum = new ChecksummedFile( metadataFile.toFile() );
+ ChecksummedFile checksum = new ChecksummedFile( metadataFile );
checksum.fixChecksums( algorithms );
}
// Save the metadata model to disk.
RepositoryMetadataWriter.write( metadata, metadataFile );
- ChecksummedFile checksum = new ChecksummedFile( metadataFile.toFile() );
+ ChecksummedFile checksum = new ChecksummedFile( metadataFile );
checksum.fixChecksums( algorithms );
}
*/
import org.apache.commons.io.FileUtils;
-import org.apache.archiva.common.utils.FileUtil;
import java.io.File;
import java.io.IOException;
public class CopyFileEventTest
extends AbstractFileEventTest
{
- private File testDir = new File( FileUtil.getBasedir(), "target/transaction-tests/copy-file" );
+ private File testDir = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/transaction-tests/copy-file" );
private File testDest = new File( testDir, "test-file.txt" );
- private File testSource = new File( FileUtil.getBasedir(), "target/transaction-tests/test-file.txt" );
+ private File testSource = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/transaction-tests/test-file.txt" );
private File testDestChecksum;
{
super.tearDown();
- FileUtils.deleteDirectory( new File( FileUtil.getBasedir(), "target/transaction-tests" ) );
+ FileUtils.deleteDirectory( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/transaction-tests" ) );
}
@Override
import java.io.File;
import org.apache.commons.io.FileUtils;
-import org.apache.archiva.common.utils.FileUtil;
import org.junit.After;
import org.junit.Test;
public class CreateFileEventTest
extends AbstractFileEventTest
{
- private File testDir = new File( FileUtil.getBasedir(), "target/transaction-tests/create-file" );
+ private File testDir = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/transaction-tests/create-file" );
@Test
public void testCreateCommitRollback()
assertChecksumRollback( testFile );
assertFalse( "Test file parent directories has been rolledback too", testDir.exists() );
- assertTrue( "target directory still exists", new File( FileUtil.getBasedir(), "target" ).exists() );
+ assertTrue( "target directory still exists", new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target" ).exists() );
}
@Test
{
super.tearDown();
- FileUtils.deleteDirectory( new File( FileUtil.getBasedir(), "target/transaction-tests" ) );
+ FileUtils.deleteDirectory( new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/transaction-tests" ) );
}
}
import org.apache.archiva.admin.model.beans.RemoteRepository;
import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
-import org.apache.archiva.common.utils.FileUtil;
+import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.maven.index.FlatSearchRequest;
import org.apache.maven.index.FlatSearchResponse;
{
RemoteRepository remoteRepository = new RemoteRepository();
File indexDirectory =
- new File( FileUtil.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
+ new File( FileUtils.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
indexDirectory.mkdirs();
indexDirectory.deleteOnExit();
private void fixChecksums( File file )
{
- ChecksummedFile checksum = new ChecksummedFile( file );
+ ChecksummedFile checksum = new ChecksummedFile( file.toPath() );
checksum.fixChecksums( algorithms );
}
metadata.setAvailableVersions( availableVersions );
RepositoryMetadataWriter.write( metadata, metadataFile.toPath() );
- ChecksummedFile checksum = new ChecksummedFile( metadataFile );
+ ChecksummedFile checksum = new ChecksummedFile( metadataFile.toPath() );
checksum.fixChecksums( algorithms );
}
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.archiva.admin.model.beans.ManagedRepository;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.redback.rest.api.services.RedbackServiceException;
import org.apache.archiva.redback.rest.services.AbstractRestServicesTest;
import org.apache.archiva.rest.api.services.ArchivaAdministrationService;
import org.apache.commons.lang.StringUtils;
import org.apache.cxf.jaxrs.client.JAXRSClientFactory;
import org.apache.cxf.jaxrs.client.WebClient;
-import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
protected ManagedRepository getTestManagedRepository()
{
- String location = new File( FileUtil.getBasedir(), "target/test-repo" ).getAbsolutePath();
+ String location = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repo" ).getAbsolutePath();
return new ManagedRepository( "TEST", "test", location, "default", true, true, false, "2 * * * * ?", null,
false, 2, 3, true, false, "my nice repo", false );
*/
import org.apache.archiva.admin.model.beans.ManagedRepository;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.maven2.model.Artifact;
import org.apache.archiva.rest.api.model.BrowseResult;
import org.apache.archiva.rest.api.model.BrowseResultEntry;
protected ManagedRepository getTestManagedRepository( String id, String path )
{
- String location = new File( FileUtil.getBasedir(), "target/" + path ).getAbsolutePath();
+ String location = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/" + path ).getAbsolutePath();
return new ManagedRepository( id, id, location, "default", true, true, true, "2 * * * * ?", null, false, 80, 80,
true, false );
}
private void fixChecksums( File file )
{
- ChecksummedFile checksum = new ChecksummedFile( file );
+ ChecksummedFile checksum = new ChecksummedFile( file.toPath() );
checksum.fixChecksums( algorithms );
}
{
metadata.setWhenGathered( new Date() );
metadata.setFileLastModified( file.lastModified() );
- ChecksummedFile checksummedFile = new ChecksummedFile( file );
+ ChecksummedFile checksummedFile = new ChecksummedFile( file.toPath() );
try
{
metadata.setMd5( checksummedFile.calculateChecksum( ChecksumAlgorithm.MD5 ) );
*/
import junit.framework.TestCase;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
{
for ( String path : pathsToBeDeleted )
{
- File dir = new File( FileUtil.getBasedir(), path );
+ File dir = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
FileUtils.deleteDirectory( dir );
assertFalse( dir.exists() );
}
- File dest = new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-module-a" );
+ File dest = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-module-a" );
File parentPom =
- new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-parent" );
- File rootPom = new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-root" );
+ new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-parent" );
+ File rootPom = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-root" );
FileUtils.deleteDirectory( dest );
FileUtils.deleteDirectory( parentPom );
private File copyTestArtifactWithParent( String srcPath, String destPath )
throws IOException
{
- File src = new File( FileUtil.getBasedir(), srcPath );
- File dest = new File( FileUtil.getBasedir(), destPath );
+ File src = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), srcPath );
+ File dest = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), destPath );
FileUtils.copyDirectory( src, dest );
assertTrue( dest.exists() );
*/
import junit.framework.TestCase;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
{
for ( String path : pathsToBeDeleted )
{
- File dir = new File( FileUtil.getBasedir(), path );
+ File dir = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
FileUtils.deleteDirectory( dir );
assertFalse( dir.exists() );
}
- File dest = new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-module-a" );
+ File dest = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-module-a" );
File parentPom =
- new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-parent" );
- File rootPom = new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-root" );
+ new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-parent" );
+ File rootPom = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-root" );
FileUtils.deleteDirectory( dest );
FileUtils.deleteDirectory( parentPom );
private File copyTestArtifactWithParent( String srcPath, String destPath )
throws IOException
{
- File src = new File( FileUtil.getBasedir(), srcPath );
- File dest = new File( FileUtil.getBasedir(), destPath );
+ File src = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), srcPath );
+ File dest = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), destPath );
FileUtils.copyDirectory( src, dest );
assertTrue( dest.exists() );
*/
import junit.framework.TestCase;
-import org.apache.archiva.common.utils.FileUtil;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
import java.io.File;
import java.io.IOException;
import java.net.URL;
-import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
{
for ( String path : pathsToBeDeleted )
{
- File dir = new File( FileUtil.getBasedir(), path );
+ File dir = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
FileUtils.deleteDirectory( dir );
assertFalse( dir.exists() );
}
- File dest = new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-module-a" );
+ File dest = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-module-a" );
File parentPom =
- new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-parent" );
- File rootPom = new File( FileUtil.getBasedir(), "target/test-repository/com/example/test/test-artifact-root" );
+ new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-parent" );
+ File rootPom = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/test-repository/com/example/test/test-artifact-root" );
FileUtils.deleteDirectory( dest );
FileUtils.deleteDirectory( parentPom );
private File copyTestArtifactWithParent( String srcPath, String destPath )
throws IOException
{
- File src = new File( FileUtil.getBasedir(), srcPath );
- File dest = new File( FileUtil.getBasedir(), destPath );
+ File src = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), srcPath );
+ File dest = new File( org.apache.archiva.common.utils.FileUtils.getBasedir(), destPath );
FileUtils.copyDirectory( src, dest );
assertTrue( dest.exists() );
*/
import org.apache.archiva.admin.model.beans.ManagedRepository;
-import org.apache.archiva.common.utils.FileUtil;
+import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.repository.ManagedRepositoryContent;
private ManagedRepositoryContent createManagedRepo( String layout )
throws Exception
{
- File repoRoot = new File( FileUtil.getBasedir() + "/target/test-repo" );
+ File repoRoot = new File( FileUtils.getBasedir() + "/target/test-repo" );
return createManagedRepositoryContent( "test-internal", "Internal Test Repo", repoRoot, layout );
}
// alternatively this could come straight from the storage resolver, which could populate the artifact metadata
// in the later parse call with the desired checksum and use that
String checksumSha1;
- ChecksummedFile checksummedFile = new ChecksummedFile( artifactFile );
+ ChecksummedFile checksummedFile = new ChecksummedFile( artifactFile.toPath() );
try
{
checksumSha1 = checksummedFile.calculateChecksum( ChecksumAlgorithm.SHA1 );