Path artifactFile = managedRepository.resolve(path);
ArtifactIndexingTask task =
- new ArtifactIndexingTask( repository, artifactFile.toFile(), ArtifactIndexingTask.Action.ADD, getIndexingContext() );
+ new ArtifactIndexingTask( repository, artifactFile, ArtifactIndexingTask.Action.ADD, getIndexingContext() );
try
{
log.debug( "Queueing indexing task '{}' to add or update the artifact in the index.", task );
// specify in indexing task that this is not a repo scan request!
ArtifactIndexingTask task =
- new ArtifactIndexingTask( repository, artifactFile.toFile(), ArtifactIndexingTask.Action.ADD,
+ new ArtifactIndexingTask( repository, artifactFile, ArtifactIndexingTask.Action.ADD,
getIndexingContext(), false );
// only update index we don't need to scan the full repo here
task.setOnlyUpdate( true );
switch ( task.getAction() )
{
case ADD:
- indexed.add( task.getResourceFile().toPath() );
+ indexed.add( task.getResourceFile() );
break;
case DELETE:
indexed.remove( task.getResourceFile() );
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
- task.setResourceFile( localFile.toFile() );
+ task.setResourceFile( localFile );
task.setUpdateRelatedArtifacts( true );
task.setScanAll( true );
import org.springframework.stereotype.Service;
import javax.inject.Inject;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.Path;
/**
* ArchivaIndexingTaskExecutor Executes all indexing tasks. Adding, updating and removing artifacts from the index are
log.debug( "Creating indexing context on resource: {}", //
( indexingTask.getResourceFile() == null
? "none"
- : indexingTask.getResourceFile().getPath() ) );
+ : indexingTask.getResourceFile() ) );
context = managedRepositoryAdmin.createIndexContext( repository );
}
catch ( RepositoryAdminException e )
try
{
- File artifactFile = indexingTask.getResourceFile();
+ Path artifactFile = indexingTask.getResourceFile();
if ( artifactFile == null )
{
log.debug( "no artifact pass in indexing task so skip it" );
}
else
{
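+ // the Maven Indexer ArtifactContextProducer API still takes a java.io.File, hence the toFile() bridge below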
- ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile );
+ ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile() );
if ( ac != null )
{
// MRM-1779 pom must be indexed too
// TODO make that configurable?
- if ( artifactFile.getPath().endsWith( ".pom" ) )
+ if ( artifactFile.getFileName().toString().endsWith( ".pom" ) )
{
ac.getArtifactInfo().setFileExtension( "pom" );
ac.getArtifactInfo().setPackaging( "pom" );
if ( !indexingTask.isExecuteOnEntireRepo() )
{
log.debug( "Finishing indexing task on resource file : {}", indexingTask.getResourceFile() != null
- ? indexingTask.getResourceFile().getPath()
+ ? indexingTask.getResourceFile()
: " none " );
finishIndexingTask( indexingTask, repository, context );
}
import org.apache.archiva.redback.components.taskqueue.Task;
import org.apache.maven.index.context.IndexingContext;
-import java.io.File;
+import java.nio.file.Path;
+
public class ArtifactIndexingTask
implements Task
private final ManagedRepository repository;
- private final File resourceFile;
+ private final Path resourceFile;
private final Action action;
*/
private boolean onlyUpdate = false;
- public ArtifactIndexingTask( ManagedRepository repository, File resourceFile, Action action,
+ public ArtifactIndexingTask( ManagedRepository repository, Path resourceFile, Action action,
IndexingContext context )
{
this.repository = repository;
this.context = context;
}
- public ArtifactIndexingTask( ManagedRepository repository, File resourceFile, Action action,
+ public ArtifactIndexingTask( ManagedRepository repository, Path resourceFile, Action action,
IndexingContext context, boolean executeOnEntireRepo )
{
this( repository, resourceFile, action, context );
this.executeOnEntireRepo = executeOnEntireRepo;
}
- public ArtifactIndexingTask( ManagedRepository repository, File resourceFile, Action action,
+ public ArtifactIndexingTask( ManagedRepository repository, Path resourceFile, Action action,
IndexingContext context, boolean executeOnEntireRepo, boolean onlyUpdate )
{
this( repository, resourceFile, action, context, executeOnEntireRepo );
return 0;
}
- public File getResourceFile()
+ public Path getResourceFile()
{
return resourceFile;
}
import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
import org.apache.archiva.proxy.common.WagonFactory;
import org.apache.archiva.proxy.common.WagonFactoryRequest;
-import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.time.StopWatch;
import org.apache.maven.index.context.IndexingContext;
import org.apache.maven.index.updater.IndexUpdateRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
}
this.runningRemoteDownloadIds.add( this.remoteRepository.getId() );
}
- File tempIndexDirectory = null;
+ Path tempIndexDirectory = null;
StopWatch stopWatch = new StopWatch();
stopWatch.start();
try
IndexingContext indexingContext = remoteRepositoryAdmin.createIndexContext( this.remoteRepository );
// create a temp directory to download files
- tempIndexDirectory = new File( indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex" );
- File indexCacheDirectory = new File( indexingContext.getIndexDirectoryFile().getParent(), ".indexCache" );
- indexCacheDirectory.mkdirs();
- if ( tempIndexDirectory.exists() )
+ tempIndexDirectory = Paths.get( indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex" );
+ Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile().getParent(), ".indexCache" );
+ Files.createDirectories( indexCacheDirectory );
+ if ( Files.exists( tempIndexDirectory ) )
{
- FileUtils.deleteDirectory( tempIndexDirectory );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( tempIndexDirectory );
}
- tempIndexDirectory.mkdirs();
- tempIndexDirectory.deleteOnExit();
+ Files.createDirectories( tempIndexDirectory );
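+ // java.nio.file.Path has no deleteOnExit(), so fall back to the File handle for JVM-exit cleanup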
+ tempIndexDirectory.toFile().deleteOnExit();
String baseIndexUrl = indexingContext.getIndexUpdateUrl();
String wagonProtocol = new URL( this.remoteRepository.getUrl() ).getProtocol();
wagon.connect( new Repository( this.remoteRepository.getId(), baseIndexUrl ), authenticationInfo,
proxyInfo );
- File indexDirectory = indexingContext.getIndexDirectoryFile();
- if ( !indexDirectory.exists() )
+ Path indexDirectory = indexingContext.getIndexDirectoryFile().toPath();
+ if ( !Files.exists(indexDirectory) )
{
- indexDirectory.mkdirs();
+ Files.createDirectories( indexDirectory );
}
ResourceFetcher resourceFetcher =
new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
request.setForceFullUpdate( this.fullDownload );
- request.setLocalIndexCacheDir( indexCacheDirectory );
+ request.setLocalIndexCacheDir( indexCacheDirectory.toFile() );
this.indexUpdater.fetchAndUpdateIndex( request );
stopWatch.stop();
log.info( "end download remote index for remote repository {}", this.remoteRepository.getId() );
}
- private void deleteDirectoryQuiet( File f )
+ private void deleteDirectoryQuiet( Path f )
{
try
{
- FileUtils.deleteDirectory( f );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( f );
}
catch ( IOException e )
{
Logger log;
- File tempIndexDirectory;
+ Path tempIndexDirectory;
Wagon wagon;
RemoteRepository remoteRepository;
- private WagonResourceFetcher( Logger log, File tempIndexDirectory, Wagon wagon,
+ private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
RemoteRepository remoteRepository )
{
this.log = log;
try
{
log.info( "index update retrieve file, name:{}", name );
- File file = new File( tempIndexDirectory, name );
- Files.deleteIfExists( file.toPath() );
- file.deleteOnExit();
- wagon.get( addParameters( name, this.remoteRepository ), file );
- return Files.newInputStream( file.toPath() );
+ Path file = tempIndexDirectory.resolve( name );
+ Files.deleteIfExists( file );
+ file.toFile().deleteOnExit();
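+ // Wagon downloads into a java.io.File target, so convert back from Path for the get() call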
+ wagon.get( addParameters( name, this.remoteRepository ), file.toFile() );
+ return Files.newInputStream( file );
}
catch ( AuthorizationException | TransferFailedException e )
{
import org.apache.maven.index.context.IndexingContext;
import org.apache.maven.index.expr.SourcedSearchExpression;
import org.apache.maven.index.expr.StringSearchExpression;
+import org.apache.maven.index.updater.DefaultIndexUpdater;
+import org.apache.maven.index.updater.IndexUpdateRequest;
+import org.apache.maven.index.updater.IndexUpdater;
import org.apache.maven.index_shaded.lucene.search.BooleanClause;
import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
import org.apache.maven.index_shaded.lucene.search.IndexSearcher;
import org.apache.maven.index_shaded.lucene.search.TopDocs;
-import org.apache.maven.index.updater.DefaultIndexUpdater;
-import org.apache.maven.index.updater.IndexUpdateRequest;
-import org.apache.maven.index.updater.IndexUpdater;
import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.Before;
import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
-import java.io.File;
-import java.io.FilenameFilter;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.List;
import java.util.Set;
+import java.util.stream.Stream;
repositoryConfig = new ManagedRepository();
repositoryConfig.setId( "test-repo" );
repositoryConfig.setLocation(
- new File( System.getProperty( "basedir" ), "target/test-classes/test-repo" ).getAbsolutePath() );
+ Paths.get( System.getProperty( "basedir" ), "target/test-classes/test-repo" ).toAbsolutePath().toString() );
repositoryConfig.setLayout( "default" );
repositoryConfig.setName( "Test Repository" );
repositoryConfig.setScanned( true );
public void testAddArtifactToIndex()
throws Exception
{
- File artifactFile = new File( repositoryConfig.getLocation(),
+ Path artifactFile = Paths.get( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
ArtifactIndexingTask task =
{
IndexingContext context = indexer.addIndexingContext( repositoryConfig.getId(), //
repositoryConfig.getId(), //
- new File( repositoryConfig.getLocation() ), //
- new File( repositoryConfig.getLocation(), ".indexer" )
+ Paths.get( repositoryConfig.getLocation() ).toFile(), //
+ Paths.get( repositoryConfig.getLocation(), ".indexer" ).toFile()
//
, null, null, indexCreators );
context.setSearchable( true );
FlatSearchRequest request = new FlatSearchRequest( q );
FlatSearchResponse response = indexer.searchFlat( request );
- assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
- assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+ assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
+ assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
assertEquals( 1, response.getTotalHits() );
Set<ArtifactInfo> results = response.getResults();
public void testUpdateArtifactInIndex()
throws Exception
{
- File artifactFile = new File( repositoryConfig.getLocation(),
+ Path artifactFile = Paths.get( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
ArtifactIndexingTask task =
//searcher.close();
ctx.releaseIndexSearcher( searcher );
- assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
- assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+ assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
+ assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
// should only return 1 hit!
assertEquals( 1, topDocs.totalHits );
public void testRemoveArtifactFromIndex()
throws Exception
{
- File artifactFile = new File( repositoryConfig.getLocation(),
+ Path artifactFile = Paths.get( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
ArtifactIndexingTask task =
FlatSearchResponse response = indexer.searchFlat( flatSearchRequest );
- assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
- assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+ assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
+ assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
// should return 1 hit
assertEquals( 1, response.getTotalHitsCount() );
new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ),
BooleanClause.Occur.SHOULD );
- assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
- assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
+ assertTrue( Files.exists(Paths.get( repositoryConfig.getLocation(), ".indexer" )) );
+ assertFalse( Files.exists(Paths.get( repositoryConfig.getLocation(), ".index" )) );
flatSearchRequest = new FlatSearchRequest( q, getIndexingContext() );
throws Exception
{
- File indexerDirectory = new File( repositoryConfig.getLocation(), ".indexer" );
+ Path indexerDirectory = Paths.get( repositoryConfig.getLocation(), ".indexer" );
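+ // clear any previously packed index files so the packed-index assertions below check freshly generated output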
- indexerDirectory.listFiles( new FilenameFilter()
- {
- @Override
- public boolean accept( File file, String s )
+ try ( Stream<Path> indexFiles = Files.list( indexerDirectory ) )
+ {
+ indexFiles.filter( path -> path.getFileName().toString().startsWith( "nexus-maven-repository-index" ) )
+ .forEach( path ->
{
- if ( s.startsWith( "nexus-maven-repository-index" ) )
+ try
{
- new File( file, s ).delete();
+ Files.delete( path );
}
- return false;
- }
- } );
+ catch ( IOException e )
+ {
+ e.printStackTrace( );
+ }
+ } );
+ }
+
- File artifactFile = new File( repositoryConfig.getLocation(),
+ Path artifactFile = Paths.get( repositoryConfig.getLocation(),
"org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" );
ArtifactIndexingTask task =
indexingExecutor.executeTask( task );
- assertTrue( indexerDirectory.exists() );
+ assertTrue( Files.exists(indexerDirectory) );
// test packed index file creation
//no more zip
//Assertions.assertThat(new File( indexerDirectory, "nexus-maven-repository-index.zip" )).exists();
- Assertions.assertThat( new File( indexerDirectory, "nexus-maven-repository-index.properties" ) ).exists();
- Assertions.assertThat( new File( indexerDirectory, "nexus-maven-repository-index.gz" ) ).exists();
+ Assertions.assertThat( Files.exists( indexerDirectory.resolve( "nexus-maven-repository-index.properties" ) ) ).isTrue();
+ Assertions.assertThat( Files.exists( indexerDirectory.resolve( "nexus-maven-repository-index.gz" ) ) ).isTrue();
// unpack .zip index
- File destDir = new File( repositoryConfig.getLocation(), ".indexer/tmp" );
+ Path destDir = Paths.get( repositoryConfig.getLocation(), ".indexer/tmp" );
//unzipIndex( indexerDirectory.getPath(), destDir.getPath() );
- DefaultIndexUpdater.FileFetcher fetcher = new DefaultIndexUpdater.FileFetcher( indexerDirectory );
+ DefaultIndexUpdater.FileFetcher fetcher = new DefaultIndexUpdater.FileFetcher( indexerDirectory.toFile() );
IndexUpdateRequest updateRequest = new IndexUpdateRequest( getIndexingContext(), fetcher );
//updateRequest.setLocalIndexCacheDir( indexerDirectory );
indexUpdater.fetchAndUpdateIndex( updateRequest );
import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
server = new Server( );
serverConnector = new ServerConnector( server, new HttpConnectionFactory());
server.addConnector( serverConnector );
- createContext( server, new File( "src/test/" ) );
+ createContext( server, Paths.get( "src/test/" ) );
this.server.start();
this.port = serverConnector.getLocalPort();
log.info( "start server on port {}", this.port );
}
- protected void createContext( Server server, File repositoryDirectory )
+ protected void createContext( Server server, Path repositoryDirectory )
throws IOException
{
ServletContextHandler context = new ServletContextHandler();
- context.setResourceBase( repositoryDirectory.getAbsolutePath() );
+ context.setResourceBase( repositoryDirectory.toAbsolutePath().toString() );
context.setContextPath( "/" );
ServletHolder sh = new ServletHolder( DefaultServlet.class );
context.addServlet( sh, "/" );
}
- protected RemoteRepository getRemoteRepository()
+ protected RemoteRepository getRemoteRepository() throws IOException
{
RemoteRepository remoteRepository = new RemoteRepository();
- File indexDirectory =
- new File( FileUtils.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
- indexDirectory.mkdirs();
- indexDirectory.deleteOnExit();
+ Path indexDirectory =
+ Paths.get( FileUtils.getBasedir(), "target/index/test-" + Long.toString( System.currentTimeMillis() ) );
+ Files.createDirectories( indexDirectory );
+ indexDirectory.toFile().deleteOnExit();
remoteRepository.setName( "foo" );
- remoteRepository.setIndexDirectory( indexDirectory.getAbsolutePath() );
+ remoteRepository.setIndexDirectory( indexDirectory.toAbsolutePath().toString() );
remoteRepository.setDownloadRemoteIndex( true );
remoteRepository.setId( "test-repo" );
remoteRepository.setUrl( "http://localhost:" + port );
import org.apache.archiva.redback.components.taskqueue.Task;
-import java.io.File;
+import java.nio.file.Path;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one
{
private String repositoryId;
- private File resourceFile;
+ private Path resourceFile;
private boolean updateRelatedArtifacts;
return 0;
}
- public File getResourceFile()
+ public Path getResourceFile()
{
return resourceFile;
}
- public void setResourceFile( File resourceFile )
+ public void setResourceFile( Path resourceFile )
{
this.resourceFile = resourceFile;
}
if ( task.getResourceFile() != null )
{
log.debug( "Executing task from queue with job name: {}", task );
- consumers.executeConsumers( arepo, task.getResourceFile().toPath(), task.isUpdateRelatedArtifacts() );
+ consumers.executeConsumers( arepo, task.getResourceFile(), task.isUpdateRelatedArtifacts() );
}
else
{
import javax.inject.Inject;
import javax.inject.Named;
-import java.io.File;
+import java.io.IOException;
+import java.nio.file.FileSystems;
+import java.nio.file.FileVisitOption;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.PathMatcher;
+import java.nio.file.Paths;
+import java.nio.file.attribute.FileTime;
import java.util.Calendar;
+import java.util.Comparator;
+import java.util.stream.Stream;
import static org.mockito.Mockito.mock;
@Named( value = "repositorySessionFactory#mock" )
private MockRepositorySessionFactory factory;
- protected File repoDir;
+ protected Path repoDir;
protected static final String TEST_REPO_ID = "testRepo";
{
super.setUp();
- File sourceRepoDir = new File( "./src/test/repositories/default-repository" );
- repoDir = new File( "./target/default-repository" );
+ Path sourceRepoDir = Paths.get( "src/test/repositories/default-repository" );
+ repoDir = Paths.get( "target/default-repository" );
- FileUtils.deleteDirectory( repoDir );
- assertFalse( "Default Test Repository should not exist.", repoDir.exists() );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoDir );
+ assertFalse( "Default Test Repository should not exist.", Files.exists(repoDir) );
- repoDir.mkdir();
+ Files.createDirectories(repoDir);
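+ // FileUtils.copyDirectoryStructure still operates on java.io.File, so bridge both Paths with toFile()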
- FileUtils.copyDirectoryStructure( sourceRepoDir, repoDir );
+ FileUtils.copyDirectoryStructure( sourceRepoDir.toFile(), repoDir.toFile() );
// set the timestamps to a time well in the past
Calendar cal = Calendar.getInstance();
cal.add( Calendar.YEAR, -1 );
- FileUtils.getFiles( repoDir, "**", null ) //
- .stream().forEach( file -> file.setLastModified( cal.getTimeInMillis() ) );
-
- // TODO: test they are excluded instead
- for ( String dir : FileUtils.getDirectoryNames( repoDir, "**/.svn", null, false ) )
- {
- FileUtils.deleteDirectory( new File( repoDir, dir ) );
+ try ( Stream<Path> stream = Files.walk( repoDir, FileVisitOption.FOLLOW_LINKS ) )
+ {
+ stream.forEach( path ->
+ {
+ try
+ {
+ Files.setLastModifiedTime( path, FileTime.fromMillis( cal.getTimeInMillis( ) ) );
+ }
+ catch ( IOException e )
+ {
+ e.printStackTrace( );
+ }
+ } );
}
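+ // delete any .svn directories copied from the source repository (TODO: test they are excluded instead)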
+ PathMatcher m = FileSystems.getDefault().getPathMatcher( "glob:**/.svn" );
+ try ( Stream<Path> svnDirs = Files.walk( repoDir, FileVisitOption.FOLLOW_LINKS ) )
+ {
+ svnDirs.filter( Files::isDirectory )
+ .filter( path -> m.matches( path ) )
+ .sorted( Comparator.reverseOrder() )
+ .forEach( path -> org.apache.archiva.common.utils.FileUtils.deleteQuietly( path ) );
+ }
- assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
+ assertTrue( "Default Test Repository should exist.", Files.exists(repoDir) && Files.isDirectory( repoDir) );
assertNotNull( archivaConfig );
ManagedRepositoryConfiguration repositoryConfiguration = new ManagedRepositoryConfiguration();
repositoryConfiguration.setId( TEST_REPO_ID );
repositoryConfiguration.setName( "Test Repository" );
- repositoryConfiguration.setLocation( repoDir.getAbsolutePath() );
+ repositoryConfiguration.setLocation( repoDir.toAbsolutePath().toString() );
archivaConfig.getConfiguration().getManagedRepositories().clear();
archivaConfig.getConfiguration().addManagedRepository( repositoryConfiguration );
public void tearDown()
throws Exception
{
- FileUtils.deleteDirectory( repoDir );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( repoDir );
- assertFalse( repoDir.exists() );
+ assertFalse( Files.exists(repoDir) );
super.tearDown();
}
import org.junit.Test;
import org.springframework.test.context.ContextConfiguration;
-import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.FileTime;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
// assertEquals( 5, newStats.getTotalProjectCount() );
// assertEquals( 14159, newStats.getTotalArtifactFileSize() );
- File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
- assertFalse( "newArtifactGroup should not exist.", newArtifactGroup.exists() );
+ Path newArtifactGroup = repoDir.resolve( "org/apache/archiva" );
+ assertFalse( "newArtifactGroup should not exist.", Files.exists(newArtifactGroup) );
- FileUtils.copyDirectoryStructure( new File( "target/test-classes/test-repo/org/apache/archiva" ),
- newArtifactGroup );
+ FileUtils.copyDirectoryStructure( Paths.get( "target/test-classes/test-repo/org/apache/archiva" ).toFile(),
+ newArtifactGroup.toFile() );
// update last modified date
- new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
- Calendar.getInstance().getTimeInMillis() + 1000 );
- new File( newArtifactGroup,
- "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
- Calendar.getInstance().getTimeInMillis() + 1000 );
+ Files.setLastModifiedTime(newArtifactGroup.resolve( "archiva-index-methods-jar-test/1.0/pom.xml" ), FileTime.fromMillis(
+ Calendar.getInstance().getTimeInMillis() + 1000 ));
+ Files.setLastModifiedTime( newArtifactGroup.resolve(
+ "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ), FileTime.fromMillis(
+ Calendar.getInstance().getTimeInMillis() + 1000 ));
- assertTrue( newArtifactGroup.exists() );
+ assertTrue( Files.exists(newArtifactGroup) );
taskExecutor.executeTask( repoTask );
createAndSaveTestStats();
- File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
- assertFalse( "newArtifactGroup should not exist.", newArtifactGroup.exists() );
+ Path newArtifactGroup = repoDir.resolve( "org/apache/archiva" );
+ assertFalse( "newArtifactGroup should not exist.", Files.exists(newArtifactGroup) );
- FileUtils.copyDirectoryStructure( new File( "target/test-classes/test-repo/org/apache/archiva" ),
- newArtifactGroup );
+ FileUtils.copyDirectoryStructure( Paths.get( "target/test-classes/test-repo/org/apache/archiva" ).toFile(),
+ newArtifactGroup.toFile() );
// update last modified date, placing shortly after last scan
- new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
- Calendar.getInstance().getTimeInMillis() + 1000 );
- new File( newArtifactGroup,
- "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
- Calendar.getInstance().getTimeInMillis() + 1000 );
+ Files.setLastModifiedTime(newArtifactGroup.resolve("archiva-index-methods-jar-test/1.0/pom.xml" ), FileTime.fromMillis(
+ Calendar.getInstance().getTimeInMillis() + 1000 ));
+ Files.setLastModifiedTime( newArtifactGroup.resolve(
+ "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ), FileTime.fromMillis(
+ Calendar.getInstance().getTimeInMillis() + 1000 ));
- assertTrue( newArtifactGroup.exists() );
+ assertTrue( Files.exists(newArtifactGroup) );
// scan using the really long previous duration
taskExecutor.executeTask( repoTask );
createAndSaveTestStats();
- File newArtifactGroup = new File( repoDir, "org/apache/archiva" );
- assertFalse( "newArtifactGroup should not exist.", newArtifactGroup.exists() );
+ Path newArtifactGroup = repoDir.resolve( "org/apache/archiva" );
+ assertFalse( "newArtifactGroup should not exist.", Files.exists(newArtifactGroup) );
- FileUtils.copyDirectoryStructure( new File( "target/test-classes/test-repo/org/apache/archiva" ),
- newArtifactGroup );
+ FileUtils.copyDirectoryStructure( Paths.get( "target/test-classes/test-repo/org/apache/archiva" ).toFile(),
+ newArtifactGroup.toFile() );
// update last modified date, placing in middle of last scan
- new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
- Calendar.getInstance().getTimeInMillis() - 50000 );
- new File( newArtifactGroup,
- "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
- Calendar.getInstance().getTimeInMillis() - 50000 );
+ Files.setLastModifiedTime( newArtifactGroup.resolve("archiva-index-methods-jar-test/1.0/pom.xml" ), FileTime.fromMillis(
+ Calendar.getInstance().getTimeInMillis() - 50000 ));
+ Files.setLastModifiedTime( newArtifactGroup.resolve(
+ "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ), FileTime.fromMillis(
+ Calendar.getInstance().getTimeInMillis() - 50000 ));
- assertTrue( newArtifactGroup.exists() );
+ assertTrue( Files.exists(newArtifactGroup) );
// scan using the really long previous duration
taskExecutor.executeTask( repoTask );
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
- task.setResourceFile( localFile );
+ task.setResourceFile( localFile.toPath() );
task.setUpdateRelatedArtifacts( true );
//task.setScanAll( true );
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
- task.setResourceFile( localFile );
+ task.setResourceFile( localFile.toPath() );
task.setUpdateRelatedArtifacts( true );
task.setScanAll( false );
{
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repository.getId() );
- task.setResourceFile( localFile );
+ task.setResourceFile( localFile.toPath() );
task.setUpdateRelatedArtifacts( false );
task.setScanAll( false );