import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
import org.apache.archiva.metadata.repository.cassandra.model.ProjectVersionMetadataModel;
-import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import javax.inject.Inject;
import javax.inject.Named;
-
-import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
{
super.setUp();
- File directory = new File( "target/test-repositories" );
- if ( directory.exists() )
+ Path directory = Paths.get( "target/test-repositories" );
+ if ( Files.exists(directory) )
{
- FileUtils.deleteDirectory( directory );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( directory );
}
Map<String, MetadataFacetFactory> factories = createTestMetadataFacetFactories();
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.jcr.Jcr;
import org.apache.jackrabbit.oak.plugins.index.IndexUtils;
import org.apache.jackrabbit.oak.plugins.index.lucene.ExtractedTextCache;
import org.apache.jackrabbit.oak.plugins.index.lucene.IndexCopier;
import org.apache.jackrabbit.oak.plugins.index.lucene.IndexTracker;
import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProvider;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.DocumentQueue;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.LocalIndexObserver;
import org.apache.jackrabbit.oak.plugins.index.lucene.hybrid.NRTIndexFactory;
import javax.annotation.Nonnull;
import javax.jcr.Repository;
-import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
StatisticsProvider statsProvider = StatisticsProvider.NOOP;
int queueSize = Integer.getInteger( "queueSize", 10000 );
- File indexDir = Files.createTempDirectory( "archiva_index" ).toFile();
+ Path indexDir = Files.createTempDirectory( "archiva_index" );
log.info( "Queue Index {}", indexDir.toString() );
- IndexCopier indexCopier = new IndexCopier( executorService, indexDir, true );
+ IndexCopier indexCopier = new IndexCopier( executorService, indexDir.toFile(), true );
NRTIndexFactory nrtIndexFactory = new NRTIndexFactory( indexCopier, statsProvider );
MountInfoProvider mountInfoProvider = Mounts.defaultMountInfoProvider();
IndexTracker tracker =
import org.apache.archiva.metadata.model.MetadataFacetFactory;
import org.apache.archiva.metadata.repository.AbstractMetadataRepositoryTest;
-import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.junit.After;
import org.junit.Before;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Map;
public class JcrMetadataRepositoryTest
@BeforeClass
public static void setupSpec() throws IOException, InvalidFileStoreVersionException
{
- File directory = new File( "target/test-repositories" );
- if ( directory.exists() )
+ Path directory = Paths.get( "target/test-repositories" );
+ if ( Files.exists(directory) )
{
- FileUtils.deleteDirectory( directory );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( directory );
}
RepositoryFactory factory = new RepositoryFactory();
- factory.setRepositoryPath( directory.getPath() );
+ factory.setRepositoryPath( directory.toString() );
jcrRepository = factory.createRepository();
}
import org.apache.archiva.metadata.repository.jcr.RepositoryFactory;
import org.apache.archiva.metadata.repository.stats.model.DefaultRepositoryStatistics;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.commons.JcrUtils;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.junit.After;
import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
import javax.jcr.ImportUUIDBehavior;
import javax.jcr.Node;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.nodetype.NodeTypeManager;
import javax.jcr.nodetype.NodeTypeTemplate;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
public static void setupSpec()
throws IOException, InvalidFileStoreVersionException
{
- File directory = new File( "target/test-repositories" );
- if ( directory.exists() )
+ Path directory = Paths.get( "target/test-repositories" );
+ if ( Files.exists(directory) )
{
- FileUtils.deleteDirectory( directory );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( directory );
}
RepositoryFactory factory = new RepositoryFactory();
- factory.setRepositoryPath( directory.getPath() );
+ factory.setRepositoryPath( directory.toString() );
factory.setStoreType( RepositoryFactory.StoreType.IN_MEMORY_TYPE );
jcrRepository = factory.createRepository();
}
import org.apache.archiva.consumers.ConsumerException;
import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.model.facets.RepositoryProblemFacet;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.redback.components.registry.Registry;
import org.apache.archiva.redback.components.registry.RegistryListener;
-import org.apache.archiva.metadata.model.facets.RepositoryProblemFacet;
import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.inject.Named;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
* Search the artifact repository of known SHA1 Checksums for potential duplicate artifacts.
private List<String> includes = new ArrayList<>();
- private File repositoryDir;
+ private Path repositoryDir;
private String repoId;
throws ConsumerException
{
repoId = repo.getId();
- this.repositoryDir = new File( repo.getLocation() );
+ this.repositoryDir = Paths.get( repo.getLocation() );
repositorySession = repositorySessionFactory.createSession();
}
public void processFile( String path )
throws ConsumerException
{
- File artifactFile = new File( this.repositoryDir, path );
+ Path artifactFile = this.repositoryDir.resolve( path );
// TODO: would be quicker to somehow make sure it ran after the update database consumer, or as a part of that
// perhaps could use an artifact context that is retained for all consumers? First in can set the SHA-1
// alternatively this could come straight from the storage resolver, which could populate the artifact metadata
// in the later parse call with the desired checksum and use that
String checksumSha1;
- ChecksummedFile checksummedFile = new ChecksummedFile( artifactFile.toPath() );
+ ChecksummedFile checksummedFile = new ChecksummedFile( artifactFile );
try
{
checksumSha1 = checksummedFile.calculateChecksum( ChecksumAlgorithm.SHA1 );
import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
-import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
public void testMerge()
throws Exception
{
- String targetRepoPath = "target" + File.separatorChar + "test-repository-target";
- File mergedArtifact = new File( targetRepoPath,
+ String targetRepoPath = "target/test-repository-target";
+ Path mergedArtifact = Paths.get( targetRepoPath,
"com/example/test/test-artifact/1.0-SNAPSHOT/test-artifact-1.0-20100308.230825-1.jar" );
- File mavenMetadata = new File( targetRepoPath, "com/example/test/test-artifact/maven-metadata.xml" );
+ Path mavenMetadata = Paths.get( targetRepoPath, "com/example/test/test-artifact/maven-metadata.xml" );
- File pom = new File( targetRepoPath,
+ Path pom = Paths.get( targetRepoPath,
"com/example/test/test-artifact/1.0-SNAPSHOT/test-artifact-1.0-20100308.230825-1.pom" );
- assertFalse( mergedArtifact.exists() );
- assertFalse( mavenMetadata.exists() );
- assertFalse( pom.exists() );
+ assertFalse( Files.exists(mergedArtifact) );
+ assertFalse( Files.exists(mavenMetadata) );
+ assertFalse( Files.exists(pom) );
Configuration c = new Configuration();
ManagedRepositoryConfiguration testRepo = new ManagedRepositoryConfiguration();
testRepo.setId( TEST_REPO_ID );
- testRepo.setLocation( "target" + File.separatorChar + "test-repository" );
+ testRepo.setLocation( "target/test-repository" );
RepositoryScanningConfiguration repoScanConfig = new RepositoryScanningConfiguration();
List<String> knownContentConsumers = new ArrayList<>();
when( metadataRepository.getArtifacts( TEST_REPO_ID ) ).thenReturn( getArtifacts() );
repositoryMerger.merge( metadataRepository, TEST_REPO_ID, "target-rep" );
verify( metadataRepository ).getArtifacts( TEST_REPO_ID );
- assertTrue( mergedArtifact.exists() );
- assertTrue( mavenMetadata.exists() );
- assertTrue( pom.exists() );
+ assertTrue( Files.exists(mergedArtifact) );
+ assertTrue( Files.exists(mavenMetadata) );
+ assertTrue( Files.exists(pom) );
}
@Test
Configuration c = new Configuration();
ManagedRepositoryConfiguration testRepo = new ManagedRepositoryConfiguration();
testRepo.setId( TEST_REPO_ID );
- testRepo.setLocation( "target" + File.separatorChar + "test-repository" );
+ testRepo.setLocation( "target/test-repository" );
- String sourceRepo = "src" + File.separatorChar + "test" + File.separatorChar + "resources" + File.separatorChar
- + "test-repository-with-conflict-artifacts";
+ String sourceRepo = "src/test/resources/test-repository-with-conflict-artifacts";
ManagedRepositoryConfiguration testRepoWithConflicts = new ManagedRepositoryConfiguration();
testRepoWithConflicts.setId( sourceRepoId );
testRepoWithConflicts.setLocation( sourceRepo );
c.addManagedRepository( testRepoWithConflicts );
configuration.save( c );
- File targetRepoFile = new File(
+ Path targetRepoFile = Paths.get(
"/target/test-repository/com/example/test/test-artifact/1.0-SNAPSHOT/test-artifact-1.0-20100308.230825-1.jar" );
- targetRepoFile.setReadOnly();
+ targetRepoFile.toFile().setReadOnly();
when( metadataRepository.getArtifacts( sourceRepoId ) ).thenReturn( sourceRepoArtifactsList );
when( metadataRepository.getArtifacts( TEST_REPO_ID ) ).thenReturn( targetRepoArtifactsList );