}
catch ( IOException e )
{
- log.warn( "Unable to update checksum:" + e.getMessage() );
+ log.warn( "Unable to update checksum:{}", e.getMessage() );
return false;
}
}
catch ( IOException e )
{
- log.warn( "Unable to read / parse checksum: " + e.getMessage() );
+ log.warn( "Unable to read / parse checksum: {}", e.getMessage() );
return false;
}
else
{
// Policy key doesn't exist. Don't add it to golden version.
- log.warn( "Policy [" + policyId + "] does not exist." );
+ log.warn( "Policy [{}] does not exist.", policyId );
}
}
assertEquals( "check managed repositories", "default", repository.getLayout() );
assertTrue( "check managed repositories", repository.isScanned() );
- log.info( "knowContentConsumers " + configuration.getRepositoryScanning().getKnownContentConsumers() );
+ log.info( "knowContentConsumers {}", configuration.getRepositoryScanning().getKnownContentConsumers() );
assertFalse(
configuration.getRepositoryScanning().getKnownContentConsumers().contains( "update-db-artifact" ) );
catch ( MetadataRepositoryException e )
{
log.warn(
- "Error occurred persisting metadata for artifact:{} (repository:{}); message: {}" + e.getMessage(),
- new Object[]{ path, repoId, e.getMessage() }, e );
+ "Error occurred persisting metadata for artifact:{} (repository:{}); message: {}",
+ path, repoId, e.getMessage(), e );
repositorySession.revert();
}
catch ( RepositoryStorageRuntimeException e )
{
log.warn(
- "Error occurred persisting metadata for artifact:{} (repository:{}); message: {}" + e.getMessage(),
- new Object[]{ path, repoId, e.getMessage() }, e );
+ "Error occurred persisting metadata for artifact:{} (repository:{}); message: {}",
+ path, repoId, e.getMessage(), e );
repositorySession.revert();
}
finally
}
catch ( LayoutException e )
{
- log.warn( "Unable to convert artifact: " + path + " : " + e.getMessage(), e );
+ log.warn( "Unable to convert artifact: {} : {}", path, e.getMessage(), e );
}
catch ( ArtifactConversionException e )
{
- log.warn( "Unable to convert artifact: " + path + " : " + e.getMessage(), e );
+ log.warn( "Unable to convert artifact: {} : {}", path, e.getMessage(), e );
}
}
archivaConfigControl.verify();
- log.info( "groupIds: " + groupIds );
+ log.info( "groupIds: {}", groupIds );
assertEquals( 3, groupIds.size() );
assertTrue( groupIds.contains( "com" ) );
}
catch ( Exception e )
{
- logger.warn( "fail to configure User-Agent: " + e.getMessage(), e );
+ logger.warn( "fail to configure User-Agent: {}", e.getMessage(), e );
}
}
}
}
catch ( RepositoryNotFoundException e )
{
- log.warn( "Unable to use proxy connector: " + e.getMessage(), e );
+ log.warn( "Unable to use proxy connector: {}", e.getMessage(), e );
}
catch ( RepositoryException e )
{
- log.warn( "Unable to use proxy connector: " + e.getMessage(), e );
+ log.warn( "Unable to use proxy connector: {}", e.getMessage(), e );
}
catch ( ProxyException e )
{
log.warn(
- "Transfer error from repository \"" + targetRepository.getRepository().getId() + "\" for resource "
- + path + ", continuing to next repository. Error message: {}", e.getMessage() );
+ "Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",
+ targetRepository.getRepository().getId(), path, e.getMessage() );
log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ),
"Transfer error from repository \"" + targetRepository.getRepository().getId()
+ "\" for resource " + path + ", continuing to next repository. Error message: {}",
catch ( RepositoryAdminException e )
{
log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ),
- "Transfer error from repository \"" + targetRepository.getRepository().getId()
- + "\" for resource " + path + ", continuing to next repository. Error message: {}",
+ "Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",
+ targetRepository.getRepository().getId(), path,
e.getMessage(), e );
log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ), "Full stack trace", e );
}
}
catch ( ProxyException e )
{
- log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId()
- + "\" for versioned Metadata " + logicalPath
- + ", continuing to next repository. Error message: " + e.getMessage() );
+ log.warn( "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
+ targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
log.debug( "Full stack trace", e );
}
catch ( RepositoryAdminException e )
{
- log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId()
- + "\" for versioned Metadata " + logicalPath
- + ", continuing to next repository. Error message: " + e.getMessage() );
+ log.warn( "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
+ targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
log.debug( "Full stack trace", e );
}
}
}
catch ( RepositoryMetadataException e )
{
- log.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
+ log.warn( "Unable to update metadata {}: {}", localFile.getAbsolutePath(), e.getMessage(), e );
}
}
catch ( ProxyException e )
{
urlFailureCache.cacheFailure( url );
- log.warn( "Transfer failed on checksum: " + url + " : " + e.getMessage(), e );
+ log.warn( "Transfer failed on checksum: {} : {}", url, e.getMessage(), e );
// Critical issue, pass it on.
throw e;
}
}
log.warn(
- "Transfer error from repository \"" + content.getRepository().getId() + "\" for artifact " + Keys.toKey(
- artifact ) + ", continuing to next repository. Error message: " + exception.getMessage() );
+ "Transfer error from repository {} for artifact {}, continuing to next repository. Error message: {}",
+ content.getRepository().getId(), Keys.toKey( artifact ), exception.getMessage() );
log.debug( "Full stack trace", exception );
}
}
catch ( ConnectionException e )
{
- log.warn( "Could not connect to " + remoteRepository.getRepository().getName() + ": " + e.getMessage() );
+ log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
connected = false;
}
catch ( AuthenticationException e )
{
- log.warn( "Could not connect to " + remoteRepository.getRepository().getName() + ": " + e.getMessage() );
+ log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
connected = false;
}
CacheManager.getInstance().clearAll();
- log.info( "\n.\\ " + name + "() \\._________________________________________\n" );
+ log.info( "\n.\\ {}() \\._________________________________________\n", name );
}
@After
public boolean getIfNewer( String resourceName, File destination, long timestamp )
throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
{
- log.info( ".getIfNewer(" + resourceName + ", " + destination + ", " + timestamp + ")" );
+ log.info( ".getIfNewer({}, {}, {})", resourceName, destination, timestamp );
boolean result = delegate.getIfNewer( resourceName, destination, timestamp );
createIfMissing( destination );
catch ( MetadataRepositoryException e )
{
//throw new RepositoryAdminException( e.getMessage(), e );
- log.warn( "skip error during removing repository from MetadatRepository:" + e.getMessage(), e );
+ log.warn( "skip error during removing repository from MetadatRepository:{}", e.getMessage(), e );
}
finally
{
List<ManagedRepository> repos = managedRepositoryAdmin.getManagedRepositories();
assertNotNull( repos );
assertTrue( repos.size() > 0 );
- log.info( "repos " + repos );
+ log.info( "repos {}", repos );
// check default internal
ManagedRepository internal = findManagedRepoById( repos, "internal" );
List<RemoteRepository> remoteRepositories = remoteRepositoryAdmin.getRemoteRepositories();
assertNotNull( remoteRepositories );
assertTrue( remoteRepositories.size() > 0 );
- log.info( "remote " + remoteRepositories );
+ log.info( "remote {}", remoteRepositories );
}
@Test
{
// TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code?
- log.warn( "Unable to read metadata: " + metadataFile.getAbsolutePath(), e );
+ log.warn( "Unable to read metadata: {}", metadataFile.getAbsolutePath(), e );
return null;
}
}
{
// TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code?
- log.warn( "Unable to read metadata: " + metadataFile.getAbsolutePath(), e );
+ log.warn( "Unable to read metadata: {}", metadataFile.getAbsolutePath(), e );
return null;
}
}
{
// TODO: [monitor] consider a monitor for this event.
// TODO: consider a read-redo on monitor return code?
- log.warn( "Unable to read metadata: " + metadataFile.getAbsolutePath(), e );
+ log.warn( "Unable to read metadata: {}", metadataFile.getAbsolutePath(), e );
return null;
}
}
}
catch ( ConsumerException e )
{
- log.warn( "Consumer [" + consumer.getId() + "] cannot begin: " + e.getMessage(), e );
+ log.warn( "Consumer [{}] cannot begin: {}", consumer.getId(), e.getMessage(), e );
}
}
}
}
catch ( IllegalArgumentException e )
{
- log.warn( "Unable to schedule remote index download: " + e.getLocalizedMessage() );
+ log.warn( "Unable to schedule remote index download: {}", e.getLocalizedMessage() );
}
if ( remoteRepository.isDownloadRemoteIndexOnStartup() )
deleteDirectoryQuiet( tempIndexDirectory );
this.runningRemoteDownloadIds.remove( this.remoteRepository.getId() );
}
- log.info( "end download remote index for remote repository " + this.remoteRepository.getId() );
+ log.info( "end download remote index for remote repository {}", this.remoteRepository.getId() );
}
private void deleteDirectoryQuiet( File f )
this.server.start();
Connector connector = this.server.getConnectors()[0];
this.port = connector.getLocalPort();
- log.info( "start server on port " + this.port );
+ log.info( "start server on port {}", this.port );
nexusIndexer = plexusSisuBridge.lookup( NexusIndexer.class );
}
FlatSearchResponse response = nexusIndexer.searchFlat( rq );
- log.info( "returned hit count:" + response.getReturnedHitsCount() );
+ log.info( "returned hit count:{}", response.getReturnedHitsCount() );
assertEquals( 8, response.getReturnedHitsCount() );
}
catch ( MetadataResolutionException e )
{
log.warn(
- "Skipping invalid metadata while compiling shared model for " + groupId + ":" + artifactId
- + " in repo " + repoId + ": " + e.getMessage() );
+ "Skipping invalid metadata while compiling shared model for {}:{} in repo {}: {}",
+ groupId, artifactId, repoId, e.getMessage() );
}
}
}
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Error retrieving repository statistics: " + e.getMessage(), e );
+ log.warn( "Error retrieving repository statistics: {}", e.getMessage(), e );
}
if ( stats != null )
{
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
+ log.warn( "Unable to retrieve stats, assuming is empty: {}", e.getMessage(), e );
}
}
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
+ log.warn( "Unable to retrieve stats, assuming is empty: {}", e.getMessage(), e );
}
if ( stats == null || stats.isEmpty() )
{
assertNotNull( archivaRepositoryStatistics );
- log.info( "archivaRepositoryStatistics:" + archivaRepositoryStatistics.toString() );
+ log.info( "archivaRepositoryStatistics: {}", archivaRepositoryStatistics );
assertEquals( 92, archivaRepositoryStatistics.getNewFileCount() );
assertEquals( 92, archivaRepositoryStatistics.getTotalFileCount() );
for ( Artifact artifact : artifacts )
{
- log.info( "url:" + artifact.getUrl() );
+ log.info( "url: {}", artifact.getUrl() );
String version = artifact.getVersion();
assertTrue( artifact.getUrl().contains( version ) );
for ( Artifact artifact : artifacts )
{
- log.info( "url:" + artifact.getUrl() );
+ log.info( "url: {}", artifact.getUrl() );
String version = artifact.getVersion();
assertEquals( "http://localhost:" + port
+ "/repository/test-repo/org/apache/karaf/features/org.apache.karaf.features.core/"
assertTrue( " not 1 results for Bundle ExportPackage org.apache.karaf.features.command.completers but "
+ artifacts.size() + ":" + artifacts, artifacts.size() == 1 );
- log.info( "artifact url " + artifacts.get( 0 ).getUrl() );
+ log.info( "artifact url {}", artifacts.get( 0 ).getUrl() );
deleteTestRepo( testRepoId );
}
assertTrue( " not 0 results for Bundle ExportPackage org.apache.karaf.features.command.completers but "
+ artifacts.size() + ":" + artifacts, artifacts.size() == 1 );
- log.info( "artifact url " + artifacts.get( 0 ).getUrl() );
+ log.info( "artifact url {}", artifacts.get( 0 ).getUrl() );
deleteTestRepo( testRepoId );
}
SearchService searchService = getSearchService( authorizationHeader );
Collection<String> groupIds = searchService.getAllGroupIds( Arrays.asList( testRepoId ) ).getGroupIds();
- log.info( "groupIds " + groupIds );
+ log.info( "groupIds {}", groupIds );
assertFalse( groupIds.isEmpty() );
assertTrue( groupIds.contains( "commons-cli" ) );
assertTrue( groupIds.contains( "org.apache.felix" ) );
new SearchRequest( "org.foo", "studio-all-update-site", null, null, null, Arrays.asList( "test-repo" ) );
List<Artifact> artifacts = searchService.searchArtifacts( searchRequest );
- log.info( "artifacts:" + artifacts );
+ log.info( "artifacts: {}", artifacts );
assertEquals( 1, artifacts.size() );
deleteTestRepo( testRepoId );
}
{
if ( authzResult.getException() != null )
{
- log.info( "Authorization Denied [ip=" + request.getRemoteAddr() + ",permission=" + permission + ",repo="
- + repositoryId + "] : " + authzResult.getException().getMessage() );
+ log.info( "Authorization Denied [ip={},permission={},repo={}] : {}", request.getRemoteAddr(),
+ permission, repositoryId, authzResult.getException().getMessage() );
throw new UnauthorizedException( "Access denied for repository " + repositoryId );
}
}
catch ( DecoderException ie )
{
- log.warn( "Error decoding username and password.", ie.getMessage() );
+ log.warn( "Error decoding username and password: {}", ie.getMessage() );
}
if ( usernamePassword == null || usernamePassword.trim().equals( "" ) )
}
catch ( RbacManagerException e )
{
- log.warn( "Exception when checking for locked admin user: " + e.getMessage(), e );
+ log.warn( "Exception when checking for locked admin user: {}", e.getMessage(), e );
}
checked = true;
public static void display( String version )
{
String banner = getBanner( version );
- LoggerFactory.getLogger( Banner.class ).info( StringUtils.repeat( "_", 25 ) + eol + banner );
+ LoggerFactory.getLogger( Banner.class ).info( "{}{}{}", StringUtils.repeat( "_", 25 ), eol, banner );
}
}
EnvironmentCheck check = entry.getValue();
List<String> v = new ArrayList<String>();
check.validateEnvironment( v );
- log.info( "Environment Check: " + entry.getKey() + " -> " + v.size() + " violation(s)" );
+ log.info( "Environment Check: {} -> {} violation(s)", entry.getKey(), v.size() );
for ( String s : v )
{
violations.add( "[" + entry.getKey() + "] " + s );
}
catch ( RbacManagerException e )
{
- log.warn( "Unable to add role [" + ArchivaRoleConstants.toRepositoryObserverRoleName( repoId ) + "] to "
- + principal + " user.", e );
+ log.warn( "Unable to add role [{}] to {} user.", ArchivaRoleConstants.toRepositoryObserverRoleName( repoId ), principal, e );
}
}
}
this.server.start();
Connector connector = this.server.getConnectors()[0];
this.port = connector.getLocalPort();
- log.info( "start server on port " + this.port );
+ log.info( "start server on port {}", this.port );
User user = new User();
user.setEmail( "toto@toto.fr" );
}
catch ( Throwable e )
{
- log.info( "fail to get zipEntries " + e.getMessage(), e );
+ log.info( "fail to get zipEntries {}", e.getMessage(), e );
}
return Collections.emptyList();
}
}
catch ( LayoutException e )
{
- log.warn( "Artifact path '" + resourcePath + "' is invalid." );
+ log.warn( "Artifact path '{}' is invalid.", resourcePath );
}
}
int port = repo.server.getConnectors()[0].getLocalPort();
repo.url = "http://localhost:" + port + repo.context;
- log.info( "Remote HTTP Server started on " + repo.url );
+ log.info( "Remote HTTP Server started on {}", repo.url );
repo.config = createRemoteRepository( repo.id, "Testable [" + repo.id + "] Remote Repo", repo.url );
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
+ log.warn( "Unable to persist resolved information: {}", e.getMessage(), e );
}
session.markDirty();
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
+ log.warn( "Unable to persist resolved information: {}", e.getMessage(), e );
}
}
session.markDirty();
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
+ log.warn( "Unable to persist resolved information: {}", e.getMessage(), e );
}
}
session.markDirty();
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
+ log.warn( "Unable to persist resolved information: {}", e.getMessage(), e );
}
}
}
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
+ log.warn( "Unable to persist resolved information: {}", e.getMessage(), e );
}
catch ( RepositoryStorageMetadataInvalidException e )
{
log.warn(
- "Not update project in metadata repository due to an error resolving it from storage: "
- + e.getMessage() );
+ "Not update project in metadata repository due to an error resolving it from storage: {}",
+ e.getMessage() );
for ( RepositoryListener listener : listeners )
{
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
+ log.warn( "Unable to persist resolved information: {}", e.getMessage(), e );
}
}
session.markDirty();
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to write audit event to repository: " + e.getMessage(), e );
+ log.warn( "Unable to write audit event to repository: {}", e.getMessage(), e );
}
finally
{
catch ( XMLException e )
{
// unable to parse metadata - log it, and continue with the version as the original SNAPSHOT version
- log.warn( "Invalid metadata: " + metadataFile + " - " + e.getMessage() );
+ log.warn( "Invalid metadata: {} - {}", metadataFile, e.getMessage() );
}
}
if ( ( problem.getException() instanceof FileNotFoundException && e.getModelId() != null &&
!e.getModelId().equals( problem.getModelId() ) ) )
{
- log.warn( "The artifact's parent POM file '" + file + "' cannot be resolved. " +
- "Using defaults for project version metadata.." );
+ log.warn( "The artifact's parent POM file '{}' cannot be resolved. " +
+ "Using defaults for project version metadata..", file );
ProjectVersionMetadata metadata = new ProjectVersionMetadata();
metadata.setId( readMetadataRequest.getProjectVersion() );
{
log.info(
"An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}",
- new Object[]{ model.getAbsolutePath(), remoteRepository.getId(), e.getMessage() } );
+ model.getAbsolutePath(), remoteRepository.getId(), e.getMessage() );
}
catch ( Exception e )
{
log.warn(
"An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}",
- new Object[]{ model.getAbsolutePath(), remoteRepository.getId(), e.getMessage() } );
+ model.getAbsolutePath(), remoteRepository.getId(), e.getMessage() );
continue;
}
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to remove metadata facet as part of delete event: " + e.getMessage(), e );
+ log.warn( "Unable to remove metadata facet as part of delete event: {}", e.getMessage(), e );
}
}
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to remove repository problem facets for the version being corrected in the repository: "
- + e.getMessage(), e );
+ log.warn( "Unable to remove repository problem facets for the version being corrected in the repository: {}",
+ e.getMessage(), e );
}
}
}
catch ( MetadataRepositoryException e )
{
- log.warn( "Unable to add repository problem facets for the version being removed: " + e.getMessage(), e );
+ log.warn( "Unable to add repository problem facets for the version being removed: {}", e.getMessage(), e );
}
}
}
catch ( Exception e )
{
- log.warn( "Not reporting problem for invalid artifact in checksum check: " + e.getMessage() );
+ log.warn( "Not reporting problem for invalid artifact in checksum check: {}", e.getMessage() );
return;
}