@@ -703,7 +703,7 @@ public class DefaultArchivaConfiguration | |||
} | |||
catch ( IOException e ) | |||
{ | |||
log.error( "Unable to create " + filetype + " file: " + e.getMessage(), e ); | |||
log.error( "Unable to create {} file: {}", filetype, e.getMessage(), e ); | |||
return false; | |||
} | |||
} |
@@ -144,7 +144,7 @@ public abstract class AbstractRepositoryPurgeTest | |||
if ( path.contains( " " ) ) | |||
{ | |||
LoggerFactory.getLogger( AbstractRepositoryPurgeTest.class.getName() ).error( | |||
"You are building and testing with a path: \n " + path + " containing space. Consider relocating." ); | |||
"You are building and testing with a path: \n {} containing space. Consider relocating.", path ); | |||
return path.replaceAll( " ", "&20" ); | |||
} | |||
return path; |
@@ -219,7 +219,7 @@ public class NexusIndexerConsumer | |||
} | |||
catch ( TaskQueueException e ) | |||
{ | |||
log.error( "Error queueing task: " + task + ": " + e.getMessage(), e ); | |||
log.error( "Error queueing task: {}: {}", task, e.getMessage(), e ); | |||
} | |||
} | |||
@@ -112,7 +112,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread1 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -132,7 +132,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread2 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -156,7 +156,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread3 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -177,7 +177,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread4 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -198,7 +198,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread5 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -221,7 +221,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread6 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -242,7 +242,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread7 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -265,7 +265,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread8 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -286,7 +286,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread9 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} | |||
@@ -308,7 +308,7 @@ public class DefaultFileLockManagerTest { | |||
logger.info("thread10 ok"); | |||
success.incrementAndGet(); | |||
} catch (Throwable e) { | |||
logger.error("Error occured " + e.getMessage()); | |||
logger.error("Error occurred {}", e.getMessage()); | |||
e.printStackTrace(); | |||
throw e; | |||
} |
@@ -76,14 +76,14 @@ public class DefaultFileLockManagerTimeoutTest | |||
try { | |||
Files.copy(largeJar.toPath(), lock.getFile().toPath(), StandardCopyOption.REPLACE_EXISTING); | |||
} catch (IOException e) { | |||
logger.warn("Copy failed "+e.getMessage()); | |||
logger.warn("Copy failed {}", e.getMessage()); | |||
// On windows a FileSystemException is thrown | |||
// We ignore this | |||
} | |||
lock = fileLockManager.writeFileLock(file); | |||
} catch (FileSystemException ex) { | |||
logger.error("Exception from filesystem "+ex.getMessage()); | |||
logger.error("Exception from filesystem {}", ex.getMessage()); | |||
ex.printStackTrace(); | |||
throw ex; | |||
} |
@@ -428,10 +428,9 @@ public class DefaultRepositoryProxyConnectors | |||
"Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}", | |||
targetRepository.getRepository().getId(), path, e.getMessage() ); | |||
log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ), | |||
"Transfer error from repository \"" + targetRepository.getRepository().getId() | |||
+ "\" for resource " + path + ", continuing to next repository. Error message: {}", | |||
e.getMessage(), e | |||
); | |||
"Transfer error from repository \"{}" | |||
+ "\" for resource {}, continuing to next repository. Error message: {}", | |||
targetRepository.getRepository().getId(), path, e.getMessage(), e ); | |||
} | |||
catch ( RepositoryAdminException e ) | |||
{ | |||
@@ -830,8 +829,8 @@ public class DefaultRepositoryProxyConnectors | |||
} | |||
catch ( TaskQueueException e ) | |||
{ | |||
log.error( "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName() | |||
+ "']." ); | |||
log.error( "Unable to queue repository task to execute consumers on resource file ['{}" | |||
+ "'].", localFile.getName() ); | |||
} | |||
} | |||
@@ -89,8 +89,8 @@ public abstract class AbstractRepositoryAdminTest | |||
if ( path.contains( SPACE ) ) | |||
{ | |||
LoggerFactory.getLogger( AbstractRepositoryAdminTest.class.getName() ).error( | |||
"You are building and testing with {appserver.base}: \n " + path | |||
+ " containing space. Consider relocating." ); | |||
"You are building and testing with {appserver.base}: \n {}" | |||
+ " containing space. Consider relocating.", path ); | |||
} | |||
return path.replaceAll( SPACE, "&20" ); | |||
} |
@@ -77,8 +77,8 @@ public class ConsumerProcessFileClosure | |||
/* Intentionally Catch all exceptions. | |||
* So that the discoverer processing can continue. | |||
*/ | |||
log.error( "Consumer [" + id + "] had an error when processing file [" | |||
+ basefile.getAbsolutePath() + "]: " + e.getMessage(), e ); | |||
log.error( "Consumer [{}] had an error when processing file [" | |||
+ "{}]: {}", id, basefile.getAbsolutePath(), e.getMessage(), e ); | |||
} | |||
} | |||
} |
@@ -135,7 +135,7 @@ public class ArchivaIndexingTaskExecutor | |||
} | |||
catch ( RepositoryAdminException e ) | |||
{ | |||
log.error( "Error occurred while creating context: " + e.getMessage() ); | |||
log.error( "Error occurred while creating context: {}", e.getMessage() ); | |||
throw new TaskExecutionException( "Error occurred while creating context: " + e.getMessage(), e ); | |||
} | |||
} | |||
@@ -227,7 +227,7 @@ public class ArchivaIndexingTaskExecutor | |||
} | |||
catch ( IOException e ) | |||
{ | |||
log.error( "Error occurred while executing indexing task '" + indexingTask + "': " + e.getMessage(), | |||
log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage(), | |||
e ); | |||
throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'", | |||
e ); | |||
@@ -261,7 +261,7 @@ public class ArchivaIndexingTaskExecutor | |||
} | |||
catch ( IOException e ) | |||
{ | |||
log.error( "Error occurred while executing indexing task '" + indexingTask + "': " + e.getMessage() ); | |||
log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage() ); | |||
throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'", | |||
e ); | |||
} |
@@ -387,7 +387,7 @@ public class DefaultRepositoryArchivaTaskScheduler | |||
catch ( RuntimeException e ) | |||
{ | |||
log.error( | |||
"ParseException in repository scanning cron expression, disabling repository scanning for '': {}", | |||
"ParseException in repository scanning cron expression, disabling repository scanning for '{}': {}", | |||
repoConfig.getId(), e.getMessage() ); | |||
} | |||
@@ -83,7 +83,7 @@ public class DefaultCommonServices | |||
} | |||
catch ( IOException e ) | |||
{ | |||
log.warn( "skip error loading properties {}", resourceName.toString() ); | |||
log.warn( "skip error loading properties {}", resourceName ); | |||
} | |||
return fromProperties( properties ); |
@@ -72,7 +72,7 @@ public class DefaultPluginsServices | |||
{ | |||
sb.append( repoType ).append( "|" ); | |||
} | |||
log.debug( "getAdminPlugins: {}", sb.toString() ); | |||
log.debug( "getAdminPlugins: {}", sb ); | |||
if ( sb.length() > 1 ) | |||
{ | |||
adminPlugins = sb.substring( 0, sb.length() - 1 ); |
@@ -352,7 +352,7 @@ public class DefaultRepositoriesService | |||
} | |||
catch ( AuthorizationException e ) | |||
{ | |||
log.error( "error reading permission: " + e.getMessage(), e ); | |||
log.error( "error reading permission: {}", e.getMessage(), e ); | |||
throw new ArchivaRestServiceException( e.getMessage(), e ); | |||
} | |||
@@ -370,7 +370,7 @@ public class DefaultRepositoriesService | |||
} | |||
catch ( AuthorizationException e ) | |||
{ | |||
log.error( "error reading permission: " + e.getMessage(), e ); | |||
log.error( "error reading permission: {}", e.getMessage(), e ); | |||
throw new ArchivaRestServiceException( e.getMessage(), e ); | |||
} | |||
@@ -394,7 +394,7 @@ public class DefaultRepositoriesService | |||
if ( StringUtils.isEmpty( artifactSourcePath ) ) | |||
{ | |||
log.error( "cannot find artifact " + artifactTransferRequest.toString() ); | |||
log.error( "cannot find artifact {}", artifactTransferRequest ); | |||
throw new ArchivaRestServiceException( "cannot find artifact " + artifactTransferRequest.toString(), | |||
null ); | |||
} | |||
@@ -403,7 +403,7 @@ public class DefaultRepositoriesService | |||
if ( !artifactFile.exists() ) | |||
{ | |||
log.error( "cannot find artifact " + artifactTransferRequest.toString() ); | |||
log.error( "cannot find artifact {}", artifactTransferRequest ); | |||
throw new ArchivaRestServiceException( "cannot find artifact " + artifactTransferRequest.toString(), | |||
null ); | |||
} | |||
@@ -487,17 +487,17 @@ public class DefaultRepositoriesService | |||
} | |||
catch ( RepositoryException e ) | |||
{ | |||
log.error( "RepositoryException: " + e.getMessage(), e ); | |||
log.error( "RepositoryException: {}", e.getMessage(), e ); | |||
throw new ArchivaRestServiceException( e.getMessage(), e ); | |||
} | |||
catch ( RepositoryAdminException e ) | |||
{ | |||
log.error( "RepositoryAdminException: " + e.getMessage(), e ); | |||
log.error( "RepositoryAdminException: {}", e.getMessage(), e ); | |||
throw new ArchivaRestServiceException( e.getMessage(), e ); | |||
} | |||
catch ( IOException e ) | |||
{ | |||
log.error( "IOException: " + e.getMessage(), e ); | |||
log.error( "IOException: {}", e.getMessage(), e ); | |||
throw new ArchivaRestServiceException( e.getMessage(), e ); | |||
} | |||
return true; | |||
@@ -517,8 +517,8 @@ public class DefaultRepositoriesService | |||
} | |||
catch ( TaskQueueException e ) | |||
{ | |||
log.error( "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName() | |||
+ "']." ); | |||
log.error( "Unable to queue repository task to execute consumers on resource file ['{}" | |||
+ "'].", localFile.getName() ); | |||
} | |||
} | |||
@@ -569,8 +569,8 @@ public class DefaultFileUploadService | |||
} | |||
catch ( TaskQueueException e ) | |||
{ | |||
log.error( "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName() | |||
+ "']." ); | |||
log.error( "Unable to queue repository task to execute consumers on resource file ['{}" | |||
+ "'].", localFile.getName() ); | |||
} | |||
} | |||
@@ -73,7 +73,7 @@ public class ArchivaLdapConnectionFactory | |||
} | |||
catch ( InvalidNameException e ) | |||
{ | |||
log.error("Error during initialization of LdapConnectionFactory "+e.getMessage(),e); | |||
log.error( "Error during initialization of LdapConnectionFactory {}", e.getMessage(), e ); | |||
// throw new RuntimeException( "Error while initializing connection factory.", e ); | |||
} | |||
catch ( RepositoryAdminException e ) |
@@ -89,7 +89,7 @@ public class ArchivaUserManagerAuthenticator | |||
} | |||
catch ( RepositoryAdminException e ) | |||
{ | |||
log.error("Error during repository initialization "+e.getMessage(),e); | |||
log.error( "Error during repository initialization {}", e.getMessage(), e ); | |||
// throw new AuthenticationException( e.getMessage(), e ); | |||
} | |||
} |
@@ -152,7 +152,7 @@ public class SecuritySynchronization | |||
catch ( RoleManagerException e ) | |||
{ | |||
// Log error. | |||
log.error( "Unable to create roles for configured repositories: " + e.getMessage(), e ); | |||
log.error( "Unable to create roles for configured repositories: {}", e.getMessage(), e ); | |||
} | |||
} | |||
@@ -522,7 +522,7 @@ public class ArchivaDavResource | |||
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_FILE ); | |||
} | |||
log.debug( "{}{}' copied to '{}' (current user '{)')", ( isCollection() ? "Directory '" : "File '" ), | |||
log.debug( "{}{}' copied to '{}' (current user '{}')", ( isCollection() ? "Directory '" : "File '" ), | |||
getLocalResource().getName(), destination, this.principal ); | |||
} | |||
@@ -723,8 +723,8 @@ public class ArchivaDavResource | |||
} | |||
catch ( TaskQueueException e ) | |||
{ | |||
log.error( "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName() | |||
+ "']." ); | |||
log.error( "Unable to queue repository task to execute consumers on resource file ['{}" | |||
+ "'].", localFile.getName() ); | |||
} | |||
} | |||
} |
@@ -100,7 +100,7 @@ public class MimeTypes | |||
} | |||
catch ( IOException e ) | |||
{ | |||
log.error( "Unable to load mime map " + resourceName + " : " + e.getMessage(), e ); | |||
log.error( "Unable to load mime map {} : {}", resourceName, e.getMessage(), e ); | |||
} | |||
} | |||
@@ -146,7 +146,7 @@ public class MimeTypes | |||
} | |||
catch ( IOException e ) | |||
{ | |||
log.error( "Unable to read mime types from input stream : " + e.getMessage(), e ); | |||
log.error( "Unable to read mime types from input stream : {}", e.getMessage(), e ); | |||
} | |||
} | |||
@@ -134,7 +134,7 @@ public class DefaultAuditManager | |||
} | |||
catch ( ParseException e ) | |||
{ | |||
log.error( "Invalid audit event found in the metadata repository: " + e.getMessage() ); | |||
log.error( "Invalid audit event found in the metadata repository: {}", e.getMessage() ); | |||
// continue and ignore this one | |||
} | |||
} |
@@ -458,7 +458,7 @@ public class DefaultCassandraArchivaManager | |||
{ // ensure keyspace exists, here if the keyspace doesn't exist we suppose nothing exist | |||
if ( cluster.describeKeyspace( keyspaceName ) == null ) | |||
{ | |||
logger.info( "Creating Archiva Cassandra '" + keyspaceName + "' keyspace." ); | |||
logger.info( "Creating Archiva Cassandra '{}' keyspace.", keyspaceName ); | |||
cluster.addKeyspace( HFactory.createKeyspaceDefinition( keyspaceName, // | |||
ThriftKsDef.DEF_STRATEGY_CLASS, // | |||
replicationFactor, // |
@@ -570,7 +570,7 @@ public class FileMetadataRepository | |||
MetadataFacetFactory factory = metadataFacetFactories.get( facetId ); | |||
if ( factory == null ) | |||
{ | |||
log.error( "Attempted to load unknown artifact metadata facet: " + facetId ); | |||
log.error( "Attempted to load unknown artifact metadata facet: {}", facetId ); | |||
} | |||
else | |||
{ |
@@ -1353,7 +1353,7 @@ public class JcrMetadataRepository | |||
getJcrSession().save(); | |||
} catch ( InvalidItemStateException e ) { | |||
// olamy this might happen when deleting a repo while is under scanning | |||
log.warn( "skip InvalidItemStateException:" + e.getMessage(), e ); | |||
log.warn( "skip InvalidItemStateException:{}", e.getMessage(), e ); | |||
} | |||
catch ( RepositoryException e ) | |||
{ | |||
@@ -1493,7 +1493,7 @@ public class JcrMetadataRepository | |||
MetadataFacetFactory factory = metadataFacetFactories.get( name ); | |||
if ( factory == null ) | |||
{ | |||
log.error( "Attempted to load unknown project version metadata facet: " + name ); | |||
log.error( "Attempted to load unknown project version metadata facet: {}", name ); | |||
} | |||
else | |||
{ |
@@ -156,7 +156,7 @@ public class DefaultRepositoryStatisticsManager | |||
} | |||
catch ( ParseException e ) | |||
{ | |||
log.error( "Invalid scan result found in the metadata repository: " + e.getMessage() ); | |||
log.error( "Invalid scan result found in the metadata repository: {}", e.getMessage() ); | |||
// continue and ignore this one | |||
} | |||
} |