Diffstat (limited to 'archiva-base')
17 files changed, 215 insertions, 190 deletions
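Nearly every hunk in this change applies the same pattern: a component stops extending Plexus' AbstractLogEnabled and instead declares its own SLF4J instance logger, replacing getLogger() calls with that field (Checksums.java and AuditLog.java make the equivalent move from a static SLF4J/log4j logger to the same style of declaration). A condensed sketch of the pattern follows; the class name is illustrative and not part of the diff.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Before: "public class ExampleComponent extends AbstractLogEnabled" with getLogger().debug( ... )
public class ExampleComponent
{
    // Same declaration style used throughout this diff (Checksums, FileTypes, the policies, the proxy connectors, ...).
    private Logger log = LoggerFactory.getLogger( ExampleComponent.class );

    public void doWork()
    {
        log.debug( "working" );   // was: getLogger().debug( "working" );
    }
}

In most of the converted classes the new field is a plain instance field rather than the more common private static final form; SLF4J accepts either, and the instance form keeps the declaration identical across all of the converted components.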
diff --git a/archiva-base/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/Checksums.java b/archiva-base/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/Checksums.java index 11b16f9f4..8abdfb88e 100644 --- a/archiva-base/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/Checksums.java +++ b/archiva-base/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/Checksums.java @@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory; */ public class Checksums { - private static final Logger LOG = LoggerFactory.getLogger(Checksums.class); + private Logger log = LoggerFactory.getLogger(Checksums.class); /** * @plexus.requirement role-hint="sha1" @@ -66,7 +66,7 @@ public class Checksums // Both files missing is a failure. if ( !sha1File.exists() && !md5File.exists() ) { - LOG.error( "File " + file.getPath() + " has no checksum files (sha1 or md5)." ); + log.error( "File " + file.getPath() + " has no checksum files (sha1 or md5)." ); checksPass = false; } @@ -75,7 +75,7 @@ public class Checksums // Bad sha1 checksum is a failure. if ( !validateChecksum( sha1File, "sha1" ) ) { - LOG.warn( "SHA1 is incorrect for " + file.getPath() ); + log.warn( "SHA1 is incorrect for " + file.getPath() ); checksPass = false; } } @@ -85,7 +85,7 @@ public class Checksums // Bad md5 checksum is a failure. if ( !validateChecksum( md5File, "md5" ) ) { - LOG.warn( "MD5 is incorrect for " + file.getPath() ); + log.warn( "MD5 is incorrect for " + file.getPath() ); checksPass = false; } } @@ -139,12 +139,12 @@ public class Checksums } catch ( DigesterException e ) { - LOG.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e ); + log.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e ); return false; } catch ( IOException e ) { - LOG.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e ); + log.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e ); return false; } } @@ -169,28 +169,28 @@ public class Checksums { if ( checksumFile.isValidChecksum( hashFile ) ) { - LOG.debug( "Valid checksum: " + hashFile.getPath() ); + log.debug( "Valid checksum: " + hashFile.getPath() ); return true; } else { - LOG.debug( "Not valid checksum: " + hashFile.getPath() ); + log.debug( "Not valid checksum: " + hashFile.getPath() ); return createChecksum( localFile, digester ); } } catch ( FileNotFoundException e ) { - LOG.warn( "Unable to find " + ext + " file: " + hashFile.getAbsolutePath(), e ); + log.warn( "Unable to find " + ext + " file: " + hashFile.getAbsolutePath(), e ); return false; } catch ( DigesterException e ) { - LOG.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e ); + log.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e ); return false; } catch ( IOException e ) { - LOG.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e ); + log.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e ); return false; } } @@ -213,27 +213,27 @@ public class Checksums boolean validity = checksumFile.isValidChecksum( hashFile ); if ( validity ) { - LOG.debug( "Valid checksum: " + hashFile.getPath() ); + log.debug( "Valid checksum: " + hashFile.getPath() ); } else { - LOG.debug( "Not valid checksum: " + hashFile.getPath() ); + log.debug( "Not valid checksum: " + hashFile.getPath() ); } return validity; } catch ( FileNotFoundException e ) 
{ - LOG.warn( "Unable to find " + type + " file: " + hashFile.getAbsolutePath(), e ); + log.warn( "Unable to find " + type + " file: " + hashFile.getAbsolutePath(), e ); return false; } catch ( DigesterException e ) { - LOG.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e ); + log.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e ); return false; } catch ( IOException e ) { - LOG.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e ); + log.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e ); return false; } } diff --git a/archiva-base/archiva-configuration/src/main/java/org/apache/maven/archiva/configuration/DefaultArchivaConfiguration.java b/archiva-base/archiva-configuration/src/main/java/org/apache/maven/archiva/configuration/DefaultArchivaConfiguration.java index 4ca6f0229..987a456d7 100644 --- a/archiva-base/archiva-configuration/src/main/java/org/apache/maven/archiva/configuration/DefaultArchivaConfiguration.java +++ b/archiva-base/archiva-configuration/src/main/java/org/apache/maven/archiva/configuration/DefaultArchivaConfiguration.java @@ -36,12 +36,13 @@ import org.codehaus.plexus.evaluator.DefaultExpressionEvaluator; import org.codehaus.plexus.evaluator.EvaluatorException; import org.codehaus.plexus.evaluator.ExpressionEvaluator; import org.codehaus.plexus.evaluator.sources.SystemPropertyExpressionSource; -import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.RegistryException; import org.codehaus.plexus.registry.RegistryListener; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; @@ -83,9 +84,10 @@ import java.util.Map.Entry; * @plexus.component role="org.apache.maven.archiva.configuration.ArchivaConfiguration" */ public class DefaultArchivaConfiguration - extends AbstractLogEnabled implements ArchivaConfiguration, RegistryListener, Initializable { + private Logger log = LoggerFactory.getLogger(DefaultArchivaConfiguration.class); + /** * Plexus registry to read the configuration from. * @@ -287,7 +289,7 @@ public class DefaultArchivaConfiguration else { // Policy key doesn't exist. Don't add it to golden version. - getLogger().warn( "Policy [" + policyId + "] does not exist." ); + log.warn( "Policy [" + policyId + "] does not exist." ); } } @@ -323,13 +325,13 @@ public class DefaultArchivaConfiguration { if ( MapUtils.isEmpty( prePolicies ) ) { - getLogger().error( "No PreDownloadPolicies found!" ); + log.error( "No PreDownloadPolicies found!" ); return null; } if ( MapUtils.isEmpty( postPolicies ) ) { - getLogger().error( "No PostDownloadPolicies found!" ); + log.error( "No PostDownloadPolicies found!" ); return null; } @@ -354,13 +356,13 @@ public class DefaultArchivaConfiguration { if ( MapUtils.isEmpty( prePolicies ) ) { - getLogger().error( "No PreDownloadPolicies found!" ); + log.error( "No PreDownloadPolicies found!" ); return false; } if ( MapUtils.isEmpty( postPolicies ) ) { - getLogger().error( "No PostDownloadPolicies found!" ); + log.error( "No PostDownloadPolicies found!" 
); return false; } @@ -505,7 +507,7 @@ public class DefaultArchivaConfiguration } catch ( IOException e ) { - getLogger().error( "Unable to create " + filetype + " file: " + e.getMessage(), e ); + log.error( "Unable to create " + filetype + " file: " + e.getMessage(), e ); return false; } } @@ -521,7 +523,7 @@ public class DefaultArchivaConfiguration } catch ( Throwable t ) { - getLogger().warn( "Unable to notify of saved configuration event.", t ); + log.warn( "Unable to notify of saved configuration event.", t ); } } } diff --git a/archiva-base/archiva-configuration/src/main/java/org/apache/maven/archiva/configuration/FileTypes.java b/archiva-base/archiva-configuration/src/main/java/org/apache/maven/archiva/configuration/FileTypes.java index c286eac50..486e8f495 100644 --- a/archiva-base/archiva-configuration/src/main/java/org/apache/maven/archiva/configuration/FileTypes.java +++ b/archiva-base/archiva-configuration/src/main/java/org/apache/maven/archiva/configuration/FileTypes.java @@ -19,23 +19,25 @@ package org.apache.maven.archiva.configuration; * under the License. */ +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.Predicate; import org.apache.commons.configuration.CombinedConfiguration; +import org.apache.maven.archiva.common.utils.Slf4JPlexusLogger; import org.apache.maven.archiva.configuration.functors.FiletypeSelectionPredicate; import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryReader; -import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.registry.RegistryException; import org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry; - -import java.lang.reflect.Field; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * FileTypes @@ -46,9 +48,10 @@ import java.util.Map; * @plexus.component role="org.apache.maven.archiva.configuration.FileTypes" */ public class FileTypes - extends AbstractLogEnabled implements Initializable { + private Logger log = LoggerFactory.getLogger(FileTypes.class); + public static final String ARTIFACTS = "artifacts"; public static final String AUTO_REMOVE = "auto-remove"; @@ -122,7 +125,7 @@ public class FileTypes Field fld = commonsRegistry.getClass().getDeclaredField( "configuration" ); fld.setAccessible( true ); fld.set( commonsRegistry, new CombinedConfiguration() ); - commonsRegistry.enableLogging( getLogger() ); + commonsRegistry.enableLogging( new Slf4JPlexusLogger( FileTypes.class ) ); commonsRegistry.addConfigurationFromResource( "org/apache/maven/archiva/configuration/default-archiva.xml" ); // Read configuration as it was intended. 
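The FileTypes hunk above passes a Slf4JPlexusLogger into commonsRegistry.enableLogging( ... ), because the Plexus registry component still expects an org.codehaus.plexus.logging.Logger rather than an SLF4J one. The adapter's source is not included in this diff; the sketch below only illustrates the shape such a bridge would take (in practice it would declare that it implements the Plexus Logger interface; the remaining interface methods are omitted here), with the constructor signature inferred from the call site new Slf4JPlexusLogger( FileTypes.class ).

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch only: the real org.apache.maven.archiva.common.utils.Slf4JPlexusLogger is not part of this diff.
public class Slf4JPlexusLogger
{
    private final Logger log;

    public Slf4JPlexusLogger( Class clazz )
    {
        this.log = LoggerFactory.getLogger( clazz );
    }

    public void debug( String message )                      { log.debug( message ); }
    public void debug( String message, Throwable throwable ) { log.debug( message, throwable ); }
    public void info( String message )                       { log.info( message ); }
    public void info( String message, Throwable throwable )  { log.info( message, throwable ); }
    public void warn( String message )                       { log.warn( message ); }
    public void warn( String message, Throwable throwable )  { log.warn( message, throwable ); }
    public void error( String message )                      { log.error( message ); }
    public void error( String message, Throwable throwable ) { log.error( message, throwable ); }
    public void fatalError( String message )                 { log.error( message ); }   // SLF4J has no fatal level; map to error
    public void fatalError( String message, Throwable t )    { log.error( message, t ); }
    public boolean isDebugEnabled()                           { return log.isDebugEnabled(); }
    public String getName()                                   { return log.getName(); }
}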
diff --git a/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/functors/IndexExistsPredicate.java b/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/functors/IndexExistsPredicate.java index 03b74be02..1dcf17c69 100644 --- a/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/functors/IndexExistsPredicate.java +++ b/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/functors/IndexExistsPredicate.java @@ -22,7 +22,8 @@ package org.apache.maven.archiva.indexer.functors; import org.apache.commons.collections.Predicate; import org.apache.maven.archiva.indexer.RepositoryContentIndex; import org.apache.maven.archiva.indexer.RepositoryIndexException; -import org.codehaus.plexus.logging.AbstractLogEnabled; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the {@link RepositoryContentIndex} object for the existance of an index. @@ -35,9 +36,10 @@ import org.codehaus.plexus.logging.AbstractLogEnabled; * role-hint="index-exists" */ public class IndexExistsPredicate - extends AbstractLogEnabled implements Predicate { + private Logger log = LoggerFactory.getLogger( IndexExistsPredicate.class ); + public boolean evaluate( Object object ) { boolean satisfies = false; @@ -51,7 +53,7 @@ public class IndexExistsPredicate } catch ( RepositoryIndexException e ) { - getLogger().info( + log.info( "Repository Content Index [" + index.getId() + "] for repository [" + index.getRepository().getId() + "] does not exist yet in [" + index.getIndexDirectory().getAbsolutePath() + "]." ); diff --git a/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/functors/SearchableTransformer.java b/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/functors/SearchableTransformer.java index 6eb81bd4c..0bee20a43 100644 --- a/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/functors/SearchableTransformer.java +++ b/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/functors/SearchableTransformer.java @@ -22,7 +22,8 @@ package org.apache.maven.archiva.indexer.functors; import org.apache.commons.collections.Transformer; import org.apache.maven.archiva.indexer.RepositoryIndexSearchException; import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndex; -import org.codehaus.plexus.logging.AbstractLogEnabled; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * SearchableTransformer @@ -33,9 +34,10 @@ import org.codehaus.plexus.logging.AbstractLogEnabled; * @plexus.component role="org.apache.commons.collections.Transformer" role-hint="searchable" */ public class SearchableTransformer - extends AbstractLogEnabled implements Transformer { + private Logger log = LoggerFactory.getLogger( SearchableTransformer.class ); + public Object transform( Object input ) { if ( input instanceof LuceneRepositoryContentIndex ) @@ -47,7 +49,7 @@ public class SearchableTransformer } catch ( RepositoryIndexSearchException e ) { - getLogger().warn("Unable to get searchable for index:" + e.getMessage(), e); + log.warn("Unable to get searchable for index:" + e.getMessage(), e); } } diff --git a/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/DefaultCrossRepositorySearch.java b/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/DefaultCrossRepositorySearch.java index 2b67d9caa..98490ebe4 100644 --- 
a/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/DefaultCrossRepositorySearch.java +++ b/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/DefaultCrossRepositorySearch.java @@ -19,9 +19,10 @@ package org.apache.maven.archiva.indexer.search; * under the License. */ -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.Predicate; -import org.apache.commons.collections.Transformer; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + import org.apache.lucene.document.Document; import org.apache.lucene.queryParser.MultiFieldQueryParser; import org.apache.lucene.queryParser.ParseException; @@ -38,21 +39,17 @@ import org.apache.maven.archiva.indexer.RepositoryIndexException; import org.apache.maven.archiva.indexer.RepositoryIndexSearchException; import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers; import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers; -import org.apache.maven.archiva.indexer.functors.UserAllowedToSearchRepositoryPredicate; import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers; import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys; import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter; import org.apache.maven.archiva.indexer.lucene.LuceneQuery; import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord; -import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.RegistryListener; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DefaultCrossRepositorySearch @@ -62,9 +59,10 @@ import java.util.List; * @plexus.component role="org.apache.maven.archiva.indexer.search.CrossRepositorySearch" role-hint="default" */ public class DefaultCrossRepositorySearch - extends AbstractLogEnabled implements CrossRepositorySearch, RegistryListener, Initializable { + private Logger log = LoggerFactory.getLogger( DefaultCrossRepositorySearch.class ); + /** * @plexus.requirement role-hint="lucene" */ @@ -93,7 +91,7 @@ public class DefaultCrossRepositorySearch } catch ( ParseException e ) { - getLogger().warn( "Unable to parse query [" + checksum + "]: " + e.getMessage(), e ); + log.warn( "Unable to parse query [" + checksum + "]: " + e.getMessage(), e ); } // empty results. @@ -115,7 +113,7 @@ public class DefaultCrossRepositorySearch } catch ( ParseException e ) { - getLogger().warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e ); + log.warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e ); } // empty results. @@ -137,7 +135,7 @@ public class DefaultCrossRepositorySearch } catch ( ParseException e ) { - getLogger().warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e ); + log.warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e ); } // empty results. 
@@ -212,7 +210,7 @@ public class DefaultCrossRepositorySearch } catch ( java.text.ParseException e ) { - getLogger().warn( "Unable to parse document into record: " + e.getMessage(), e ); + log.warn( "Unable to parse document into record: " + e.getMessage(), e ); } } } @@ -220,7 +218,7 @@ public class DefaultCrossRepositorySearch } catch ( IOException e ) { - getLogger().error( "Unable to setup multi-search: " + e.getMessage(), e ); + log.error( "Unable to setup multi-search: " + e.getMessage(), e ); } finally { @@ -233,7 +231,7 @@ public class DefaultCrossRepositorySearch } catch ( IOException ie ) { - getLogger().error( "Unable to close index searcher: " + ie.getMessage(), ie ); + log.error( "Unable to close index searcher: " + ie.getMessage(), ie ); } } @@ -251,7 +249,7 @@ public class DefaultCrossRepositorySearch } catch ( RepositoryIndexSearchException e ) { - getLogger().warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :" + log.warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :" + e.getMessage(), e ); } } @@ -329,7 +327,7 @@ public class DefaultCrossRepositorySearch } catch ( RepositoryIndexException e ) { - getLogger().info( + log.info( "Repository Content Index [" + index.getId() + "] for repository [" + index.getRepository().getId() + "] does not exist yet in [" + index.getIndexDirectory().getAbsolutePath() + "]." ); diff --git a/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/AbstractUpdatePolicy.java b/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/AbstractUpdatePolicy.java index 3f099e657..0b44c7c67 100644 --- a/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/AbstractUpdatePolicy.java +++ b/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/AbstractUpdatePolicy.java @@ -19,16 +19,17 @@ package org.apache.maven.archiva.policies; * under the License. */ -import org.apache.commons.lang.StringUtils; -import org.apache.maven.archiva.common.utils.VersionUtil; -import org.codehaus.plexus.logging.AbstractLogEnabled; - import java.io.File; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.Properties; +import org.apache.commons.lang.StringUtils; +import org.apache.maven.archiva.common.utils.VersionUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * AbstractUpdatePolicy * @@ -36,9 +37,10 @@ import java.util.Properties; * @version $Id$ */ public abstract class AbstractUpdatePolicy - extends AbstractLogEnabled implements PreDownloadPolicy { + private Logger log = LoggerFactory.getLogger( AbstractUpdatePolicy.class ); + /** * The ALWAYS policy setting means that the artifact is always uipdated from the remote repo. */ @@ -127,20 +129,20 @@ public abstract class AbstractUpdatePolicy if ( ALWAYS.equals( policySetting ) ) { // Skip means ok to update. - getLogger().debug( "OK to update, " + getUpdateMode() + " policy set to ALWAYS." ); + log.debug( "OK to update, " + getUpdateMode() + " policy set to ALWAYS." ); return; } // Test for mismatches. if ( !isSnapshotVersion && isSnapshotPolicy() ) { - getLogger().debug( "OK to update, snapshot policy does not apply for non-snapshot versions." ); + log.debug( "OK to update, snapshot policy does not apply for non-snapshot versions." ); return; } if ( isSnapshotVersion && !isSnapshotPolicy() ) { - getLogger().debug( "OK to update, release policy does not apply for snapshot versions." 
); + log.debug( "OK to update, release policy does not apply for snapshot versions." ); return; } @@ -153,7 +155,7 @@ public abstract class AbstractUpdatePolicy if ( !localFile.exists() ) { // No file means it's ok. - getLogger().debug( "OK to update " + getUpdateMode() + ", local file does not exist." ); + log.debug( "OK to update " + getUpdateMode() + ", local file does not exist." ); return; } diff --git a/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/CachedFailuresPolicy.java b/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/CachedFailuresPolicy.java index c7d44258c..9f4495dc9 100644 --- a/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/CachedFailuresPolicy.java +++ b/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/CachedFailuresPolicy.java @@ -19,15 +19,16 @@ package org.apache.maven.archiva.policies; * under the License. */ -import org.apache.commons.lang.StringUtils; -import org.apache.maven.archiva.policies.urlcache.UrlFailureCache; -import org.codehaus.plexus.logging.AbstractLogEnabled; - import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Properties; +import org.apache.commons.lang.StringUtils; +import org.apache.maven.archiva.policies.urlcache.UrlFailureCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * {@link PreDownloadPolicy} to check if the requested url has failed before. * @@ -37,9 +38,10 @@ import java.util.Properties; * role-hint="cache-failures" */ public class CachedFailuresPolicy - extends AbstractLogEnabled implements PreDownloadPolicy { + private Logger log = LoggerFactory.getLogger( CachedFailuresPolicy.class ); + /** * The NO policy setting means that the the existence of old failures is <strong>not</strong> checked. * All resource requests are allowed thru to the remote repo. @@ -78,7 +80,7 @@ public class CachedFailuresPolicy if ( NO.equals( policySetting ) ) { // Skip. - getLogger().debug( "OK to fetch, check-failures policy set to NO." ); + log.debug( "OK to fetch, check-failures policy set to NO." ); return; } @@ -92,7 +94,7 @@ public class CachedFailuresPolicy } } - getLogger().debug( "OK to fetch, check-failures detected no issues." ); + log.debug( "OK to fetch, check-failures detected no issues." ); } public String getDefaultOption() diff --git a/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/ChecksumPolicy.java b/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/ChecksumPolicy.java index c86bbaaec..9b91c42d1 100644 --- a/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/ChecksumPolicy.java +++ b/archiva-base/archiva-policies/src/main/java/org/apache/maven/archiva/policies/ChecksumPolicy.java @@ -19,15 +19,16 @@ package org.apache.maven.archiva.policies; * under the License. */ -import org.apache.commons.lang.StringUtils; -import org.apache.maven.archiva.common.utils.Checksums; -import org.codehaus.plexus.logging.AbstractLogEnabled; - import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Properties; +import org.apache.commons.lang.StringUtils; +import org.apache.maven.archiva.common.utils.Checksums; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * ChecksumPolicy - a policy applied after the download to see if the file has been downloaded * successfully and completely (or not). 
@@ -39,9 +40,10 @@ import java.util.Properties; * role-hint="checksum" */ public class ChecksumPolicy - extends AbstractLogEnabled implements PostDownloadPolicy { + private Logger log = LoggerFactory.getLogger( ChecksumPolicy.class ); + /** * The IGNORE policy indicates that if the checksum policy is ignored, and * the state of, contents of, or validity of the checksum files are not @@ -90,7 +92,7 @@ public class ChecksumPolicy if ( IGNORE.equals( policySetting ) ) { // Ignore. - getLogger().debug( "Checksum policy set to IGNORE." ); + log.debug( "Checksum policy set to IGNORE." ); return; } @@ -131,7 +133,7 @@ public class ChecksumPolicy { if( checksums.update( localFile ) ) { - getLogger().debug( "Checksum policy set to FIX, checksum files have been updated." ); + log.debug( "Checksum policy set to FIX, checksum files have been updated." ); return; } else diff --git a/archiva-base/archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultRepositoryProxyConnectors.java b/archiva-base/archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultRepositoryProxyConnectors.java index 208f822ef..63bd89e7e 100644 --- a/archiva-base/archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultRepositoryProxyConnectors.java +++ b/archiva-base/archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultRepositoryProxyConnectors.java @@ -19,9 +19,18 @@ package org.apache.maven.archiva.proxy; * under the License. */ +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Map.Entry; + import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ConfigurationNames; @@ -56,24 +65,13 @@ import org.apache.maven.wagon.authentication.AuthenticationException; import org.apache.maven.wagon.authentication.AuthenticationInfo; import org.apache.maven.wagon.proxy.ProxyInfo; import org.apache.maven.wagon.repository.Repository; -import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.RegistryListener; import org.codehaus.plexus.util.SelectorUtils; - -import java.io.File; -import java.io.IOException; -import java.net.URLClassLoader; -import java.net.URL; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DefaultRepositoryProxyConnectors @@ -83,9 +81,10 @@ import java.util.Properties; * @plexus.component role-hint="default" */ public class DefaultRepositoryProxyConnectors - extends AbstractLogEnabled implements RepositoryProxyConnectors, RegistryListener, Initializable { + private Logger log = LoggerFactory.getLogger( DefaultRepositoryProxyConnectors.class ); + /** * @plexus.requirement */ @@ -160,29 +159,29 @@ public class DefaultRepositoryProxyConnectors if ( fileExists( downloadedFile ) ) { - getLogger().debug( "Successfully transferred: " + 
downloadedFile.getAbsolutePath() ); + log.debug( "Successfully transferred: " + downloadedFile.getAbsolutePath() ); return downloadedFile; } } catch ( NotFoundException e ) { - getLogger().debug( "Artifact " + Keys.toKey( artifact ) + " not found on repository \"" + log.debug( "Artifact " + Keys.toKey( artifact ) + " not found on repository \"" + targetRepository.getRepository().getId() + "\"." ); } catch ( NotModifiedException e ) { - getLogger().debug( "Artifact " + Keys.toKey( artifact ) + " not updated on repository \"" + log.debug( "Artifact " + Keys.toKey( artifact ) + " not updated on repository \"" + targetRepository.getRepository().getId() + "\"." ); } catch ( ProxyException e ) { - getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + + log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + "\" for artifact " + Keys.toKey( artifact ) + ", continuing to next repository. Error message: " + e.getMessage() ); - getLogger().debug( "Full stack trace", e ); + log.debug( "Full stack trace", e ); } } - getLogger().debug( "Exhausted all target repositories, artifact " + Keys.toKey( artifact ) + " not found." ); + log.debug( "Exhausted all target repositories, artifact " + Keys.toKey( artifact ) + " not found." ); return null; } @@ -221,22 +220,22 @@ public class DefaultRepositoryProxyConnectors } catch ( NotFoundException e ) { - getLogger().debug( "Versioned Metadata " + Keys.toKey( metadata ) + log.debug( "Versioned Metadata " + Keys.toKey( metadata ) + " not found on remote repository \"" + targetRepository.getRepository().getId() + "\"." ); } catch ( NotModifiedException e ) { - getLogger().debug( "Versioned Metadata " + Keys.toKey( metadata ) + log.debug( "Versioned Metadata " + Keys.toKey( metadata ) + " not updated on remote repository \"" + targetRepository.getRepository().getId() + "\"." ); } catch ( ProxyException e ) { - getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + + log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + "\" for versioned Metadata " + Keys.toKey( metadata ) + ", continuing to next repository. Error message: " + e.getMessage() ); - getLogger().debug( "Full stack trace", e ); + log.debug( "Full stack trace", e ); } } @@ -253,24 +252,24 @@ public class DefaultRepositoryProxyConnectors } catch ( LayoutException e ) { - getLogger().warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() ); + log.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() ); // TODO: add into repository report? } catch ( RepositoryMetadataException e ) { - getLogger() + log .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); // TODO: add into repository report? } catch ( IOException e ) { - getLogger() + log .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); // TODO: add into repository report? } catch ( ContentNotFoundException e ) { - getLogger() + log .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); // TODO: add into repository report? 
} @@ -339,21 +338,21 @@ public class DefaultRepositoryProxyConnectors } catch ( NotFoundException e ) { - getLogger().debug( "Project Metadata " + Keys.toKey( metadata ) + " not found on remote repository \"" + log.debug( "Project Metadata " + Keys.toKey( metadata ) + " not found on remote repository \"" + targetRepository.getRepository().getId() + "\"." ); } catch ( NotModifiedException e ) { - getLogger().debug( "Project Metadata " + Keys.toKey( metadata ) + log.debug( "Project Metadata " + Keys.toKey( metadata ) + " not updated on remote repository \"" + targetRepository.getRepository().getId() + "\"." ); } catch ( ProxyException e ) { - getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + + log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + "\" for project metadata " + Keys.toKey( metadata ) + ", continuing to next repository. Error message: " + e.getMessage() ); - getLogger().debug( "Full stack trace", e ); + log.debug( "Full stack trace", e ); } } @@ -371,24 +370,24 @@ public class DefaultRepositoryProxyConnectors } catch ( LayoutException e ) { - getLogger().warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() ); + log.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() ); // TODO: add into repository report? } catch ( RepositoryMetadataException e ) { - getLogger() + log .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); // TODO: add into repository report? } catch ( IOException e ) { - getLogger() + log .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); // TODO: add into repository report? } catch ( ContentNotFoundException e ) { - getLogger() + log .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); // TODO: add into repository report? } @@ -495,7 +494,7 @@ public class DefaultRepositoryProxyConnectors // Path must belong to whitelist. if ( !matchesPattern( remotePath, connector.getWhitelist() ) ) { - getLogger().debug( "Path [" + remotePath + + log.debug( "Path [" + remotePath + "] is not part of defined whitelist (skipping transfer from repository [" + remoteRepository.getRepository().getName() + "])." ); return null; @@ -505,7 +504,7 @@ public class DefaultRepositoryProxyConnectors // Is target path part of blacklist? if ( matchesPattern( remotePath, connector.getBlacklist() ) ) { - getLogger().debug( "Path [" + remotePath + "] is part of blacklist (skipping transfer from repository [" + + log.debug( "Path [" + remotePath + "] is part of blacklist (skipping transfer from repository [" + remoteRepository.getRepository().getName() + "])." ); return null; } @@ -520,11 +519,11 @@ public class DefaultRepositoryProxyConnectors String emsg = "Transfer not attempted on " + url + " : " + e.getMessage(); if ( fileExists( localFile ) ) { - getLogger().info( emsg + ": using already present local file." ); + log.info( emsg + ": using already present local file." 
); return localFile; } - getLogger().info( emsg ); + log.info( emsg ); return null; } @@ -573,7 +572,7 @@ public class DefaultRepositoryProxyConnectors } catch ( ConnectionException e ) { - getLogger().warn( "Unable to disconnect wagon.", e ); + log.warn( "Unable to disconnect wagon.", e ); } } } @@ -585,7 +584,7 @@ public class DefaultRepositoryProxyConnectors } catch ( PolicyViolationException e ) { - getLogger().info( "Transfer invalidated from " + url + " : " + e.getMessage() ); + log.info( "Transfer invalidated from " + url + " : " + e.getMessage() ); if ( fileExists( localFile ) ) { return localFile; @@ -629,22 +628,22 @@ public class DefaultRepositoryProxyConnectors { File hashFile = new File( localFile.getAbsolutePath() + type ); transferSimpleFile( wagon, remoteRepository, remotePath + type, hashFile ); - getLogger().debug( "Checksum" + type + " Downloaded: " + hashFile ); + log.debug( "Checksum" + type + " Downloaded: " + hashFile ); } catch ( NotFoundException e ) { - getLogger().debug( "Transfer failed, checksum not found: " + url ); + log.debug( "Transfer failed, checksum not found: " + url ); // Consume it, do not pass this on. } catch ( NotModifiedException e ) { - getLogger().debug( "Transfer skipped, checksum not modified: " + url ); + log.debug( "Transfer skipped, checksum not modified: " + url ); // Consume it, do not pass this on. } catch ( ProxyException e ) { urlFailureCache.cacheFailure( url + type ); - getLogger().warn( "Transfer failed on checksum: " + url + " : " + e.getMessage(), e ); + log.warn( "Transfer failed on checksum: " + url + " : " + e.getMessage(), e ); // Critical issue, pass it on. throw e; } @@ -678,7 +677,7 @@ public class DefaultRepositoryProxyConnectors if ( !localFile.exists() ) { - getLogger().debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() ); + log.debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() ); wagon.get( remotePath, temp ); success = true; @@ -688,11 +687,11 @@ public class DefaultRepositoryProxyConnectors } // You wouldn't get here on failure, a WagonException would have been thrown. - getLogger().debug( "Downloaded successfully." ); + log.debug( "Downloaded successfully." ); } else { - getLogger().debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() + log.debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() + " if updated" ); success = wagon.getIfNewer( remotePath, temp, localFile.lastModified() ); if ( !success ) @@ -703,7 +702,7 @@ public class DefaultRepositoryProxyConnectors if ( temp.exists() ) { - getLogger().debug( "Downloaded successfully." ); + log.debug( "Downloaded successfully." ); moveTempToTarget( temp, localFile ); } } @@ -748,14 +747,14 @@ public class DefaultRepositoryProxyConnectors String defaultSetting = policy.getDefaultOption(); String setting = StringUtils.defaultString( (String) settings.get( key ), defaultSetting ); - getLogger().debug( "Applying [" + key + "] policy with [" + setting + "]" ); + log.debug( "Applying [" + key + "] policy with [" + setting + "]" ); try { policy.applyPolicy( setting, request, localFile ); } catch ( PolicyConfigurationException e ) { - getLogger().error( e.getMessage(), e ); + log.error( e.getMessage(), e ); } } } @@ -778,7 +777,7 @@ public class DefaultRepositoryProxyConnectors if ( !temp.renameTo( target ) ) { - getLogger().warn( "Unable to rename tmp file to its final name... resorting to copy command." 
); + log.warn( "Unable to rename tmp file to its final name... resorting to copy command." ); try { @@ -821,7 +820,7 @@ public class DefaultRepositoryProxyConnectors if ( StringUtils.isNotBlank( username ) && StringUtils.isNotBlank( password ) ) { - getLogger().debug( "Using username " + username + " to connect to remote repository " + log.debug( "Using username " + username + " to connect to remote repository " + remoteRepository.getURL() ); authInfo = new AuthenticationInfo(); authInfo.setUserName( username ); @@ -829,7 +828,7 @@ public class DefaultRepositoryProxyConnectors } else { - getLogger().debug( "No authentication for remote repository needed" ); + log.debug( "No authentication for remote repository needed" ); } //Convert seconds to milliseconds @@ -851,14 +850,14 @@ public class DefaultRepositoryProxyConnectors } catch ( ConnectionException e ) { - getLogger().warn( + log.warn( "Could not connect to " + remoteRepository.getRepository().getName() + ": " + e.getMessage() ); connected = false; } catch ( AuthenticationException e ) { - getLogger().warn( + log.warn( "Could not connect to " + remoteRepository.getRepository().getName() + ": " + e.getMessage() ); connected = false; @@ -925,6 +924,16 @@ public class DefaultRepositoryProxyConnectors { /* do nothing */ } + + private void logProcess( String managedRepoId, String resource, String event ) + { + + } + + private void logRejection( String managedRepoId, String remoteRepoId, String resource, String reason ) + { + + } private void initConnectorsAndNetworkProxies() { @@ -988,11 +997,11 @@ public class DefaultRepositoryProxyConnectors } catch ( RepositoryNotFoundException e ) { - getLogger().warn( "Unable to use proxy connector: " + e.getMessage(), e ); + log.warn( "Unable to use proxy connector: " + e.getMessage(), e ); } catch ( RepositoryException e ) { - getLogger().warn( "Unable to use proxy connector: " + e.getMessage(), e ); + log.warn( "Unable to use proxy connector: " + e.getMessage(), e ); } } diff --git a/archiva-base/archiva-proxy/src/test/java/org/apache/maven/archiva/proxy/WagonDelegate.java b/archiva-base/archiva-proxy/src/test/java/org/apache/maven/archiva/proxy/WagonDelegate.java index 13e673570..897954438 100644 --- a/archiva-base/archiva-proxy/src/test/java/org/apache/maven/archiva/proxy/WagonDelegate.java +++ b/archiva-base/archiva-proxy/src/test/java/org/apache/maven/archiva/proxy/WagonDelegate.java @@ -19,6 +19,10 @@ package org.apache.maven.archiva.proxy; * under the License. 
*/ +import java.io.File; +import java.io.IOException; +import java.util.List; + import org.apache.commons.io.FileUtils; import org.apache.maven.wagon.ConnectionException; import org.apache.maven.wagon.ResourceDoesNotExistException; @@ -31,11 +35,8 @@ import org.apache.maven.wagon.events.SessionListener; import org.apache.maven.wagon.events.TransferListener; import org.apache.maven.wagon.proxy.ProxyInfo; import org.apache.maven.wagon.repository.Repository; -import org.codehaus.plexus.logging.AbstractLogEnabled; - -import java.io.File; -import java.io.IOException; -import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A dummy wagon implementation @@ -43,9 +44,10 @@ import java.util.List; * @author <a href="mailto:brett@apache.org">Brett Porter</a> */ public class WagonDelegate - extends AbstractLogEnabled implements Wagon { + private Logger log = LoggerFactory.getLogger( WagonDelegate.class ); + private Wagon delegate; private String contentToGet; @@ -53,7 +55,7 @@ public class WagonDelegate public void get( String resourceName, File destination ) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException { - getLogger().debug( ".get(" + resourceName + ", " + destination + ")" ); + log.debug( ".get(" + resourceName + ", " + destination + ")" ); delegate.get( resourceName, destination ); create( destination ); } @@ -61,7 +63,7 @@ public class WagonDelegate public boolean getIfNewer( String resourceName, File destination, long timestamp ) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException { - getLogger().info( ".getIfNewer(" + resourceName + ", " + destination + ", " + timestamp + ")" ); + log.info( ".getIfNewer(" + resourceName + ", " + destination + ", " + timestamp + ")" ); boolean result = delegate.getIfNewer( resourceName, destination, timestamp ); createIfMissing( destination ); diff --git a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/audit/AuditLog.java b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/audit/AuditLog.java index 2dcc30f5a..3118fe597 100644 --- a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/audit/AuditLog.java +++ b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/audit/AuditLog.java @@ -19,7 +19,8 @@ package org.apache.maven.archiva.repository.audit; * under the License. */ -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * AuditLog - Audit Log. 
@@ -33,7 +34,7 @@ import org.apache.log4j.Logger; public class AuditLog implements AuditListener { - public static final Logger logger = Logger.getLogger( "org.apache.archiva.AuditLog" ); + public static final Logger logger = LoggerFactory.getLogger( "org.apache.archiva.AuditLog" ); private static final char DELIM = ' '; diff --git a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/project/ProjectModelResolverFactory.java b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/project/ProjectModelResolverFactory.java index edea17854..df7bd0fa2 100644 --- a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/project/ProjectModelResolverFactory.java +++ b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/project/ProjectModelResolverFactory.java @@ -19,6 +19,8 @@ package org.apache.maven.archiva.repository.project; * under the License. */ +import java.util.List; + import org.apache.commons.lang.StringUtils; import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ConfigurationNames; @@ -29,13 +31,12 @@ import org.apache.maven.archiva.repository.RepositoryException; import org.apache.maven.archiva.repository.project.resolvers.ManagedRepositoryProjectResolver; import org.apache.maven.archiva.repository.project.resolvers.NopProjectResolver; import org.apache.maven.archiva.repository.project.resolvers.ProjectModelResolverStack; -import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.RegistryListener; - -import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Factory for ProjectModelResolver objects @@ -45,9 +46,10 @@ import java.util.List; * @plexus.component role="org.apache.maven.archiva.repository.project.ProjectModelResolverFactory" */ public class ProjectModelResolverFactory - extends AbstractLogEnabled implements RegistryListener, Initializable { + private Logger log = LoggerFactory.getLogger( ProjectModelResolverFactory.class ); + /** * @plexus.requirement */ @@ -128,7 +130,7 @@ public class ProjectModelResolverFactory } catch ( RepositoryException e ) { - getLogger().warn( e.getMessage(), e ); + log.warn( e.getMessage(), e ); } } diff --git a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/DefaultRepositoryScanner.java b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/DefaultRepositoryScanner.java index edf078fb7..a78ee10ac 100644 --- a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/DefaultRepositoryScanner.java +++ b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/DefaultRepositoryScanner.java @@ -19,6 +19,10 @@ package org.apache.maven.archiva.repository.scanner; * under the License. 
*/ +import java.io.File; +import java.util.ArrayList; +import java.util.List; + import org.apache.commons.collections.CollectionUtils; import org.apache.maven.archiva.configuration.FileTypes; import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration; @@ -26,12 +30,9 @@ import org.apache.maven.archiva.consumers.InvalidRepositoryContentConsumer; import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer; import org.apache.maven.archiva.consumers.RepositoryContentConsumer; import org.apache.maven.archiva.repository.RepositoryException; -import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.util.DirectoryWalker; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DefaultRepositoryScanner @@ -41,9 +42,10 @@ import java.util.List; * @plexus.component role="org.apache.maven.archiva.repository.scanner.RepositoryScanner" */ public class DefaultRepositoryScanner - extends AbstractLogEnabled implements RepositoryScanner { + private Logger log = LoggerFactory.getLogger( DefaultRepositoryScanner.class ); + /** * @plexus.requirement */ @@ -112,7 +114,7 @@ public class DefaultRepositoryScanner // Setup the Scan Instance RepositoryScannerInstance scannerInstance = new RepositoryScannerInstance( repository, knownContentConsumers, - invalidContentConsumers, getLogger(), changesSince ); + invalidContentConsumers, changesSince ); dirWalker.addDirectoryWalkListener( scannerInstance ); diff --git a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/RepositoryScannerInstance.java b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/RepositoryScannerInstance.java index b4fa4b1e1..9c0f38680 100644 --- a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/RepositoryScannerInstance.java +++ b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/RepositoryScannerInstance.java @@ -19,6 +19,9 @@ package org.apache.maven.archiva.repository.scanner; * under the License. */ +import java.io.File; +import java.util.List; + import org.apache.commons.collections.Closure; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.functors.IfClosure; @@ -30,12 +33,9 @@ import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer; import org.apache.maven.archiva.repository.scanner.functors.ConsumerProcessFileClosure; import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate; import org.apache.maven.archiva.repository.scanner.functors.TriggerBeginScanClosure; -import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.util.DirectoryWalkListener; - -import java.io.File; -import java.util.List; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * RepositoryScannerInstance * @@ -45,6 +45,8 @@ import java.util.List; public class RepositoryScannerInstance implements DirectoryWalkListener { + private Logger log = LoggerFactory.getLogger( RepositoryScannerInstance.class ); + /** * Consumers that process known content. 
*/ @@ -65,24 +67,21 @@ public class RepositoryScannerInstance private ConsumerWantsFilePredicate consumerWantsFile; - private Logger logger; - public RepositoryScannerInstance( ManagedRepositoryConfiguration repository, List<KnownRepositoryContentConsumer> knownConsumerList, - List<InvalidRepositoryContentConsumer> invalidConsumerList, Logger logger ) + List<InvalidRepositoryContentConsumer> invalidConsumerList ) { this.repository = repository; this.knownConsumers = knownConsumerList; this.invalidConsumers = invalidConsumerList; - this.logger = logger; - this.consumerProcessFile = new ConsumerProcessFileClosure( logger ); + this.consumerProcessFile = new ConsumerProcessFileClosure(); this.consumerWantsFile = new ConsumerWantsFilePredicate(); stats = new RepositoryScanStatistics(); stats.setRepositoryId( repository.getId() ); - Closure triggerBeginScan = new TriggerBeginScanClosure( repository, logger ); + Closure triggerBeginScan = new TriggerBeginScanClosure( repository ); CollectionUtils.forAllDo( knownConsumerList, triggerBeginScan ); CollectionUtils.forAllDo( invalidConsumerList, triggerBeginScan ); @@ -95,10 +94,9 @@ public class RepositoryScannerInstance public RepositoryScannerInstance( ManagedRepositoryConfiguration repository, List<KnownRepositoryContentConsumer> knownContentConsumers, - List<InvalidRepositoryContentConsumer> invalidContentConsumers, Logger logger, - long changesSince ) + List<InvalidRepositoryContentConsumer> invalidContentConsumers, long changesSince ) { - this( repository, knownContentConsumers, invalidContentConsumers, logger ); + this( repository, knownContentConsumers, invalidContentConsumers ); consumerWantsFile.setChangesSince( changesSince ); @@ -112,13 +110,13 @@ public class RepositoryScannerInstance public void directoryWalkStarting( File basedir ) { - logger.info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getLocation() ); + log.info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getLocation() ); stats.triggerStart(); } public void directoryWalkStep( int percentage, File file ) { - logger.debug( "Walk Step: " + percentage + ", " + file ); + log.debug( "Walk Step: " + percentage + ", " + file ); stats.increaseFileCount(); @@ -146,7 +144,7 @@ public class RepositoryScannerInstance public void directoryWalkFinished() { - logger.info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getLocation() ); + log.info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getLocation() ); stats.triggerFinished(); } @@ -155,6 +153,6 @@ public class RepositoryScannerInstance */ public void debug( String message ) { - logger.debug( "Repository Scanner: " + message ); + log.debug( "Repository Scanner: " + message ); } } diff --git a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/functors/TriggerBeginScanClosure.java b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/functors/TriggerBeginScanClosure.java index d1e85ab72..26d15a380 100644 --- a/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/functors/TriggerBeginScanClosure.java +++ b/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/functors/TriggerBeginScanClosure.java @@ -23,7 +23,8 @@ import org.apache.commons.collections.Closure; import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration; import 
org.apache.maven.archiva.consumers.ConsumerException; import org.apache.maven.archiva.consumers.RepositoryContentConsumer; -import org.codehaus.plexus.logging.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * TriggerBeginScanClosure @@ -34,14 +35,13 @@ import org.codehaus.plexus.logging.Logger; public class TriggerBeginScanClosure implements Closure { + private Logger log = LoggerFactory.getLogger( TriggerBeginScanClosure.class ); + private ManagedRepositoryConfiguration repository; - private Logger logger; - - public TriggerBeginScanClosure( ManagedRepositoryConfiguration repository, Logger logger ) + public TriggerBeginScanClosure( ManagedRepositoryConfiguration repository ) { this.repository = repository; - this.logger = logger; } public void execute( Object input ) @@ -56,7 +56,7 @@ public class TriggerBeginScanClosure } catch ( ConsumerException e ) { - logger.warn( "Consumer [" + consumer.getId() + "] cannot begin: " + e.getMessage(), e ); + log.warn( "Consumer [" + consumer.getId() + "] cannot begin: " + e.getMessage(), e ); } } } diff --git a/archiva-base/archiva-transaction/src/main/java/org/apache/maven/archiva/transaction/AbstractTransactionEvent.java b/archiva-base/archiva-transaction/src/main/java/org/apache/maven/archiva/transaction/AbstractTransactionEvent.java index 5764d8e97..b8e08f0eb 100644 --- a/archiva-base/archiva-transaction/src/main/java/org/apache/maven/archiva/transaction/AbstractTransactionEvent.java +++ b/archiva-base/archiva-transaction/src/main/java/org/apache/maven/archiva/transaction/AbstractTransactionEvent.java @@ -21,7 +21,6 @@ package org.apache.maven.archiva.transaction; import org.codehaus.plexus.digest.Digester; import org.codehaus.plexus.digest.DigesterException; -import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; @@ -43,7 +42,6 @@ import java.util.Map; * @version $Id$ */ public abstract class AbstractTransactionEvent - extends AbstractLogEnabled implements TransactionEvent { private Map backups = new HashMap(); |
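With the Logger constructor parameter removed from RepositoryScannerInstance (and from the functor classes it instantiates), the scanner can be wired up without any Plexus logging plumbing. A minimal usage sketch assembled from the DefaultRepositoryScanner hunk above; it assumes a class in the same org.apache.maven.archiva.repository.scanner package and leaves out the includes/excludes configuration taken from FileTypes.

import java.io.File;
import java.util.List;

import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.codehaus.plexus.util.DirectoryWalker;

// Assumed to live in org.apache.maven.archiva.repository.scanner, next to RepositoryScannerInstance.
public class ScannerWiringSketch
{
    public void scan( ManagedRepositoryConfiguration repository,
                      List<KnownRepositoryContentConsumer> knownConsumers,
                      List<InvalidRepositoryContentConsumer> invalidConsumers,
                      long changesSince )
        throws Exception
    {
        // No Logger argument any more: the scanner instance and its functors create their own SLF4J loggers.
        RepositoryScannerInstance instance =
            new RepositoryScannerInstance( repository, knownConsumers, invalidConsumers, changesSince );

        DirectoryWalker walker = new DirectoryWalker();
        walker.setBaseDir( new File( repository.getLocation() ) );
        walker.addDirectoryWalkListener( instance );
        walker.scan();
    }
}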