package org.apache.archiva.reports.consumers;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.reports.RepositoryProblemFacet;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.inject.Named;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
 * Search the database of known SHA1 Checksums for potential duplicate artifacts.
 *
 * TODO: no need for this to be a scanner - we can just query the database / content repository to get a full list
 *
 * plexus.component role="org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer"
 * role-hint="duplicate-artifacts"
 * instantiation-strategy="per-lookup"
 */
@Service( "knownRepositoryContentConsumer#duplicate-artifacts" )
@Scope( "prototype" )
public class DuplicateArtifactsConsumer
    extends AbstractMonitoredConsumer
    implements KnownRepositoryContentConsumer, RegistryListener
{
    private Logger log = LoggerFactory.getLogger( DuplicateArtifactsConsumer.class );

    /**
     * plexus.configuration default-value="duplicate-artifacts"
     */
    private String id = "duplicate-artifacts";

    /**
     * plexus.configuration default-value="Check for Duplicate Artifacts via SHA1 Checksums"
     */
    private String description = "Check for Duplicate Artifacts via SHA1 Checksums";

    @Inject
    private ArchivaConfiguration configuration;

    @Inject
    private FileTypes filetypes;

    /**
     * FIXME: can be of other types
     */
    @Inject
    private RepositorySessionFactory repositorySessionFactory;

    private List<String> includes = new ArrayList<String>();

    private File repositoryDir;

    private String repoId;

    /**
     * FIXME: needs to be selected based on the repository in question
     *
     * plexus.requirement role-hint="maven2"
     */
    @Inject
    @Named( value = "repositoryPathTranslator#maven2" )
    private RepositoryPathTranslator pathTranslator;

    private RepositorySession repositorySession;
    public String getId()
    {
        return id;
    }

    public String getDescription()
    {
        return description;
    }

    public boolean isPermanent()
    {
        return false;
    }

    public List<String> getIncludes()
    {
        return includes;
    }

    public List<String> getExcludes()
    {
        return Collections.emptyList();
    }
    public void beginScan( ManagedRepositoryConfiguration repo, Date whenGathered )
        throws ConsumerException
    {
        repoId = repo.getId();
        this.repositoryDir = new File( repo.getLocation() );
        repositorySession = repositorySessionFactory.createSession();
    }

    public void beginScan( ManagedRepositoryConfiguration repo, Date whenGathered, boolean executeOnEntireRepo )
        throws ConsumerException
    {
        beginScan( repo, whenGathered );
    }
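
    /**
     * Computes the SHA-1 checksum of the file at the given repository-relative path and records a
     * repository problem facet for each other artifact in the metadata repository sharing that checksum.
     */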
    public void processFile( String path )
        throws ConsumerException
    {
        File artifactFile = new File( this.repositoryDir, path );

        // TODO: would be quicker to somehow make sure it ran after the update database consumer, or as a part of that
        // perhaps could use an artifact context that is retained for all consumers? First in can set the SHA-1
        // alternatively this could come straight from the storage resolver, which could populate the artifact metadata
        // in the later parse call with the desired checksum and use that
        ChecksummedFile checksummedFile = new ChecksummedFile( artifactFile );

        String checksumSha1;
        try
        {
            checksumSha1 = checksummedFile.calculateChecksum( ChecksumAlgorithm.SHA1 );
        }
        catch ( IOException e )
        {
            throw new ConsumerException( e.getMessage(), e );
        }
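
        // Look up every artifact already recorded in the metadata repository with the same SHA-1 checksum.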
        MetadataRepository metadataRepository = repositorySession.getRepository();

        List<ArtifactMetadata> results;
        try
        {
            results = metadataRepository.getArtifactsByChecksum( repoId, checksumSha1 );
        }
        catch ( MetadataRepositoryException e )
        {
            throw new ConsumerException( e.getMessage(), e );
        }
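
        // The result set can include the artifact currently being scanned, so each candidate is compared
        // against the artifact resolved from the current path and self-references are skipped below.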
        if ( CollectionUtils.isNotEmpty( results ) )
        {
            ArtifactMetadata originalArtifact;
            try
            {
                originalArtifact = pathTranslator.getArtifactForPath( repoId, path );
            }
            catch ( Exception e )
            {
                log.warn( "Not reporting problem for invalid artifact in checksum check: " + e.getMessage() );
                return;
            }

            for ( ArtifactMetadata dupArtifact : results )
            {
                String id = path.substring( path.lastIndexOf( "/" ) + 1 );
                if ( dupArtifact.getId().equals( id )
                    && dupArtifact.getNamespace().equals( originalArtifact.getNamespace() )
                    && dupArtifact.getProject().equals( originalArtifact.getProject() )
                    && dupArtifact.getVersion().equals( originalArtifact.getVersion() ) )
                {
                    // Skip reference to itself.
                    if ( log.isDebugEnabled() )
                    {
                        log.debug( "Not counting duplicate for artifact " + dupArtifact + " for path " + path );
                    }
                    continue;
                }
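
                // Any remaining match is a different artifact sharing the same checksum: record it as a
                // "duplicate-artifact" problem facet against the artifact being scanned.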
                RepositoryProblemFacet problem = new RepositoryProblemFacet();
                problem.setRepositoryId( repoId );
                problem.setNamespace( originalArtifact.getNamespace() );
                problem.setProject( originalArtifact.getProject() );
                problem.setVersion( originalArtifact.getVersion() );
                // FIXME: need to get the right storage resolver for the repository the dupe artifact is in, it might be
                // a different type
                // FIXME: we need the project version here, not the artifact version
                problem.setMessage( "Duplicate Artifact Detected: " + path + " <--> " + pathTranslator.toPath(
                    dupArtifact.getNamespace(), dupArtifact.getProject(), dupArtifact.getVersion(),
                    dupArtifact.getId() ) );
                problem.setProblem( "duplicate-artifact" );

                try
                {
                    metadataRepository.addMetadataFacet( repoId, problem );
                }
                catch ( MetadataRepositoryException e )
                {
                    throw new ConsumerException( e.getMessage(), e );
                }
            }
        }
    }
    public void processFile( String path, boolean executeOnEntireRepo )
        throws ConsumerException
    {
        processFile( path );
    }

    public void completeScan()
    {
        repositorySession.close();
    }

    public void completeScan( boolean executeOnEntireRepo )
    {
        completeScan();
    }
    public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
    {
        if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
        {
            initIncludes();
        }
    }

    public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
    {
        // no action needed before the configuration changes
    }

    private void initIncludes()
    {
        includes.clear();
        includes.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
    }

    @PostConstruct
    public void initialize()
    {
        initIncludes();
        configuration.addChangeListener( this );
    }
}