*/
private I18N i18n;
- public void convert( Artifact artifact,
- ArtifactRepository targetRepository,
- ArtifactReporter reporter )
+ public void convert( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter )
throws RepositoryConversionException
{
if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
return metadata;
}
- private void updateMetadata( RepositoryMetadata artifactMetadata,
- ArtifactRepository sourceRepository,
- ArtifactRepository targetRepository,
- Metadata newMetadata,
+ private void updateMetadata( RepositoryMetadata artifactMetadata, ArtifactRepository sourceRepository,
+ ArtifactRepository targetRepository, Metadata newMetadata,
FileTransaction transaction )
throws RepositoryConversionException
{
Metadata metadata;
- boolean changed = false;
+ boolean changed;
//merge with target repository metadata
File file = new File( targetRepository.getBasedir(),
if ( srcfile.exists() )
{
Metadata sourceMetadata = readMetadata( srcfile );
- changed = changed | metadata.merge( sourceMetadata );
+ changed = changed || metadata.merge( sourceMetadata );
}
}
return metadata;
}
- private boolean validateMetadata( Artifact artifact,
- ArtifactReporter reporter )
+ private boolean validateMetadata( Artifact artifact, ArtifactReporter reporter )
throws RepositoryConversionException
{
ArtifactRepository repository = artifact.getRepository();
return result;
}
- private boolean validateMetadata( Metadata metadata,
- RepositoryMetadata repositoryMetadata,
- Artifact artifact,
+ private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
ArtifactReporter reporter )
{
String groupIdKey;
return result;
}
- private boolean copyPom( Artifact artifact,
- ArtifactRepository targetRepository,
- ArtifactReporter reporter,
+ private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
FileTransaction transaction )
throws RepositoryConversionException
{
return result;
}
- private boolean doRelocation( Artifact artifact,
- org.apache.maven.model.v3_0_0.Model v3Model,
- ArtifactRepository repository,
- FileTransaction transaction )
+ private boolean doRelocation( Artifact artifact, org.apache.maven.model.v3_0_0.Model v3Model,
+ ArtifactRepository repository, FileTransaction transaction )
throws IOException
{
Properties properties = v3Model.getProperties();
}
}
- private void writeRelocationPom( String groupId,
- String artifactId,
- String version,
- String newGroupId,
- String newArtifactId,
- String newVersion,
- String message,
- ArtifactRepository repository,
- FileTransaction transaction )
+ private void writeRelocationPom( String groupId, String artifactId, String version, String newGroupId,
+ String newArtifactId, String newVersion, String message,
+ ArtifactRepository repository, FileTransaction transaction )
throws IOException
{
Model pom = new Model();
transaction.createFile( strWriter.toString(), pomFile );
}
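
A relocation POM of the kind this method writes can be assembled with the standard org.apache.maven.model classes. The following is only a hedged sketch of that idea using the method's parameters, not the actual method body (it assumes imports of Model, Relocation, DistributionManagement, MavenXpp3Writer and java.io.StringWriter):

    // Sketch only: build a minimal "pom" packaging model that points the old coordinates at the new ones.
    Model pom = new Model();
    pom.setModelVersion( "4.0.0" );
    pom.setGroupId( groupId );
    pom.setArtifactId( artifactId );
    pom.setVersion( version );
    pom.setPackaging( "pom" );

    Relocation relocation = new Relocation();
    relocation.setGroupId( newGroupId );
    relocation.setArtifactId( newArtifactId );
    relocation.setVersion( newVersion );
    relocation.setMessage( message );

    DistributionManagement distMgmt = new DistributionManagement();
    distMgmt.setRelocation( relocation );
    pom.setDistributionManagement( distMgmt );

    StringWriter strWriter = new StringWriter();
    new MavenXpp3Writer().write( strWriter, pom );
    // the serialized POM (strWriter.toString()) is then handed to the FileTransaction
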
- private String getI18NString( String key,
- String arg0 )
+ private String getI18NString( String key, String arg0 )
{
return i18n.format( getClass().getName(), Locale.getDefault(), key, arg0 );
}
return i18n.getString( getClass().getName(), Locale.getDefault(), key );
}
- private boolean testChecksums( Artifact artifact,
- File file,
- ArtifactReporter reporter )
- throws IOException, RepositoryConversionException
+ private boolean testChecksums( Artifact artifact, File file, ArtifactReporter reporter )
+ throws IOException
{
- boolean result;
- result = verifyChecksum( file, file.getName() + ".md5", Digester.MD5, reporter, artifact,
- "failure.incorrect.md5" );
+ boolean result =
+ verifyChecksum( file, file.getName() + ".md5", Digester.MD5, reporter, artifact, "failure.incorrect.md5" );
result = result && verifyChecksum( file, file.getName() + ".sha1", Digester.SHA1, reporter, artifact,
"failure.incorrect.sha1" );
return result;
}
- private boolean verifyChecksum( File file,
- String fileName,
- String algorithm,
- ArtifactReporter reporter,
- Artifact artifact,
- String key )
+ private boolean verifyChecksum( File file, String fileName, String algorithm, ArtifactReporter reporter,
+ Artifact artifact, String key )
throws IOException
{
boolean result = true;
return result;
}
- private boolean copyArtifact( Artifact artifact,
- ArtifactRepository targetRepository,
- ArtifactReporter reporter,
+ private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
FileTransaction transaction )
throws RepositoryConversionException
{
return result;
}
- public void convert( List artifacts,
- ArtifactRepository targetRepository,
- ArtifactReporter reporter )
+ public void convert( List artifacts, ArtifactRepository targetRepository, ArtifactReporter reporter )
throws RepositoryConversionException
{
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
package org.apache.maven.repository;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.apache.maven.repository.converter.RepositoryConversionException;
import org.apache.maven.repository.converter.RepositoryConverter;
import org.apache.maven.repository.discovery.ArtifactDiscoverer;
import org.apache.maven.repository.reporting.ArtifactReporter;
import java.io.File;
-import java.util.List;
import java.net.MalformedURLException;
+import java.util.List;
/**
* @author Jason van Zyl
*/
private ArtifactReporter reporter;
- public void convertLegacyRepository( File legacyRepositoryDirectory,
- File repositoryDirectory,
+ public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
boolean includeSnapshots )
throws RepositoryConversionException
{
/**
* Return a list of artifacts found in a specified repository
*
- * @param repository The ArtifactRepository to discover artifacts
+ * @param repository The ArtifactRepository to discover artifacts
* @param blacklistedPatterns Comma-delimited list of string paths that will be excluded in the discovery
- * @param includeSnapshots if the repository contains snapshots which should also be included
+ * @param includeSnapshots if the repository contains snapshots which should also be included
* @return list of artifacts
*/
public List discoverArtifacts( ArtifactRepository repository, String blacklistedPatterns, boolean includeSnapshots )
{
String path = artifactPaths[i];
- Artifact artifact;
try
{
- artifact = buildArtifactFromPath( path, repository );
+ Artifact artifact = buildArtifactFromPath( path, repository );
if ( includeSnapshots || !artifact.isSnapshot() )
{
/**
* Returns a list of pom packaging artifacts found in a specified repository
*
- * @param repository The ArtifactRepository to discover artifacts
+ * @param repository The ArtifactRepository to discover artifacts
* @param blacklistedPatterns Comma-delimited list of string paths that will be excluded in the discovery
- * @param includeSnapshots if the repository contains snapshots which should also be included
+ * @param includeSnapshots if the repository contains snapshots which should also be included
* @return list of pom artifacts
*/
public List discoverStandalonePoms( ArtifactRepository repository, String blacklistedPatterns,
/**
* Returns an artifact object that is represented by the specified path in a repository
*
- * @param path The path that is pointing to an artifact
+ * @param path The path that is pointing to an artifact
* @param repository The repository of the artifact
* @return Artifact
* @throws DiscovererException when the specified path does not correspond to an artifact
Collections.reverse( pathParts );
- Artifact artifact = null;
+ Artifact artifact;
if ( pathParts.size() >= 4 )
{
// maven 2.x path
Artifact result;
if ( classifier == null )
{
- result = artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME,
- type );
+ result =
+ artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
}
else
{
- result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
- classifier );
+ result =
+ artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
}
if ( result.isSnapshot() )
{
classifier = remainingFilename.substring( classifierIndex + 1 );
remainingFilename = remainingFilename.substring( 0, classifierIndex );
- result = artifactFactory.createArtifactWithClassifier( groupId, artifactId,
- remainingFilename, type,
- classifier );
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
+ type, classifier );
}
else
{
}
else if ( !result.getBaseVersion().equals( version ) )
{
- throw new DiscovererException( "Built snapshot artifact base version does not match " +
- "path version" );
+ throw new DiscovererException(
+ "Built snapshot artifact base version does not match " + "path version" );
}
else
{
*/
public class DiscovererPath
{
- private String path;
- private String comment;
+ /**
+ * The path discovered.
+ */
+ private final String path;
- public DiscovererPath()
- {
- }
+ /**
+ * A comment about why the path is being processed.
+ */
+ private final String comment;
public DiscovererPath( String path, String comment )
{
- setPath( path );
- setComment( comment );
+ this.path = path;
+ this.comment = comment;
}
public String getPath()
return path;
}
- public void setPath( String path )
- {
- this.path = path;
- }
-
public String getComment()
{
return comment;
}
-
- public void setComment( String comment )
- {
- this.comment = comment;
- }
}
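
With the no-arg constructor and both setters removed, DiscovererPath is now an immutable value object: the path and its comment are fixed at construction time. A minimal usage sketch (the argument strings are invented for illustration):

    DiscovererPath dp = new DiscovererPath( "org/foo/bar/1.0/bar-1.0.jar", "included: not matched by any blacklist pattern" );
    String path = dp.getPath();       // the repository path that was processed
    String reason = dp.getComment();  // why it was processed; no setters exist to change either value
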
import org.apache.lucene.analysis.CharTokenizer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
-import org.apache.lucene.document.Document;
import org.apache.maven.artifact.repository.ArtifactRepository;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.util.Collection;
-import java.util.List;
-import java.util.Iterator;
import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
import java.util.zip.ZipEntry;
/**
}
catch ( IOException e )
{
- throw new RepositoryIndexException( "Failed to validate index path: " +
- getIndexPath().getAbsolutePath(), e );
+ throw new RepositoryIndexException( "Failed to validate index path: " + indexPath.getAbsolutePath(), e );
}
}
/**
* @see RepositoryIndex#validate()
*/
- public void validate()
+ public final void validate()
throws RepositoryIndexException, IOException
{
if ( indexExists() )
}
/**
- * @see RepositoryIndex#deleteDocuments(java.util.List)
+ * @see RepositoryIndex#deleteDocuments(java.util.List)
*/
public void deleteDocuments( List termList )
throws RepositoryIndexException, IOException
/**
* @author Edwin Punzalan
+ * @todo can we move all of these constants out of the interface? Perhaps they should be an enumerated type?
*/
public interface RepositoryIndex
{
- static final String POM = "POM";
+ String POM = "POM";
- static final String METADATA = "METADATA";
+ String METADATA = "METADATA";
- static final String ARTIFACT = "ARTIFACT";
+ String ARTIFACT = "ARTIFACT";
- static final String FLD_ID = "id";
+ String FLD_ID = "id";
- static final String FLD_NAME = "name";
+ String FLD_NAME = "name";
- static final String FLD_DOCTYPE = "doctype";
+ String FLD_DOCTYPE = "doctype";
- static final String FLD_GROUPID = "groupId";
+ String FLD_GROUPID = "groupId";
- static final String FLD_ARTIFACTID = "artifactId";
+ String FLD_ARTIFACTID = "artifactId";
- static final String FLD_VERSION = "version";
+ String FLD_VERSION = "version";
- static final String FLD_PACKAGING = "packaging";
+ String FLD_PACKAGING = "packaging";
- static final String FLD_SHA1 = "sha1";
+ String FLD_SHA1 = "sha1";
- static final String FLD_MD5 = "md5";
+ String FLD_MD5 = "md5";
- static final String FLD_LASTUPDATE = "last update";
+ String FLD_LASTUPDATE = "last update";
- static final String FLD_PLUGINPREFIX = "plugin prefix";
+ String FLD_PLUGINPREFIX = "plugin prefix";
- static final String FLD_CLASSES = "class";
+ String FLD_CLASSES = "class";
- static final String FLD_PACKAGES = "package";
+ String FLD_PACKAGES = "package";
- static final String FLD_FILES = "file";
+ String FLD_FILES = "file";
- static final String FLD_LICENSE_URLS = "license url";
+ String FLD_LICENSE_URLS = "license url";
- static final String FLD_DEPENDENCIES = "dependency";
+ String FLD_DEPENDENCIES = "dependency";
- static final String FLD_PLUGINS_BUILD = "build plugin";
+ String FLD_PLUGINS_BUILD = "build plugin";
- static final String FLD_PLUGINS_REPORT = "report plugin";
+ String FLD_PLUGINS_REPORT = "report plugin";
- static final String FLD_PLUGINS_ALL = "plugins_all";
+ String FLD_PLUGINS_ALL = "plugins_all";
- static final String[] FIELDS = {FLD_ID, FLD_NAME, FLD_DOCTYPE, FLD_GROUPID, FLD_ARTIFACTID, FLD_VERSION, FLD_PACKAGING, FLD_SHA1,
+ String[] FIELDS = {FLD_ID, FLD_NAME, FLD_DOCTYPE, FLD_GROUPID, FLD_ARTIFACTID, FLD_VERSION, FLD_PACKAGING, FLD_SHA1,
FLD_MD5, FLD_LASTUPDATE, FLD_PLUGINPREFIX, FLD_CLASSES, FLD_PACKAGES, FLD_FILES, FLD_LICENSE_URLS,
FLD_DEPENDENCIES, FLD_PLUGINS_BUILD, FLD_PLUGINS_REPORT, FLD_PLUGINS_ALL};
- static final List KEYWORD_FIELDS = Arrays.asList( new String[]{FLD_ID, FLD_PACKAGING, FLD_LICENSE_URLS, FLD_DEPENDENCIES,
+ List KEYWORD_FIELDS = Arrays.asList( new String[]{FLD_ID, FLD_PACKAGING, FLD_LICENSE_URLS, FLD_DEPENDENCIES,
FLD_PLUGINS_BUILD, FLD_PLUGINS_REPORT, FLD_PLUGINS_ALL} );
- static final String[] MODEL_FIELDS = {FLD_PACKAGING, FLD_LICENSE_URLS, FLD_DEPENDENCIES, FLD_PLUGINS_BUILD, FLD_PLUGINS_REPORT};
+ String[] MODEL_FIELDS = {FLD_PACKAGING, FLD_LICENSE_URLS, FLD_DEPENDENCIES, FLD_PLUGINS_BUILD, FLD_PLUGINS_REPORT};
ArtifactRepository getRepository();
*
* @throws RepositoryIndexException if the given indexPath is not valid for this type of RepositoryIndex
*/
- public void validate()
+ void validate()
throws RepositoryIndexException, IOException;
/**
* @param docList List of Lucene Documents
* @throws RepositoryIndexException when an error occurred during the indexing of the documents
*/
- public void addDocuments( List docList )
+ void addDocuments( List docList )
throws RepositoryIndexException;
/**
* @throws RepositoryIndexException
* @throws IOException
*/
- public void deleteDocuments( List termList )
+ void deleteDocuments( List termList )
throws RepositoryIndexException, IOException;
}
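
The dropped modifiers are redundant in Java: fields declared in an interface are implicitly public, static and final, and methods are implicitly public and abstract, so the shortened declarations above are exactly equivalent to the old ones. For example:

    public interface Example
    {
        String POM = "POM";     // same as: public static final String POM = "POM";

        void validate()         // same as: public abstract void validate() throws IOException;
            throws IOException;
    }
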
validateDirectories();
}
- private void validateRemoteRepo( )
+ private void validateRemoteRepo()
throws ValidationException
{
//Verify remote repository set
private void validateDirectories()
throws ValidationException
{
- File f = new File( getRepositoryCachePath() );
+ File f = new File( cachePath );
if ( !f.exists() )
{
throw new ValidationException( "Specified directory does not exist: " + f.getAbsolutePath() );
*/
public void put( Object key, Object value )
{
- Object old = null;
-
// remove and put: this promotes it to the top since we use a linked hash map
synchronized ( cache )
{
if ( cache.containsKey( key ) )
{
- old = cache.remove( key );
+ cache.remove( key );
}
cache.put( key, value );
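
The remove-then-put trick above relies on the backing map being an insertion-ordered java.util.LinkedHashMap, as the in-code comment says: re-inserting an existing key moves it to the end, so entries that have not been touched recently drift to the front where they can be evicted first. A standalone illustration with a plain LinkedHashMap (not the project's Cache class):

    Map cache = new LinkedHashMap();
    cache.put( "a", "1" );
    cache.put( "b", "2" );

    // promote "a" by removing and re-inserting it
    cache.remove( "a" );
    cache.put( "a", "1" );

    System.out.println( cache.keySet() );   // prints [b, a] - "b" is now the oldest entry
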
import java.io.File;
import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
-import java.io.FileNotFoundException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.regex.Matcher;
throw new DigesterException( "Specified algorithm not found: " + algorithm, e );
}
- InputStream fis = null;
+ InputStream fis;
try
{
fis = new FileInputStream( file );
}
while ( numRead != -1 );
}
- catch( IOException e )
+ catch ( IOException e )
{
throw new DigesterException( "Failed to read from file: " + file.getAbsolutePath(), e );
}
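
The loop above streams the file through a java.security.MessageDigest in fixed-size chunks instead of reading it into memory at once. A sketch of the same pattern using only standard JDK calls (the buffer size is arbitrary and "file" stands for the file being checksummed; this is not the project's Digester implementation):

    MessageDigest digest = MessageDigest.getInstance( "SHA-1" );   // or "MD5"
    InputStream fis = new FileInputStream( file );
    byte[] buffer = new byte[8192];
    int numRead;
    do
    {
        numRead = fis.read( buffer );
        if ( numRead > 0 )
        {
            digest.update( buffer, 0, numRead );   // feed only the bytes actually read
        }
    }
    while ( numRead != -1 );
    fis.close();
    byte[] rawDigest = digest.digest();            // binary hash, usually hex-encoded afterwards
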
* limitations under the License.\r
*/\r
\r
-import com.opensymphony.xwork.ActionSupport;\r
import com.opensymphony.webwork.interceptor.ParameterAware;\r
+import com.opensymphony.xwork.ActionSupport;\r
import org.apache.maven.repository.configuration.Configuration;\r
import org.apache.maven.repository.manager.web.execution.DiscovererExecution;\r
import org.apache.maven.repository.manager.web.job.DiscovererScheduler;\r
import org.apache.maven.repository.manager.web.utils.ConfigurationManager;\r
\r
import java.io.File;\r
-import java.util.Map;\r
import java.util.HashMap;\r
+import java.util.Map;\r
\r
/**\r
* This is the Action class of index.jsp, which is the initial page of the web application.\r
parameters.put( ConfigurationManager.INDEXPATH, config.getIndexPath() );\r
parameters.put( ConfigurationManager.MIN_INDEXPATH, config.getMinimalIndexPath() );\r
parameters.put( ConfigurationManager.DISCOVERY_BLACKLIST_PATTERNS, config.getDiscoveryBlackListPatterns() );\r
- parameters.put( ConfigurationManager.DISCOVER_SNAPSHOTS, new Boolean( config.isDiscoverSnapshots() ) );\r
+ parameters.put( ConfigurationManager.DISCOVER_SNAPSHOTS, Boolean.valueOf( config.isDiscoverSnapshots() ) );\r
parameters.put( ConfigurationManager.DISCOVERY_CRON_EXPRESSION, config.getDiscoveryCronExpression() );\r
- setParameters( parameters );\r
+ this.parameters = parameters;\r
\r
//Configuration configuration = new Configuration(); // TODO!\r
execution.executeDiscovererIfIndexDoesNotExist( new File( config.getIndexPath() ) );\r
package org.apache.maven.repository.manager.web.action;
/*
- * Copyright 2006 The Apache Software Foundation.
+ * Copyright 2005-2006 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* limitations under the License.
*/
-import com.opensymphony.xwork.Action;
import com.opensymphony.webwork.interceptor.ParameterAware;
+import com.opensymphony.xwork.Action;
+import org.apache.maven.repository.manager.web.utils.ConfigurationManager;
-import java.util.Map;
+import java.io.IOException;
import java.util.HashMap;
-
-import org.apache.maven.repository.manager.web.utils.ConfigurationManager;
+import java.util.Map;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* Method that is executed when the action is invoked.
*
* @return a String that specifies where to go to next
- * @throws Exception
+ * @throws IOException
*/
public String execute()
- throws Exception
+ throws IOException
{
String[] indexPath = (String[]) parameters.get( ConfigurationManager.INDEXPATH );
Map map = new HashMap();
package org.apache.maven.repository.manager.web.action;\r
\r
-import com.opensymphony.xwork.Action;\r
+/*\r
+ * Copyright 2005-2006 The Apache Software Foundation.\r
+ *\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ * http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ */\r
+\r
import com.opensymphony.webwork.interceptor.ParameterAware;\r
+import com.opensymphony.xwork.Action;\r
+import org.apache.maven.repository.manager.web.utils.ConfigurationManager;\r
\r
-import java.util.Map;\r
import java.util.HashMap;\r
-\r
-import org.apache.maven.repository.manager.web.utils.ConfigurationManager;\r
+import java.util.Map;\r
\r
/**\r
* @author <a href="mailto:aramirez@apache.org">Allan Ramirez</a>\r
- *\r
* @plexus.component role="com.opensymphony.xwork.Action" role-hint="org.apache.maven.repository.manager.web.action.SchedulerConfigurationAction"\r
*/\r
public class SchedulerConfigurationAction\r
*/\r
public String execute()\r
{\r
- Map map;\r
try\r
{\r
- map = new HashMap();\r
+ Map map = new HashMap();\r
\r
String[] cronExpression = (String[]) parameters.get( ConfigurationManager.DISCOVERY_CRON_EXPRESSION );\r
\r
- if( cronExpression[0] != null )\r
+ if ( cronExpression[0] != null )\r
{\r
map.put( ConfigurationManager.DISCOVERY_CRON_EXPRESSION, cronExpression[0] );\r
\r
return ERROR;\r
}\r
}\r
- catch( Exception e )\r
+ catch ( Exception e )\r
{\r
+ // TODO: fix error handling!\r
e.printStackTrace();\r
return ERROR;\r
}\r
* @param indexPath the path to the index file
*/
protected void indexMetadata( List metadataList, File indexPath, ArtifactRepository repository )
- throws RepositoryIndexException, MalformedURLException
+ throws RepositoryIndexException
{
MetadataRepositoryIndex metadataIndex = indexFactory.createMetadataRepositoryIndex( indexPath, repository );
metadataIndex.indexMetadata( metadataList );
*/
import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.io.xpp3.ConfigurationXpp3Writer;
import org.apache.maven.repository.configuration.io.xpp3.ConfigurationXpp3Reader;
+import org.apache.maven.repository.configuration.io.xpp3.ConfigurationXpp3Writer;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
-import java.io.Writer;
import java.io.Reader;
-import java.io.FileReader;
-import java.io.FileNotFoundException;
+import java.io.Writer;
import java.net.URL;
import java.util.Iterator;
import java.util.Map;
}
if ( name.equals( DISCOVER_SNAPSHOTS ) )
{
- config.setDiscoverSnapshots( Boolean.getBoolean( value ) );
+ config.setDiscoverSnapshots( Boolean.valueOf( value ).booleanValue() );
}
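
This one is a behavioural fix rather than a style change: Boolean.getBoolean( value ) looks up a JVM system property named by the string and returns true only if that property is set to "true", while Boolean.valueOf( value ).booleanValue() parses the string itself, which is what a configuration value needs. Illustration of the difference (standard JDK behaviour):

    boolean viaSystemProperty = Boolean.getBoolean( "true" );          // false, unless the JVM was started with -Dtrue=true
    boolean viaParsing = Boolean.valueOf( "true" ).booleanValue();     // true: the string itself is parsed
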
if ( name.equals( REPOSITORY_DIRECTORY ) )
{
public Configuration getConfiguration()
throws IOException
{
- Map map = null;
File file = getConfigFile();
config = new Configuration();
}
catch ( XmlPullParserException xe )
{
+ // TODO: fix error handling!
xe.printStackTrace();
}
}