--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.apache.maven.archiva</groupId>
+ <artifactId>archiva</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>archiva-indexer</artifactId>
+ <name>Archiva Indexer</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-artifact</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-artifact-manager</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-project</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.codehaus.plexus.cache</groupId>
+ <artifactId>plexus-cache-hashmap</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-model</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-core</artifactId>
+ <version>2.0.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-utils</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-container-default</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-digest</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-repository-metadata</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>cobertura-maven-plugin</artifactId>
+ <configuration>
+ <check>
+ <!-- TODO: increase coverage -->
+ <totalLineRate>80</totalLineRate>
+ <totalBranchRate>80</totalBranchRate>
+ </check>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+</project>
--- /dev/null
+package org.apache.maven.archiva.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.query.Query;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Maintain an artifact index on the repository.
+ *
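+ * A hedged usage sketch using the Lucene implementation (illustrative only; the index, artifact and
+ * recordFactory variables are assumed to be obtained elsewhere, e.g. from a RepositoryArtifactIndexFactory
+ * and a RepositoryIndexRecordFactory component):
+ * <pre>
+ * index.indexArtifact( artifact, recordFactory );
+ * List results = index.search( new LuceneQuery(
+ *     new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven" ) ) ) );
+ * </pre>
+ *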
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryArtifactIndex
+{
+ /**
+ * Indexes the artifacts described by the specified collection of index records. If a record is already
+ * present in the index, it is updated.
+ *
+ * @param records the records to index
+ * @throws RepositoryIndexException if there is a problem indexing the records
+ */
+ void indexRecords( Collection records )
+ throws RepositoryIndexException;
+
+ /**
+ * Search the index based on the search criteria specified. Returns a list of index records.
+ *
+ * @param query The query that contains the search criteria
+ * @return the index records found
+ * @throws RepositoryIndexSearchException if there is a problem searching
+ * @todo should it return "SearchResult" instances that contain the index record and other search data (like score?)
+ */
+ List search( Query query )
+ throws RepositoryIndexSearchException;
+
+ /**
+ * Check if the index already exists.
+ *
+ * @return true if the index already exists
+ * @throws RepositoryIndexException if the index location is not valid
+ */
+ boolean exists()
+ throws RepositoryIndexException;
+
+ /**
+ * Delete records from the index. Records that do not exist in the index are silently ignored.
+ *
+ * @param records the records to delete
+ * @throws RepositoryIndexException if there is a problem removing the record
+ */
+ void deleteRecords( Collection records )
+ throws RepositoryIndexException;
+
+ /**
+ * Retrieve all records in the index.
+ *
+ * @return the records
+ * @throws RepositoryIndexSearchException if there was an error searching the index
+ */
+ Collection getAllRecords()
+ throws RepositoryIndexSearchException;
+
+ /**
+ * Retrieve all primary keys of records in the index.
+ *
+ * @return the keys
+ * @throws RepositoryIndexException if there was an error searching the index
+ */
+ Collection getAllRecordKeys()
+ throws RepositoryIndexException;
+
+ /**
+ * Indexes the specified artifact. If the artifact is already present in the index, it is updated.
+ * This method should use less memory than indexRecords as the record can be created and disposed of on the fly.
+ *
+ * @param artifact the artifact to index
+ * @param factory the factory used to convert the artifact into an index record
+ * @throws RepositoryIndexException if there is a problem indexing the artifacts
+ */
+ void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+ throws RepositoryIndexException;
+
+ /**
+ * Indexes the artifacts found within the specified list. If the artifacts are already present in the
+ * index, they are updated. This method should use less memory than indexRecords as the records can be
+ * created and disposed of on the fly.
+ *
+ * @param artifacts the artifacts to index
+ * @param factory the factory used to convert the artifacts into index records
+ * @throws RepositoryIndexException if there is a problem indexing the artifacts
+ */
+ void indexArtifacts( List artifacts, RepositoryIndexRecordFactory factory )
+ throws RepositoryIndexException;
+
+ /**
+ * Get all the group IDs in the index.
+ *
+ * @return list of groups as strings
+ * @throws RepositoryIndexException if there is a problem searching for the group ID
+ */
+ List getAllGroupIds()
+ throws RepositoryIndexException;
+
+ /**
+ * Get the list of artifact IDs in a group in the index.
+ *
+ * @param groupId the group ID to search
+ * @return the list of artifact ID strings
+ * @throws RepositoryIndexSearchException if there is a problem searching for the group ID
+ */
+ List getArtifactIds( String groupId )
+ throws RepositoryIndexSearchException;
+
+ /**
+ * Get the list of available versions for a given artifact.
+ *
+ * @param groupId the group ID to search for
+ * @param artifactId the artifact ID to search for
+ * @return the list of version strings
+ * @throws RepositoryIndexSearchException if there is a problem searching for the artifact
+ */
+ List getVersions( String groupId, String artifactId )
+ throws RepositoryIndexSearchException;
+
+ /**
+ * Get the time when the index was last updated. Note that this does not monitor external processes.
+ *
+ * @return the last updated time, or 0 if it has not been updated since the class was instantiated.
+ */
+ long getLastUpdatedTime();
+}
--- /dev/null
+package org.apache.maven.archiva.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+/**
+ * Obtain an index instance.
+ *
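+ * A hedged lookup sketch (assumes a Plexus container reference; the "lucene" hint matches the
+ * implementation declared in this module):
+ * <pre>
+ * RepositoryArtifactIndexFactory factory = (RepositoryArtifactIndexFactory) container.lookup( ROLE, "lucene" );
+ * RepositoryArtifactIndex index = factory.createStandardIndex( new File( "/path/to/index" ) );
+ * </pre>
+ *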
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryArtifactIndexFactory
+{
+ /**
+ * Plexus role.
+ */
+ String ROLE = RepositoryArtifactIndexFactory.class.getName();
+
+ /**
+ * Method to create an instance of the standard index.
+ *
+ * @param indexPath the path where the index will be created/updated
+ * @return the index instance
+ */
+ RepositoryArtifactIndex createStandardIndex( File indexPath );
+
+ /**
+ * Method to create an instance of the minimal index.
+ *
+ * @param indexPath the path where the index will be created/updated
+ * @return the index instance
+ */
+ RepositoryArtifactIndex createMinimalIndex( File indexPath );
+}
--- /dev/null
+package org.apache.maven.archiva.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * @author Edwin Punzalan
+ */
+public class RepositoryIndexException
+ extends Exception
+{
+ public RepositoryIndexException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+
+ public RepositoryIndexException( String message )
+ {
+ super( message );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * @author Brett Porter
+ */
+public class RepositoryIndexSearchException
+ extends Exception
+{
+ public RepositoryIndexSearchException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.lucene.document.Document;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
+
+import java.text.ParseException;
+
+/**
+ * Converts repository records to Lucene documents.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface LuceneIndexRecordConverter
+{
+ /**
+ * Convert an index record to a Lucene document.
+ *
+ * @param record the record
+ * @return the document
+ */
+ Document convert( RepositoryIndexRecord record );
+
+ /**
+ * Convert a Lucene document to an index record.
+ *
+ * @param document the document
+ * @return the record
+ * @throws java.text.ParseException if there is a problem parsing a field (specifically, dates)
+ */
+ RepositoryIndexRecord convert( Document document )
+ throws ParseException;
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.document.DateTools;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumberTools;
+import org.apache.maven.archiva.indexer.record.MinimalArtifactIndexRecord;
+import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
+
+import java.text.ParseException;
+import java.util.Arrays;
+
+/**
+ * Convert the minimal index record to a Lucene document.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneMinimalIndexRecordConverter
+ implements LuceneIndexRecordConverter
+{
+ public Document convert( RepositoryIndexRecord record )
+ {
+ MinimalArtifactIndexRecord rec = (MinimalArtifactIndexRecord) record;
+
+ Document document = new Document();
+ addTokenizedField( document, MinimalIndexRecordFields.FILENAME, rec.getFilename() );
+ addUntokenizedField( document, MinimalIndexRecordFields.LAST_MODIFIED,
+ DateTools.timeToString( rec.getLastModified(), DateTools.Resolution.SECOND ) );
+ addUntokenizedField( document, MinimalIndexRecordFields.FILE_SIZE, NumberTools.longToString( rec.getSize() ) );
+ addUntokenizedField( document, MinimalIndexRecordFields.MD5, rec.getMd5Checksum() );
+ addTokenizedField( document, MinimalIndexRecordFields.CLASSES,
+ StringUtils.join( rec.getClasses().iterator(), "\n" ) );
+
+ return document;
+ }
+
+ public RepositoryIndexRecord convert( Document document )
+ throws ParseException
+ {
+ MinimalArtifactIndexRecord record = new MinimalArtifactIndexRecord();
+
+ record.setFilename( document.get( MinimalIndexRecordFields.FILENAME ) );
+ record.setLastModified( DateTools.stringToTime( document.get( MinimalIndexRecordFields.LAST_MODIFIED ) ) );
+ record.setSize( NumberTools.stringToLong( document.get( MinimalIndexRecordFields.FILE_SIZE ) ) );
+ record.setMd5Checksum( document.get( MinimalIndexRecordFields.MD5 ) );
+ record.setClasses( Arrays.asList( document.get( MinimalIndexRecordFields.CLASSES ).split( "\n" ) ) );
+
+ return record;
+ }
+
+ private static void addUntokenizedField( Document document, String name, String value )
+ {
+ if ( value != null )
+ {
+ document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
+ }
+ }
+
+ private static void addTokenizedField( Document document, String name, String value )
+ {
+ if ( value != null )
+ {
+ document.add( new Field( name, value, Field.Store.YES, Field.Index.TOKENIZED ) );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.query.Query;
+
+/**
+ * A holder for a Lucene query to pass to the indexer API.
+ *
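+ * For example (an illustrative sketch; the index variable is assumed):
+ * <pre>
+ * Query query = new LuceneQuery(
+ *     new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, "maven-artifact" ) ) );
+ * List records = index.search( query );
+ * </pre>
+ *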
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneQuery
+ implements Query
+{
+ private final org.apache.lucene.search.Query query;
+
+ public LuceneQuery( org.apache.lucene.search.Query query )
+ {
+ this.query = query;
+ }
+
+ org.apache.lucene.search.Query getLuceneQuery()
+ {
+ return query;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.CharTokenizer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexModifier;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermEnum;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Hits;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexer.query.Query;
+import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
+import org.apache.maven.artifact.Artifact;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.Reader;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Lucene implementation of a repository index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneRepositoryArtifactIndex
+ implements RepositoryArtifactIndex
+{
+ /**
+ * The location of the index on the file system.
+ */
+ private File indexLocation;
+
+ /**
+ * Convert repository records to Lucene documents.
+ */
+ private LuceneIndexRecordConverter converter;
+
+ private static final String FLD_PK = "pk";
+
+ private static Analyzer luceneAnalyzer = new LuceneAnalyzer();
+
+ private static long lastUpdatedTime = 0;
+
+ public LuceneRepositoryArtifactIndex( File indexPath, LuceneIndexRecordConverter converter )
+ {
+ this.indexLocation = indexPath;
+ this.converter = converter;
+ }
+
+ public void indexRecords( Collection records )
+ throws RepositoryIndexException
+ {
+ deleteRecords( records );
+
+ addRecords( records );
+ }
+
+ private void addRecords( Collection records )
+ throws RepositoryIndexException
+ {
+ IndexWriter indexWriter;
+ try
+ {
+ indexWriter = new IndexWriter( indexLocation, getAnalyzer(), !exists() );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Unable to open index", e );
+ }
+
+ try
+ {
+ for ( Iterator i = records.iterator(); i.hasNext(); )
+ {
+ RepositoryIndexRecord record = (RepositoryIndexRecord) i.next();
+
+ if ( record != null )
+ {
+ Document document = converter.convert( record );
+ document.add(
+ new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+
+ indexWriter.addDocument( document );
+ }
+ }
+
+ indexWriter.optimize();
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Failed to add an index document", e );
+ }
+ finally
+ {
+ closeQuietly( indexWriter );
+ lastUpdatedTime = System.currentTimeMillis();
+ }
+ }
+
+ public static Analyzer getAnalyzer()
+ {
+ return luceneAnalyzer;
+ }
+
+ private static class LuceneAnalyzer
+ extends Analyzer
+ {
+ private static final Analyzer STANDARD = new StandardAnalyzer();
+
+ public TokenStream tokenStream( String field, final Reader reader )
+ {
+ // dependencies are newline-separated; treat each line as a single token
+ if ( StandardIndexRecordFields.DEPENDENCIES.equals( field ) )
+ {
+ return new CharTokenizer( reader )
+ {
+ protected boolean isTokenChar( char c )
+ {
+ return c != '\n';
+ }
+ };
+ }
+ else if ( StandardIndexRecordFields.FILES.equals( field ) )
+ {
+ return new CharTokenizer( reader )
+ {
+ protected boolean isTokenChar( char c )
+ {
+ return c != '\n' && c != '/';
+ }
+ };
+ }
+ else
+ if ( StandardIndexRecordFields.CLASSES.equals( field ) || MinimalIndexRecordFields.CLASSES.equals( field ) )
+ {
+ return new CharTokenizer( reader )
+ {
+ protected boolean isTokenChar( char c )
+ {
+ return c != '\n' && c != '.';
+ }
+
+ protected char normalize( char c )
+ {
+ return Character.toLowerCase( c );
+ }
+ };
+ }
+ else if ( StandardIndexRecordFields.GROUPID.equals( field ) )
+ {
+ return new CharTokenizer( reader )
+ {
+ protected boolean isTokenChar( char c )
+ {
+ return c != '.';
+ }
+
+ protected char normalize( char c )
+ {
+ return Character.toLowerCase( c );
+ }
+ };
+ }
+ else if ( StandardIndexRecordFields.VERSION.equals( field ) ||
+ StandardIndexRecordFields.BASE_VERSION.equals( field ) )
+ {
+ return new CharTokenizer( reader )
+ {
+ protected boolean isTokenChar( char c )
+ {
+ return c != '-';
+ }
+ };
+ }
+ else if ( StandardIndexRecordFields.FILENAME.equals( field ) ||
+ MinimalIndexRecordFields.FILENAME.equals( field ) )
+ {
+ return new CharTokenizer( reader )
+ {
+ protected boolean isTokenChar( char c )
+ {
+ return c != '-' && c != '.' && c != '/';
+ }
+ };
+ }
+ else
+ {
+ // use standard analyzer
+ return STANDARD.tokenStream( field, reader );
+ }
+ }
+ }
+
+ public void deleteRecords( Collection records )
+ throws RepositoryIndexException
+ {
+ if ( exists() )
+ {
+ IndexReader indexReader = null;
+ try
+ {
+ indexReader = IndexReader.open( indexLocation );
+
+ for ( Iterator i = records.iterator(); i.hasNext(); )
+ {
+ RepositoryIndexRecord record = (RepositoryIndexRecord) i.next();
+
+ if ( record != null )
+ {
+ Term term = new Term( FLD_PK, record.getPrimaryKey() );
+
+ indexReader.deleteDocuments( term );
+ }
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexReader );
+ }
+ }
+ }
+
+ public Collection getAllRecords()
+ throws RepositoryIndexSearchException
+ {
+ return search( new LuceneQuery( new MatchAllDocsQuery() ) );
+ }
+
+ public Collection getAllRecordKeys()
+ throws RepositoryIndexException
+ {
+ return getAllFieldValues( FLD_PK );
+ }
+
+ private List getAllFieldValues( String fieldName )
+ throws RepositoryIndexException
+ {
+ List keys = new ArrayList();
+
+ if ( exists() )
+ {
+ IndexReader indexReader = null;
+ TermEnum terms = null;
+ try
+ {
+ indexReader = IndexReader.open( indexLocation );
+
+ terms = indexReader.terms( new Term( fieldName, "" ) );
+ while ( fieldName.equals( terms.term().field() ) )
+ {
+ keys.add( terms.term().text() );
+
+ if ( !terms.next() )
+ {
+ break;
+ }
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexReader );
+ closeQuietly( terms );
+ }
+ }
+ return keys;
+ }
+
+ public void indexArtifacts( List artifacts, RepositoryIndexRecordFactory factory )
+ throws RepositoryIndexException
+ {
+ IndexModifier indexModifier = null;
+ try
+ {
+ indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact artifact = (Artifact) i.next();
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ if ( record != null )
+ {
+ Term term = new Term( FLD_PK, record.getPrimaryKey() );
+
+ indexModifier.deleteDocuments( term );
+
+ Document document = converter.convert( record );
+ document.add(
+ new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+
+ indexModifier.addDocument( document );
+ }
+ }
+ indexModifier.optimize();
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexModifier );
+ lastUpdatedTime = System.currentTimeMillis();
+ }
+ }
+
+ public void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+ throws RepositoryIndexException
+ {
+ IndexModifier indexModifier = null;
+ try
+ {
+ indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ if ( record != null )
+ {
+ Term term = new Term( FLD_PK, record.getPrimaryKey() );
+
+ indexModifier.deleteDocuments( term );
+
+ Document document = converter.convert( record );
+ document.add( new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+
+ indexModifier.addDocument( document );
+ }
+ indexModifier.optimize();
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexModifier );
+ lastUpdatedTime = System.currentTimeMillis();
+ }
+ }
+
+ public List getAllGroupIds()
+ throws RepositoryIndexException
+ {
+ return getAllFieldValues( StandardIndexRecordFields.GROUPID_EXACT );
+ }
+
+ public List getArtifactIds( String groupId )
+ throws RepositoryIndexSearchException
+ {
+ return searchField( new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) ),
+ StandardIndexRecordFields.ARTIFACTID );
+ }
+
+ public List getVersions( String groupId, String artifactId )
+ throws RepositoryIndexSearchException
+ {
+ BooleanQuery query = new BooleanQuery();
+ query.add( new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) ),
+ BooleanClause.Occur.MUST );
+ query.add( new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, artifactId ) ),
+ BooleanClause.Occur.MUST );
+
+ return searchField( query, StandardIndexRecordFields.VERSION );
+ }
+
+ public long getLastUpdatedTime()
+ {
+ return lastUpdatedTime;
+ }
+
+ private List searchField( org.apache.lucene.search.Query luceneQuery, String fieldName )
+ throws RepositoryIndexSearchException
+ {
+ Set results = new LinkedHashSet();
+
+ IndexSearcher searcher;
+ try
+ {
+ searcher = new IndexSearcher( indexLocation.getAbsolutePath() );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexSearchException( "Unable to open index: " + e.getMessage(), e );
+ }
+
+ try
+ {
+ Hits hits = searcher.search( luceneQuery );
+ for ( int i = 0; i < hits.length(); i++ )
+ {
+ Document doc = hits.doc( i );
+
+ results.add( doc.get( fieldName ) );
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( searcher );
+ }
+ return new ArrayList( results );
+ }
+
+ public boolean exists()
+ throws RepositoryIndexException
+ {
+ if ( IndexReader.indexExists( indexLocation ) )
+ {
+ return true;
+ }
+ else if ( !indexLocation.exists() )
+ {
+ return false;
+ }
+ else if ( indexLocation.isDirectory() )
+ {
+ if ( indexLocation.listFiles().length > 1 )
+ {
+ throw new RepositoryIndexException( indexLocation + " is not a valid index directory." );
+ }
+ else
+ {
+ return false;
+ }
+ }
+ else
+ {
+ throw new RepositoryIndexException( indexLocation + " is not a directory." );
+ }
+ }
+
+ public List search( Query query )
+ throws RepositoryIndexSearchException
+ {
+ LuceneQuery lQuery = (LuceneQuery) query;
+
+ org.apache.lucene.search.Query luceneQuery = lQuery.getLuceneQuery();
+
+ IndexSearcher searcher;
+ try
+ {
+ searcher = new IndexSearcher( indexLocation.getAbsolutePath() );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexSearchException( "Unable to open index: " + e.getMessage(), e );
+ }
+
+ List records = new ArrayList();
+ try
+ {
+ Hits hits = searcher.search( luceneQuery );
+ for ( int i = 0; i < hits.length(); i++ )
+ {
+ Document doc = hits.doc( i );
+
+ records.add( converter.convert( doc ) );
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
+ }
+ catch ( ParseException e )
+ {
+ throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( searcher );
+ }
+
+ return records;
+ }
+
+ private static void closeQuietly( IndexSearcher searcher )
+ {
+ try
+ {
+ if ( searcher != null )
+ {
+ searcher.close();
+ }
+ }
+ catch ( IOException e )
+ {
+ // ignore
+ }
+ }
+
+ private static void closeQuietly( TermEnum terms )
+ throws RepositoryIndexException
+ {
+ if ( terms != null )
+ {
+ try
+ {
+ terms.close();
+ }
+ catch ( IOException e )
+ {
+ // ignore
+ }
+ }
+ }
+
+ private static void closeQuietly( IndexWriter indexWriter )
+ throws RepositoryIndexException
+ {
+ try
+ {
+ if ( indexWriter != null )
+ {
+ indexWriter.close();
+ }
+ }
+ catch ( IOException e )
+ {
+ // the writer should complain if it can't be closed, as the data has probably not been persisted
+ throw new RepositoryIndexException( e.getMessage(), e );
+ }
+ }
+
+ private static void closeQuietly( IndexModifier indexModifier )
+ {
+ if ( indexModifier != null )
+ {
+ try
+ {
+ indexModifier.close();
+ }
+ catch ( IOException e )
+ {
+ // ignore
+ }
+ }
+ }
+
+ private static void closeQuietly( IndexReader reader )
+ {
+ try
+ {
+ if ( reader != null )
+ {
+ reader.close();
+ }
+ }
+ catch ( IOException e )
+ {
+ // ignore
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+
+import java.io.File;
+
+/**
+ * Factory for Lucene artifact index instances.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component role="org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory" role-hint="lucene"
+ */
+public class LuceneRepositoryArtifactIndexFactory
+ implements RepositoryArtifactIndexFactory
+{
+ public RepositoryArtifactIndex createStandardIndex( File indexPath )
+ {
+ return new LuceneRepositoryArtifactIndex( indexPath, new LuceneStandardIndexRecordConverter() );
+ }
+
+ public RepositoryArtifactIndex createMinimalIndex( File indexPath )
+ {
+ return new LuceneRepositoryArtifactIndex( indexPath, new LuceneMinimalIndexRecordConverter() );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.document.DateTools;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumberTools;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
+import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
+import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
+
+import java.text.ParseException;
+import java.util.Arrays;
+
+/**
+ * Convert the standard index record to a Lucene document.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneStandardIndexRecordConverter
+ implements LuceneIndexRecordConverter
+{
+ public Document convert( RepositoryIndexRecord record )
+ {
+ StandardArtifactIndexRecord rec = (StandardArtifactIndexRecord) record;
+
+ Document document = new Document();
+ addTokenizedField( document, StandardIndexRecordFields.FILENAME, rec.getFilename() );
+ addTokenizedField( document, StandardIndexRecordFields.GROUPID, rec.getGroupId() );
+ addExactField( document, StandardIndexRecordFields.GROUPID_EXACT, rec.getGroupId() );
+ addTokenizedField( document, StandardIndexRecordFields.ARTIFACTID, rec.getArtifactId() );
+ addExactField( document, StandardIndexRecordFields.ARTIFACTID_EXACT, rec.getArtifactId() );
+ addTokenizedField( document, StandardIndexRecordFields.VERSION, rec.getVersion() );
+ addExactField( document, StandardIndexRecordFields.VERSION_EXACT, rec.getVersion() );
+ addTokenizedField( document, StandardIndexRecordFields.BASE_VERSION, rec.getBaseVersion() );
+ addExactField( document, StandardIndexRecordFields.BASE_VERSION_EXACT, rec.getBaseVersion() );
+ addUntokenizedField( document, StandardIndexRecordFields.TYPE, rec.getType() );
+ addTokenizedField( document, StandardIndexRecordFields.CLASSIFIER, rec.getClassifier() );
+ addUntokenizedField( document, StandardIndexRecordFields.PACKAGING, rec.getPackaging() );
+ addUntokenizedField( document, StandardIndexRecordFields.REPOSITORY, rec.getRepository() );
+ addUntokenizedField( document, StandardIndexRecordFields.LAST_MODIFIED,
+ DateTools.timeToString( rec.getLastModified(), DateTools.Resolution.SECOND ) );
+ addUntokenizedField( document, StandardIndexRecordFields.FILE_SIZE, NumberTools.longToString( rec.getSize() ) );
+ addUntokenizedField( document, StandardIndexRecordFields.MD5, rec.getMd5Checksum() );
+ addUntokenizedField( document, StandardIndexRecordFields.SHA1, rec.getSha1Checksum() );
+ if ( rec.getClasses() != null )
+ {
+ addTokenizedField( document, StandardIndexRecordFields.CLASSES,
+ StringUtils.join( rec.getClasses().iterator(), "\n" ) );
+ }
+ if ( rec.getFiles() != null )
+ {
+ addTokenizedField( document, StandardIndexRecordFields.FILES,
+ StringUtils.join( rec.getFiles().iterator(), "\n" ) );
+ }
+ addUntokenizedField( document, StandardIndexRecordFields.PLUGIN_PREFIX, rec.getPluginPrefix() );
+ addUntokenizedField( document, StandardIndexRecordFields.INCEPTION_YEAR, rec.getInceptionYear() );
+ addTokenizedField( document, StandardIndexRecordFields.PROJECT_NAME, rec.getProjectName() );
+ addTokenizedField( document, StandardIndexRecordFields.PROJECT_DESCRIPTION, rec.getProjectDescription() );
+ if ( rec.getDependencies() != null )
+ {
+ addTokenizedField( document, StandardIndexRecordFields.DEPENDENCIES,
+ StringUtils.join( rec.getDependencies().iterator(), "\n" ) );
+ }
+ if ( rec.getDevelopers() != null )
+ {
+ addTokenizedField( document, StandardIndexRecordFields.DEVELOPERS,
+ StringUtils.join( rec.getDevelopers().iterator(), "\n" ) );
+ }
+/* TODO: add later
+ document.add( Field.Keyword( StandardIndexRecordFields.FLD_LICENSE_URLS, "" ) );
+ document.add( Field.Keyword( StandardIndexRecordFields.FLD_PLUGINS_REPORT, "" ) );
+ document.add( Field.Keyword( StandardIndexRecordFields.FLD_PLUGINS_BUILD, "" ) );
+*/
+
+ return document;
+ }
+
+ public RepositoryIndexRecord convert( Document document )
+ throws ParseException
+ {
+ StandardArtifactIndexRecord record = new StandardArtifactIndexRecord();
+
+ record.setFilename( document.get( StandardIndexRecordFields.FILENAME ) );
+ record.setGroupId( document.get( StandardIndexRecordFields.GROUPID ) );
+ record.setArtifactId( document.get( StandardIndexRecordFields.ARTIFACTID ) );
+ record.setVersion( document.get( StandardIndexRecordFields.VERSION ) );
+ record.setBaseVersion( document.get( StandardIndexRecordFields.BASE_VERSION ) );
+ record.setType( document.get( StandardIndexRecordFields.TYPE ) );
+ record.setClassifier( document.get( StandardIndexRecordFields.CLASSIFIER ) );
+ record.setPackaging( document.get( StandardIndexRecordFields.PACKAGING ) );
+ record.setRepository( document.get( StandardIndexRecordFields.REPOSITORY ) );
+ record.setLastModified( DateTools.stringToTime( document.get( StandardIndexRecordFields.LAST_MODIFIED ) ) );
+ record.setSize( NumberTools.stringToLong( document.get( StandardIndexRecordFields.FILE_SIZE ) ) );
+ record.setMd5Checksum( document.get( StandardIndexRecordFields.MD5 ) );
+ record.setSha1Checksum( document.get( StandardIndexRecordFields.SHA1 ) );
+ String classes = document.get( StandardIndexRecordFields.CLASSES );
+ if ( classes != null )
+ {
+ record.setClasses( Arrays.asList( classes.split( "\n" ) ) );
+ }
+ String files = document.get( StandardIndexRecordFields.FILES );
+ if ( files != null )
+ {
+ record.setFiles( Arrays.asList( files.split( "\n" ) ) );
+ }
+ String dependencies = document.get( StandardIndexRecordFields.DEPENDENCIES );
+ if ( dependencies != null )
+ {
+ record.setDependencies( Arrays.asList( dependencies.split( "\n" ) ) );
+ }
+ String developers = document.get( StandardIndexRecordFields.DEVELOPERS );
+ if ( developers != null )
+ {
+ record.setDevelopers( Arrays.asList( developers.split( "\n" ) ) );
+ }
+ record.setPluginPrefix( document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
+ record.setInceptionYear( document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
+ record.setProjectName( document.get( StandardIndexRecordFields.PROJECT_NAME ) );
+ record.setProjectDescription( document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
+
+ return record;
+ }
+
+ private static void addUntokenizedField( Document document, String name, String value )
+ {
+ if ( value != null )
+ {
+ document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
+ }
+ }
+
+ private static void addExactField( Document document, String name, String value )
+ {
+ if ( value != null )
+ {
+ document.add( new Field( name, value, Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+ }
+ }
+
+ private static void addTokenizedField( Document document, String name, String value )
+ {
+ if ( value != null )
+ {
+ document.add( new Field( name, value, Field.Store.YES, Field.Index.TOKENIZED ) );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.query;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Class to hold multiple SingleTermQuery and/or other CompoundQuery instances.
+ *
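+ * For example (an illustrative sketch; the field constants come from StandardIndexRecordFields):
+ * <pre>
+ * CompoundQuery query = new CompoundQuery();
+ * query.and( new QueryTerm( StandardIndexRecordFields.GROUPID, "org.apache.maven" ) );
+ * query.and( new QueryTerm( StandardIndexRecordFields.ARTIFACTID, "maven-artifact" ) );
+ * </pre>
+ *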
+ * @author Edwin Punzalan
+ */
+public class CompoundQuery
+ implements Query
+{
+ /**
+ * The query terms.
+ */
+ private final List compoundQueryTerms = new ArrayList();
+
+ /**
+ * Appends a required term to this query.
+ *
+ * @param term the term to be appended to this query
+ */
+ public void and( QueryTerm term )
+ {
+ compoundQueryTerms.add( CompoundQueryTerm.and( new SingleTermQuery( term ) ) );
+ }
+
+ /**
+ * Appends an optional term to this query.
+ *
+ * @param term the term to be appended to this query
+ */
+ public void or( QueryTerm term )
+ {
+ compoundQueryTerms.add( CompoundQueryTerm.or( new SingleTermQuery( term ) ) );
+ }
+
+ /**
+ * Appends a prohibited term to this query.
+ *
+ * @param term the term to be appended to this query
+ */
+ public void not( QueryTerm term )
+ {
+ compoundQueryTerms.add( CompoundQueryTerm.not( new SingleTermQuery( term ) ) );
+ }
+
+ /**
+ * Appends a required subquery to this query.
+ *
+ * @param query the subquery to be appended to this query
+ */
+ public void and( Query query )
+ {
+ compoundQueryTerms.add( CompoundQueryTerm.and( query ) );
+ }
+
+ /**
+ * Appends an optional subquery to this query.
+ *
+ * @param query the subquery to be appended to this query
+ */
+ public void or( Query query )
+ {
+ compoundQueryTerms.add( CompoundQueryTerm.or( query ) );
+ }
+
+ /**
+ * Appends a prohibited subquery to this query.
+ *
+ * @param query the subquery to be appended to this query
+ */
+ public void not( Query query )
+ {
+ compoundQueryTerms.add( CompoundQueryTerm.not( query ) );
+ }
+
+ /**
+ * Method to get the List of Queries appended into this
+ *
+ * @return List of all Queries added to this Query
+ */
+ public List getCompoundQueryTerms()
+ {
+ return compoundQueryTerms;
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.query;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * A single subquery within a compound query, together with its required/prohibited flags.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class CompoundQueryTerm
+{
+ /**
+ * The query to add to the compound query.
+ */
+ private final Query query;
+
+ /**
+ * Whether the term is required (an AND).
+ */
+ private final boolean required;
+
+ /**
+ * Whether the term is prohibited (a NOT).
+ */
+ private final boolean prohibited;
+
+ /**
+ * Class constructor
+ *
+ * @param query the subquery to add
+ * @param required whether the term is required (an AND)
+ * @param prohibited whether the term is prohibited (a NOT)
+ */
+ private CompoundQueryTerm( Query query, boolean required, boolean prohibited )
+ {
+ this.query = query;
+ this.prohibited = prohibited;
+ this.required = required;
+ }
+
+ /**
+ * Method to test if the Query is a search requirement
+ *
+ * @return true if this Query is a search requirement, otherwise returns false
+ */
+ public boolean isRequired()
+ {
+ return required;
+ }
+
+ /**
+ * Method to test if the Query is prohibited in the search result
+ *
+ * @return true if this Query is prohibited in the search result
+ */
+ public boolean isProhibited()
+ {
+ return prohibited;
+ }
+
+ /**
+ * The subquery to execute.
+ *
+ * @return the query
+ */
+ public Query getQuery()
+ {
+ return query;
+ }
+
+ static CompoundQueryTerm and( Query query )
+ {
+ return new CompoundQueryTerm( query, true, false );
+ }
+
+ static CompoundQueryTerm or( Query query )
+ {
+ return new CompoundQueryTerm( query, false, false );
+ }
+
+ static CompoundQueryTerm not( Query query )
+ {
+ return new CompoundQueryTerm( query, false, true );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.query;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Marker interface implemented by all query classes.
+ *
+ * @author Edwin Punzalan
+ */
+public interface Query
+{
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.query;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Class to hold a single field search condition
+ *
+ * @author Edwin Punzalan
+ */
+public class QueryTerm
+{
+ private String field;
+
+ private String value;
+
+ /**
+ * Class constructor
+ *
+ * @param field the index field to search
+ * @param value the index value requirement
+ */
+ public QueryTerm( String field, String value )
+ {
+ this.field = field;
+ this.value = value;
+ }
+
+ /**
+ * Method to retrieve the name of the index field searched
+ *
+ * @return the name of the index field
+ */
+ public String getField()
+ {
+ return field;
+ }
+
+ /**
+ * Method to retrieve the value used in searching the index field
+ *
+ * @return the value to match against the index field
+ */
+ public String getValue()
+ {
+ return value;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.query;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Query object that handles range queries (presently used for dates).
+ *
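+ * For example, an inclusive last-modified range (an illustrative sketch; the field constant comes from
+ * StandardIndexRecordFields and the values assume the yyyyMMddHHmmss form produced by DateTools):
+ * <pre>
+ * RangeQuery query = RangeQuery.createInclusiveRange(
+ *     new QueryTerm( StandardIndexRecordFields.LAST_MODIFIED, "20070101000000" ),
+ *     new QueryTerm( StandardIndexRecordFields.LAST_MODIFIED, "20071231235959" ) );
+ * </pre>
+ *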
+ * @author Maria Odea Ching
+ * @author Brett Porter
+ */
+public class RangeQuery
+ implements Query
+{
+ /**
+ * Whether values equal to the boundaries are included in the query results.
+ */
+ private final boolean inclusive;
+
+ /**
+ * The lower bound.
+ */
+ private final QueryTerm begin;
+
+ /**
+ * The upper bound.
+ */
+ private final QueryTerm end;
+
+ /**
+ * Constructor.
+ *
+ * @param begin the lower bound
+ * @param end the upper bound
+ * @param inclusive whether to include the boundaries in the query
+ */
+ private RangeQuery( QueryTerm begin, QueryTerm end, boolean inclusive )
+ {
+ this.begin = begin;
+ this.end = end;
+ this.inclusive = inclusive;
+ }
+
+ /**
+ * Create an open range, including all results.
+ *
+ * @return the query object
+ */
+ public static RangeQuery createOpenRange()
+ {
+ return new RangeQuery( null, null, false );
+ }
+
+ /**
+ * Create a bounded range, excluding the endpoints.
+ *
+ * @param begin the lower bound value to compare to
+ * @param end the upper bound value to compare to
+ * @return the query object
+ */
+ public static RangeQuery createExclusiveRange( QueryTerm begin, QueryTerm end )
+ {
+ return new RangeQuery( begin, end, false );
+ }
+
+ /**
+ * Create a bounded range, including the endpoints.
+ *
+ * @param begin the lower bound value to compare to
+ * @param end the upper bound value to compare to
+ * @return the query object
+ */
+ public static RangeQuery createInclusiveRange( QueryTerm begin, QueryTerm end )
+ {
+ return new RangeQuery( begin, end, true );
+ }
+
+ /**
+ * Create a range that is greater than or equal to a given term.
+ *
+ * @param begin the value to compare to
+ * @return the query object
+ */
+ public static RangeQuery createGreaterThanOrEqualToRange( QueryTerm begin )
+ {
+ return new RangeQuery( begin, null, true );
+ }
+
+ /**
+ * Create a range that is greater than a given term.
+ *
+ * @param begin the value to compare to
+ * @return the query object
+ */
+ public static RangeQuery createGreaterThanRange( QueryTerm begin )
+ {
+ return new RangeQuery( begin, null, false );
+ }
+
+ /**
+ * Create a range that is less than or equal to a given term.
+ *
+ * @param end the value to compare to
+ * @return the query object
+ */
+ public static RangeQuery createLessThanOrEqualToRange( QueryTerm end )
+ {
+ return new RangeQuery( null, end, true );
+ }
+
+ /**
+ * Create a range that is less than a given term.
+ *
+ * @param end the value to compare to
+ * @return the query object
+ */
+ public static RangeQuery createLessThanRange( QueryTerm end )
+ {
+ return new RangeQuery( null, end, false );
+ }
+
+ public QueryTerm getBegin()
+ {
+ return begin;
+ }
+
+ public QueryTerm getEnd()
+ {
+ return end;
+ }
+
+ public boolean isInclusive()
+ {
+ return inclusive;
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.query;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Query for a single term.
+ *
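+ * For example (an illustrative sketch):
+ * <pre>
+ * Query query = new SingleTermQuery( StandardIndexRecordFields.ARTIFACTID, "maven-artifact" );
+ * </pre>
+ *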
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class SingleTermQuery
+ implements Query
+{
+ /**
+ * The term to query for.
+ */
+ private final QueryTerm term;
+
+ /**
+ * Constructor.
+ *
+ * @param term the term to query
+ */
+ public SingleTermQuery( QueryTerm term )
+ {
+ this.term = term;
+ }
+
+ /**
+ * Shorthand constructor - create a single term query from a field and value
+ *
+ * @param field the field name
+ * @param value the value to check for
+ */
+ public SingleTermQuery( String field, String value )
+ {
+ this.term = new QueryTerm( field, value );
+ }
+
+ public String getField()
+ {
+ return term.getField();
+ }
+
+ public String getValue()
+ {
+ return term.getValue();
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.codehaus.plexus.digest.Digester;
+import org.codehaus.plexus.digest.DigesterException;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+
+/**
+ * Base class for the index record factories.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public abstract class AbstractArtifactIndexRecordFactory
+ extends AbstractLogEnabled
+ implements RepositoryIndexRecordFactory
+{
+ protected String readChecksum( File file, Digester digester )
+ {
+ String checksum;
+ try
+ {
+ checksum = digester.calc( file ).toLowerCase();
+ }
+ catch ( DigesterException e )
+ {
+ getLogger().error( "Error getting checksum for artifact file, leaving empty in index: " + e.getMessage() );
+ checksum = null;
+ }
+ return checksum;
+ }
+
+ protected List readFilesInArchive( File file )
+ throws IOException
+ {
+ ZipFile zipFile = new ZipFile( file );
+ List files;
+ try
+ {
+ files = new ArrayList( zipFile.size() );
+
+ for ( Enumeration entries = zipFile.entries(); entries.hasMoreElements(); )
+ {
+ ZipEntry entry = (ZipEntry) entries.nextElement();
+
+ files.add( entry.getName() );
+ }
+ }
+ finally
+ {
+ closeQuietly( zipFile );
+ }
+ return files;
+ }
+
+ protected static boolean isClass( String name )
+ {
+ // TODO: verify if class is public or protected (this might require the original ZipEntry)
+ return name.endsWith( ".class" ) && name.lastIndexOf( "$" ) < 0;
+ }
+
+ protected static void closeQuietly( ZipFile zipFile )
+ {
+ try
+ {
+ if ( zipFile != null )
+ {
+ zipFile.close();
+ }
+ }
+ catch ( IOException e )
+ {
+ // ignored
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
+
+import java.util.Collection;
+
+/**
+ * Filter that removes artifacts already in the index.
+ * TODO: we could do timestamp comparisons here
+ */
+public class IndexRecordExistsArtifactFilter
+ implements ArtifactFilter
+{
+ private final Collection keys;
+
+ public IndexRecordExistsArtifactFilter( Collection keys )
+ {
+ this.keys = keys;
+ }
+
+ public boolean include( Artifact artifact )
+ {
+ String artifactKey = artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion() +
+ ( artifact.getClassifier() != null ? ":" + artifact.getClassifier() : "" );
+ return !keys.contains( artifactKey );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.Date;
+import java.util.List;
+
+/**
+ * A record with the fields in the minimal index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class MinimalArtifactIndexRecord
+ implements RepositoryIndexRecord
+{
+ /**
+ * The classes in the archive for the artifact, if it is a JAR.
+ */
+ private List classes;
+
+ /**
+ * The MD5 checksum of the artifact file.
+ */
+ private String md5Checksum;
+
+ /**
+ * The filename of the artifact file (no path).
+ */
+ private String filename;
+
+ /**
+ * The timestamp that the artifact file was last modified. Granularity is seconds.
+ */
+ private long lastModified;
+
+ /**
+ * The size of the artifact file in bytes.
+ */
+ private long size;
+
+ private static final int MS_PER_SEC = 1000;
+
+ public void setClasses( List classes )
+ {
+ this.classes = classes;
+ }
+
+ public void setMd5Checksum( String md5Checksum )
+ {
+ this.md5Checksum = md5Checksum;
+ }
+
+ public void setFilename( String filename )
+ {
+ this.filename = filename;
+ }
+
+ public void setLastModified( long lastModified )
+ {
+ this.lastModified = lastModified - lastModified % MS_PER_SEC;
+ }
+
+ public void setSize( long size )
+ {
+ this.size = size;
+ }
+
+ public List getClasses()
+ {
+ return classes;
+ }
+
+ public String getMd5Checksum()
+ {
+ return md5Checksum;
+ }
+
+ public String getFilename()
+ {
+ return filename;
+ }
+
+ public long getLastModified()
+ {
+ return lastModified;
+ }
+
+ public long getSize()
+ {
+ return size;
+ }
+
+ /**
+ * @noinspection RedundantIfStatement
+ */
+ public boolean equals( Object obj )
+ {
+ if ( this == obj )
+ {
+ return true;
+ }
+ if ( obj == null || getClass() != obj.getClass() )
+ {
+ return false;
+ }
+
+ MinimalArtifactIndexRecord that = (MinimalArtifactIndexRecord) obj;
+
+ if ( lastModified != that.lastModified )
+ {
+ return false;
+ }
+ if ( size != that.size )
+ {
+ return false;
+ }
+ if ( classes != null ? !classes.equals( that.classes ) : that.classes != null )
+ {
+ return false;
+ }
+ if ( !filename.equals( that.filename ) )
+ {
+ return false;
+ }
+ if ( md5Checksum != null ? !md5Checksum.equals( that.md5Checksum ) : that.md5Checksum != null )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * @noinspection UnnecessaryParentheses
+ */
+ public int hashCode()
+ {
+ int result = classes != null ? classes.hashCode() : 0;
+ result = 31 * result + ( md5Checksum != null ? md5Checksum.hashCode() : 0 );
+ result = 31 * result + filename.hashCode();
+ result = 31 * result + (int) ( lastModified ^ ( lastModified >>> 32 ) );
+ result = 31 * result + (int) ( size ^ ( size >>> 32 ) );
+ return result;
+ }
+
+ public String toString()
+ {
+ return "Filename: " + filename + "; checksum: " + md5Checksum + "; size: " + size + "; lastModified: " +
+ new Date( lastModified ) + "; classes: " + classes;
+ }
+
+ public String getPrimaryKey()
+ {
+ return filename;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.digest.Digester;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * An index record factory for the minimal index.
+ *
+ * @author Edwin Punzalan
+ * @author Brett Porter
+ * @plexus.component role="org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory" role-hint="minimal"
+ */
+public class MinimalArtifactIndexRecordFactory
+ extends AbstractArtifactIndexRecordFactory
+{
+ /* List of types to index. */
+ private static final Set INDEXED_TYPES = new HashSet( Arrays.asList( new String[]{"jar", "maven-plugin"} ) );
+
+ /**
+ * @plexus.requirement role-hint="sha1"
+ */
+ protected Digester sha1Digester;
+
+ /**
+ * @plexus.requirement role-hint="md5"
+ */
+ protected Digester md5Digester;
+
+ public RepositoryIndexRecord createRecord( Artifact artifact )
+ {
+ MinimalArtifactIndexRecord record = null;
+
+ File file = artifact.getFile();
+ if ( file != null && INDEXED_TYPES.contains( artifact.getType() ) && file.exists() )
+ {
+ String md5 = readChecksum( file, md5Digester );
+
+ List files = null;
+ try
+ {
+ files = readFilesInArchive( file );
+ }
+ catch ( IOException e )
+ {
+ getLogger().error( "Error reading artifact file, omitting from index: " + e.getMessage() );
+ }
+
+ if ( files != null )
+ {
+ record = new MinimalArtifactIndexRecord();
+ record.setMd5Checksum( md5 );
+ record.setFilename( artifact.getRepository().pathOf( artifact ) );
+ record.setLastModified( file.lastModified() );
+ record.setSize( file.length() );
+ record.setClasses( getClassesFromFiles( files ) );
+ }
+ }
+ return record;
+ }
+
+ private List getClassesFromFiles( List files )
+ {
+ List classes = new ArrayList();
+
+ for ( Iterator i = files.iterator(); i.hasNext(); )
+ {
+ String name = (String) i.next();
+
+ if ( isClass( name ) )
+ {
+ classes.add( name.substring( 0, name.length() - 6 ).replace( '/', '.' ) );
+ }
+ }
+
+ return classes;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * The fields in a minimal artifact index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo should be an enum
+ */
+public class MinimalIndexRecordFields
+{
+ public static final String FILENAME = "j";
+
+ public static final String LAST_MODIFIED = "d";
+
+ public static final String FILE_SIZE = "s";
+
+ public static final String MD5 = "m";
+
+ public static final String CLASSES = "c";
+
+ private MinimalIndexRecordFields()
+ {
+ // No touchy!
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * A repository index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryIndexRecord
+{
+ /**
+ * Get the primary key used to identify the record uniquely in the index.
+ *
+ * @return the primary key
+ */
+ String getPrimaryKey();
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * A factory for creating index records from artifacts in a repository.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryIndexRecordFactory
+{
+ /**
+ * The Plexus role.
+ */
+ String ROLE = RepositoryIndexRecordFactory.class.getName();
+
+ /**
+ * Create an index record from an artifact.
+ *
+ * @param artifact the artifact
+ * @return the index record
+ * @throws RepositoryIndexException if there is a problem constructing the record (due to not being able to read the artifact file as a POM)
+ */
+ RepositoryIndexRecord createRecord( Artifact artifact )
+ throws RepositoryIndexException;
+
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * A record with the fields in the standard index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class StandardArtifactIndexRecord
+ extends MinimalArtifactIndexRecord
+{
+ /**
+ * The SHA-1 checksum of the artifact file.
+ */
+ private String sha1Checksum;
+
+ /**
+ * The artifact's group.
+ */
+ private String groupId;
+
+ /**
+ * The artifact's identifier within the group.
+ */
+ private String artifactId;
+
+ /**
+ * The artifact's version.
+ */
+ private String version;
+
+ /**
+ * The classifier, if there is one.
+ */
+ private String classifier;
+
+ /**
+ * The artifact type (from the file).
+ */
+ private String type;
+
+ /**
+ * A list of files in the artifact if it is an archive (stored in the index as a '\n'-delimited value).
+ */
+ private List files;
+
+ /**
+ * The identifier of the repository that the artifact came from.
+ */
+ private String repository;
+
+ /**
+ * The packaging specified in the POM for this artifact.
+ */
+ private String packaging;
+
+ /**
+ * The plugin prefix specified in the metadata if the artifact is a plugin.
+ */
+ private String pluginPrefix;
+
+ /**
+ * The year the project was started.
+ */
+ private String inceptionYear;
+
+ /**
+ * The description of the project.
+ */
+ private String projectDescription;
+
+ /**
+ * The name of the project.
+ */
+ private String projectName;
+
+ /**
+ * The base version (before the snapshot is determined).
+ */
+ private String baseVersion;
+
+ /**
+ * A list of dependencies for the artifact, each a string of the form <code>groupId:artifactId:version</code>.
+ */
+ private List dependencies;
+
+ /**
+ * A list of developers in the POM, each a string of the form <code>id:name:email</code>.
+ */
+ private List developers;
+
+ public void setSha1Checksum( String sha1Checksum )
+ {
+ this.sha1Checksum = sha1Checksum;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public void setArtifactId( String artifactId )
+ {
+ this.artifactId = artifactId;
+ }
+
+ public void setVersion( String version )
+ {
+ this.version = version;
+ }
+
+ public void setClassifier( String classifier )
+ {
+ this.classifier = classifier;
+ }
+
+ public void setType( String type )
+ {
+ this.type = type;
+ }
+
+ public void setFiles( List files )
+ {
+ this.files = files;
+ }
+
+ public void setRepository( String repository )
+ {
+ this.repository = repository;
+ }
+
+ /**
+ * @noinspection RedundantIfStatement
+ */
+ public boolean equals( Object obj )
+ {
+ if ( this == obj )
+ {
+ return true;
+ }
+ if ( obj == null || getClass() != obj.getClass() )
+ {
+ return false;
+ }
+ if ( !super.equals( obj ) )
+ {
+ return false;
+ }
+
+ StandardArtifactIndexRecord that = (StandardArtifactIndexRecord) obj;
+
+ if ( !artifactId.equals( that.artifactId ) )
+ {
+ return false;
+ }
+ if ( classifier != null ? !classifier.equals( that.classifier ) : that.classifier != null )
+ {
+ return false;
+ }
+
+ if ( dependencies != null && that.dependencies != null )
+ {
+ List sorted = new ArrayList( dependencies );
+ Collections.sort( sorted );
+
+ List sortedOther = new ArrayList( that.dependencies );
+ Collections.sort( sortedOther );
+
+ if ( !sorted.equals( sortedOther ) )
+ {
+ return false;
+ }
+ }
+ else if ( !( dependencies == null && that.dependencies == null ) )
+ {
+ return false;
+ }
+
+ if ( developers != null ? !developers.equals( that.developers ) : that.developers != null )
+ {
+ return false;
+ }
+ if ( files != null ? !files.equals( that.files ) : that.files != null )
+ {
+ return false;
+ }
+ if ( !groupId.equals( that.groupId ) )
+ {
+ return false;
+ }
+ if ( repository != null ? !repository.equals( that.repository ) : that.repository != null )
+ {
+ return false;
+ }
+ if ( sha1Checksum != null ? !sha1Checksum.equals( that.sha1Checksum ) : that.sha1Checksum != null )
+ {
+ return false;
+ }
+ if ( type != null ? !type.equals( that.type ) : that.type != null )
+ {
+ return false;
+ }
+ if ( !version.equals( that.version ) )
+ {
+ return false;
+ }
+ if ( !baseVersion.equals( that.baseVersion ) )
+ {
+ return false;
+ }
+ if ( packaging != null ? !packaging.equals( that.packaging ) : that.packaging != null )
+ {
+ return false;
+ }
+ if ( pluginPrefix != null ? !pluginPrefix.equals( that.pluginPrefix ) : that.pluginPrefix != null )
+ {
+ return false;
+ }
+ if ( projectName != null ? !projectName.equals( that.projectName ) : that.projectName != null )
+ {
+ return false;
+ }
+ if ( inceptionYear != null ? !inceptionYear.equals( that.inceptionYear ) : that.inceptionYear != null )
+ {
+ return false;
+ }
+ if ( projectDescription != null ? !projectDescription.equals( that.projectDescription )
+ : that.projectDescription != null )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ public int hashCode()
+ {
+ int result = super.hashCode();
+ result = 31 * result + ( sha1Checksum != null ? sha1Checksum.hashCode() : 0 );
+ result = 31 * result + groupId.hashCode();
+ result = 31 * result + artifactId.hashCode();
+ result = 31 * result + version.hashCode();
+ result = 31 * result + baseVersion.hashCode();
+ result = 31 * result + ( classifier != null ? classifier.hashCode() : 0 );
+ result = 31 * result + ( type != null ? type.hashCode() : 0 );
+ result = 31 * result + ( files != null ? files.hashCode() : 0 );
+ result = 31 * result + ( developers != null ? developers.hashCode() : 0 );
+
+ if ( dependencies != null )
+ {
+ List sorted = new ArrayList( dependencies );
+ Collections.sort( sorted );
+
+ result = 31 * result + sorted.hashCode();
+ }
+
+ result = 31 * result + ( repository != null ? repository.hashCode() : 0 );
+ result = 31 * result + ( packaging != null ? packaging.hashCode() : 0 );
+ result = 31 * result + ( pluginPrefix != null ? pluginPrefix.hashCode() : 0 );
+ result = 31 * result + ( inceptionYear != null ? inceptionYear.hashCode() : 0 );
+ result = 31 * result + ( projectName != null ? projectName.hashCode() : 0 );
+ result = 31 * result + ( projectDescription != null ? projectDescription.hashCode() : 0 );
+ return result;
+ }
+
+ public String getSha1Checksum()
+ {
+ return sha1Checksum;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public String getArtifactId()
+ {
+ return artifactId;
+ }
+
+ public String getVersion()
+ {
+ return version;
+ }
+
+ public String getClassifier()
+ {
+ return classifier;
+ }
+
+ public String getType()
+ {
+ return type;
+ }
+
+ public List getFiles()
+ {
+ return files;
+ }
+
+ public String getRepository()
+ {
+ return repository;
+ }
+
+ public String getPackaging()
+ {
+ return packaging;
+ }
+
+ public String getPluginPrefix()
+ {
+ return pluginPrefix;
+ }
+
+ public void setPackaging( String packaging )
+ {
+ this.packaging = packaging;
+ }
+
+ public void setPluginPrefix( String pluginPrefix )
+ {
+ this.pluginPrefix = pluginPrefix;
+ }
+
+ public void setInceptionYear( String inceptionYear )
+ {
+ this.inceptionYear = inceptionYear;
+ }
+
+ public void setProjectDescription( String description )
+ {
+ this.projectDescription = description;
+ }
+
+ public void setProjectName( String projectName )
+ {
+ this.projectName = projectName;
+ }
+
+ public String getInceptionYear()
+ {
+ return inceptionYear;
+ }
+
+ public String getProjectDescription()
+ {
+ return projectDescription;
+ }
+
+ public String getProjectName()
+ {
+ return projectName;
+ }
+
+ public void setBaseVersion( String baseVersion )
+ {
+ this.baseVersion = baseVersion;
+ }
+
+ public String getBaseVersion()
+ {
+ return baseVersion;
+ }
+
+ public void setDependencies( List dependencies )
+ {
+ this.dependencies = dependencies;
+ }
+
+ public void setDevelopers( List developers )
+ {
+ this.developers = developers;
+ }
+
+ public List getDevelopers()
+ {
+ return developers;
+ }
+
+ public List getDependencies()
+ {
+ return dependencies;
+ }
+
+ public String getPrimaryKey()
+ {
+ return groupId + ":" + artifactId + ":" + version + ( classifier != null ? ":" + classifier : "" );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.InvalidArtifactRTException;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Dependency;
+import org.apache.maven.model.Developer;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
+import org.codehaus.plexus.digest.Digester;
+import org.codehaus.plexus.util.xml.Xpp3Dom;
+import org.codehaus.plexus.util.xml.Xpp3DomBuilder;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipException;
+import java.util.zip.ZipFile;
+
+/**
+ * An index record factory for the standard index.
+ *
+ * @author Edwin Punzalan
+ * @author Brett Porter
+ * @plexus.component role="org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory" role-hint="standard"
+ */
+public class StandardArtifactIndexRecordFactory
+ extends AbstractArtifactIndexRecordFactory
+{
+ /**
+ * A list of artifact types to treat as a zip archive.
+ *
+ * @todo this should be smarter (perhaps use plexus archiver to look for an unarchiver, and make the ones for zip configurable since sar, par, etc. can be added at random).
+ */
+ private static final Set ARCHIVE_TYPES =
+ new HashSet( Arrays.asList( new String[]{"jar", "ejb", "par", "sar", "war", "ear", "rar"} ) );
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactFactory artifactFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private MavenProjectBuilder projectBuilder;
+
+ /**
+ * @plexus.requirement role-hint="sha1"
+ */
+ protected Digester sha1Digester;
+
+ /**
+ * @plexus.requirement role-hint="md5"
+ */
+ protected Digester md5Digester;
+
+ private static final String SITE_TEMPLATE_NAME = "META-INF/maven/site.vm";
+
+ private static final String SITE_CSS_NAME = "css/maven-theme.css";
+
+ private static final String PLUGIN_METADATA_NAME = "META-INF/maven/plugin.xml";
+
+ private static final String ARCHETYPE_METADATA_NAME = "META-INF/maven/archetype.xml";
+
+ // some current/old archetypes have the archetype.xml at different location.
+ private static final String ARCHETYPE_METADATA_NAME_OLD = "META-INF/archetype.xml";
+
+ public RepositoryIndexRecord createRecord( Artifact artifact )
+ throws RepositoryIndexException
+ {
+ StandardArtifactIndexRecord record = null;
+
+ File file = artifact.getFile();
+
+ // TODO: is this condition really a possibility?
+ if ( file != null && file.exists() )
+ {
+ String md5 = readChecksum( file, md5Digester );
+ String sha1 = readChecksum( file, sha1Digester );
+
+ List files = null;
+ boolean archive = ARCHIVE_TYPES.contains( artifact.getType() );
+ try
+ {
+ if ( archive )
+ {
+ files = readFilesInArchive( file );
+ }
+ }
+ catch ( IOException e )
+ {
+ getLogger().error( "Error reading artifact file, omitting from index: " + e.getMessage() );
+ }
+
+ // If it's an archive with no files, don't create a record
+ if ( !archive || files != null )
+ {
+ record = new StandardArtifactIndexRecord();
+
+ record.setGroupId( artifact.getGroupId() );
+ record.setArtifactId( artifact.getArtifactId() );
+ record.setBaseVersion( artifact.getBaseVersion() );
+ record.setVersion( artifact.getVersion() );
+ record.setClassifier( artifact.getClassifier() );
+ record.setType( artifact.getType() );
+ record.setMd5Checksum( md5 );
+ record.setSha1Checksum( sha1 );
+ record.setFilename( artifact.getRepository().pathOf( artifact ) );
+ record.setLastModified( file.lastModified() );
+ record.setSize( file.length() );
+ record.setRepository( artifact.getRepository().getId() );
+
+ if ( files != null )
+ {
+ populateArchiveEntries( files, record, artifact.getFile() );
+ }
+
+ if ( !"pom".equals( artifact.getType() ) )
+ {
+ Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
+ artifact.getArtifactId(),
+ artifact.getVersion() );
+ pomArtifact.isSnapshot(); // gross hack around bug in maven-artifact
+ File pomFile = new File( artifact.getRepository().getBasedir(),
+ artifact.getRepository().pathOf( pomArtifact ) );
+ if ( pomFile.exists() )
+ {
+ try
+ {
+ populatePomEntries( readPom( pomArtifact, artifact.getRepository() ), record );
+ }
+ catch ( ProjectBuildingException e )
+ {
+ getLogger().error( "Error reading POM file [" + pomFile + "] for " + artifact +
+ ", not populating in index: " + e.getMessage() );
+ }
+ }
+ }
+ else
+ {
+ Model model;
+ try
+ {
+ model = readPom( artifact, artifact.getRepository() );
+
+ if ( !"pom".equals( model.getPackaging() ) )
+ {
+ // Don't return a record for a POM that does not stand on its own
+ record = null;
+ }
+ else
+ {
+ populatePomEntries( model, record );
+ }
+ }
+ catch ( ProjectBuildingException e )
+ {
+ getLogger().error(
+ "Error reading POM file for " + artifact + ", not populating in index: " + e.getMessage() );
+ }
+ }
+ }
+ }
+
+ return record;
+ }
+
+ private void populatePomEntries( Model pom, StandardArtifactIndexRecord record )
+ {
+ record.setPackaging( pom.getPackaging() );
+ record.setProjectName( pom.getName() );
+ record.setProjectDescription( pom.getDescription() );
+ record.setInceptionYear( pom.getInceptionYear() );
+
+ List dependencies = populateDependencies( pom.getDependencies() );
+ if ( !dependencies.isEmpty() )
+ {
+ record.setDependencies( dependencies );
+ }
+ List developers = populateDevelopers( pom.getDevelopers() );
+ if ( !developers.isEmpty() )
+ {
+ record.setDevelopers( developers );
+ }
+
+/* TODO: fields for later
+ indexPlugins( doc, FLD_PLUGINS_BUILD, pom.getBuild().getPlugins().iterator() );
+ indexReportPlugins( doc, FLD_PLUGINS_REPORT, pom.getReporting().getPlugins().iterator() );
+ record.setLicenses( licenses );
+*/
+ }
+
+ private List populateDependencies( List dependencies )
+ {
+ List convertedDependencies = new ArrayList();
+
+ for ( Iterator i = dependencies.iterator(); i.hasNext(); )
+ {
+ Dependency dependency = (Dependency) i.next();
+
+ convertedDependencies.add(
+ dependency.getGroupId() + ":" + dependency.getArtifactId() + ":" + dependency.getVersion() );
+ }
+
+ return convertedDependencies;
+ }
+
+ private List populateDevelopers( List developers )
+ {
+ List convertedDevelopers = new ArrayList();
+
+ for ( Iterator i = developers.iterator(); i.hasNext(); )
+ {
+ Developer developer = (Developer) i.next();
+
+ convertedDevelopers.add( developer.getId() + ":" + developer.getName() + ":" + developer.getEmail() );
+ }
+
+ return convertedDevelopers;
+ }
+
+ private Model readPom( Artifact artifact, ArtifactRepository repository )
+ throws RepositoryIndexException, ProjectBuildingException
+ {
+ // TODO: this can create a -SNAPSHOT.pom when it didn't exist and a timestamped one did. This is harmless, but should be avoided
+ // TODO: will this pollute with local repo metadata?
+
+ try
+ {
+ MavenProject project = projectBuilder.buildFromRepository( artifact, Collections.EMPTY_LIST, repository );
+ return project.getModel();
+ }
+ catch ( InvalidArtifactRTException e )
+ {
+ throw new ProjectBuildingException( artifact.getId(),
+ "Unable to build project from invalid artifact [" + artifact + "]", e );
+ }
+ }
+
+ private void populateArchiveEntries( List files, StandardArtifactIndexRecord record, File artifactFile )
+ throws RepositoryIndexException
+ {
+ List classes = new ArrayList();
+ List fileList = new ArrayList();
+
+ for ( Iterator i = files.iterator(); i.hasNext(); )
+ {
+ String name = (String) i.next();
+
+ // ignore directories
+ if ( !name.endsWith( "/" ) )
+ {
+ fileList.add( name );
+
+ if ( isClass( name ) )
+ {
+ classes.add( name.substring( 0, name.length() - 6 ).replace( '/', '.' ) );
+ }
+ else if ( PLUGIN_METADATA_NAME.equals( name ) )
+ {
+ populatePluginEntries( readXmlMetadataFileInJar( artifactFile, PLUGIN_METADATA_NAME ), record );
+ }
+ else if ( ARCHETYPE_METADATA_NAME.equals( name ) || ARCHETYPE_METADATA_NAME_OLD.equals( name ) )
+ {
+ populateArchetypeEntries( record );
+ }
+ else if ( SITE_TEMPLATE_NAME.equals( name ) || SITE_CSS_NAME.equals( name ) )
+ {
+ populateSkinEntries( record );
+ }
+ }
+ }
+
+ if ( !classes.isEmpty() )
+ {
+ record.setClasses( classes );
+ }
+ if ( !fileList.isEmpty() )
+ {
+ record.setFiles( fileList );
+ }
+ }
+
+ private void populateArchetypeEntries( StandardArtifactIndexRecord record )
+ {
+ // Typically discovered as a JAR
+ record.setType( "maven-archetype" );
+ }
+
+ private void populateSkinEntries( StandardArtifactIndexRecord record )
+ {
+ // Typically discovered as a JAR
+ record.setType( "maven-skin" );
+ }
+
+ private Xpp3Dom readXmlMetadataFileInJar( File file, String name )
+ throws RepositoryIndexException
+ {
+ // TODO: would be more efficient with original ZipEntry still around
+
+ Xpp3Dom xpp3Dom;
+ ZipFile zipFile = null;
+ try
+ {
+ zipFile = new ZipFile( file );
+ ZipEntry entry = zipFile.getEntry( name );
+ xpp3Dom = Xpp3DomBuilder.build( new InputStreamReader( zipFile.getInputStream( entry ) ) );
+ }
+ catch ( ZipException e )
+ {
+ throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( zipFile );
+ }
+ return xpp3Dom;
+ }
+
+ public void populatePluginEntries( Xpp3Dom metadata, StandardArtifactIndexRecord record )
+ {
+ // Typically discovered as a JAR
+ record.setType( "maven-plugin" );
+
+ Xpp3Dom prefix = metadata.getChild( "goalPrefix" );
+
+ if ( prefix != null )
+ {
+ record.setPluginPrefix( prefix.getValue() );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * The fields in a standard artifact index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo should be an enum
+ */
+public class StandardIndexRecordFields
+{
+ public static final String FILENAME = "filename";
+
+ public static final String GROUPID = "groupId";
+
+ public static final String GROUPID_EXACT = GROUPID + "_u";
+
+ public static final String ARTIFACTID = "artifactId";
+
+ public static final String ARTIFACTID_EXACT = ARTIFACTID + "_u";
+
+ public static final String VERSION = "version";
+
+ public static final String VERSION_EXACT = VERSION + "_u";
+
+ public static final String BASE_VERSION = "baseVersion";
+
+ public static final String BASE_VERSION_EXACT = BASE_VERSION + "_u";
+
+ public static final String TYPE = "type";
+
+ public static final String CLASSIFIER = "classifier";
+
+ public static final String PACKAGING = "packaging";
+
+ public static final String REPOSITORY = "repo";
+
+ public static final String LAST_MODIFIED = "lastModified";
+
+ public static final String FILE_SIZE = "fileSize";
+
+ public static final String MD5 = "md5";
+
+ public static final String SHA1 = "sha1";
+
+ public static final String CLASSES = "classes";
+
+ public static final String PLUGIN_PREFIX = "pluginPrefix";
+
+ public static final String FILES = "files";
+
+ public static final String INCEPTION_YEAR = "inceptionYear";
+
+ public static final String PROJECT_NAME = "projectName";
+
+ public static final String PROJECT_DESCRIPTION = "projectDesc";
+
+ public static final String DEVELOPERS = "developers";
+
+ public static final String DEPENDENCIES = "dependencies";
+
+ private StandardIndexRecordFields()
+ {
+ // No touchy!
+ }
+}
--- /dev/null
+ -----
+ Indexer Design
+ -----
+ Brett Porter
+ -----
+ 25 July 2006
+ -----
+
+~~ Copyright 2006 The Apache Software Foundation.
+~~
+~~ Licensed under the Apache License, Version 2.0 (the "License");
+~~ you may not use this file except in compliance with the License.
+~~ You may obtain a copy of the License at
+~~
+~~ http://www.apache.org/licenses/LICENSE-2.0
+~~
+~~ Unless required by applicable law or agreed to in writing, software
+~~ distributed under the License is distributed on an "AS IS" BASIS,
+~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~~ See the License for the specific language governing permissions and
+~~ limitations under the License.
+
+~~ NOTE: For help with the syntax of this file, see:
+~~ http://maven.apache.org/guides/mini/guide-apt-format.html
+
+Indexer Design
+
+ <<Note: The current indexer design is under review. This document will grow into what it should be, and the code and
+ tests refactored to match>>
+
+ ~~TODO: separate API design from Lucene implementation design
+
+* Standard Artifact Index
+
+ We currently want to index these elements from the repository:
+
+ * for each artifact file: the artifact ID, version, group ID, classifier, type (extension), filename (including path
+ from the repository base), checksums (md5, sha1) and size
+
+ * for each artifact POM: the packaging, licenses, dependencies, build plugins, reporting plugins
+
+ * plugin prefix
+
+ * Java classes within a JAR artifact (delimited by \n)
+
+ * filenames within an archive (delimited by \n)
+
+ * the identifier of the source repository
+
+ Each record in the index refers to an artifact. Since the content for a record can come from various sources, the
+ record may need to be updated when different files related to the same artifact are discovered (i.e. the POM, or,
+ for plugins, the metadata that contains their prefix).
+
+ To simplify this, the process for discovery is as follows:
+
+ * Discovered artifacts will read the related POM and metadata from the repository to index, rather than relying on
+ it being discovered. This ensures that partial discovery still yields correct results in all cases, and it is
+ possible to construct the entire record without having to read back from the index.
+
+ * POMs whose packaging is not <<<pom>>> are not indexed as records in their own right.
+
+ The result of this process is that updates to a POM or to repository metadata that are not accompanied by an update
+ to the corresponding artifact(s) will not update the index. As POMs should not be modified, this should not be a
+ major concern. Likewise, updates to metadata will only accompany updates to the artifact itself, so they will not
+ cause a problem.
+
+ The above case may have a problem if the discovery happens during the middle of a deployment outside of the
+ repository manager (where the artifact is present, but the metadata or POM is not). To avoid such cases, the
+ discoverer should only detect changes more than a minute old (this blackout should be configurable).
+
+ Other techniques were considered:
+
+ * Processing each artifact file individually, updating each record as needed. This would result in having to read
+ back each index record before writing. This is quite costly in Lucene as it would be "read, delete, add". You
+ must have a reader and writer open for that process, and it greatly complicates the code.
+
+ * Have three indices, one for each. This would complicate searching (and may affect ranking of results, though this
+ was not analysed). While Lucene is
+ {{{http://wiki.apache.org/jakarta-lucene/LuceneFAQ#head-b11296f9e7b2a5e7496d67118d0a5898f2fd9823} capable of
+ searching multiple indices}}, it is expected that the results would be in the form of a list of separate records
+ rather than the "table join" this effectively is. A similar derivative of this technique would be to store
+ everything in one index, using a field (previously, doctype) to identify each record.
+
+ Records in the index are keyed by their path from the repository root. While this is longer than using the
+ dependency conflict ID, Lucene cannot delete by a combination of terms, so would require storing an additional
+ field in the index where the file already exists.
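+
+ As an illustration only (not part of the current code), the sketch below shows how a record keyed by its repository
+ path might be removed before being re-added. It assumes the path is indexed as a single untokenized term; the class
+ and method names are hypothetical.
+
++------------------------------------------------------------------------------+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
+
+import java.io.File;
+import java.io.IOException;
+
+public class DeleteByPathSketch
+{
+    // Lucene deletes by a single term, which is why the repository path is used as the record key.
+    public static void deleteRecord( File indexDirectory, String repositoryPath )
+        throws IOException
+    {
+        IndexReader reader = IndexReader.open( indexDirectory );
+        try
+        {
+            // assumption: the path is indexed untokenized under the filename field
+            reader.deleteDocuments( new Term( StandardIndexRecordFields.FILENAME, repositoryPath ) );
+        }
+        finally
+        {
+            reader.close();
+        }
+    }
+}
++------------------------------------------------------------------------------+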
+
+ The plugin prefix could be found either from inside the plugin JAR (<<<META-INF/maven/plugin.xml>>>), or from the
+ repository metadata for the plugin's group. For simplicity, the first approach will be used. This means at present
+ there is no need to index the repository metadata, however that may be considered in future.
+
+ Note that archetypes currently don't have a packaging associated with them in Maven, so it is not recorded in the POM.
+ However, to be able to search by this type, the indexer will look for a <<<META-INF/maven/archetype.xml>>> file, and
+ if found set its packaging to <<<maven-archetype>>>. In the future, this handling will be deprecated as the POMs
+ can start using the appropriate packaging.
+
+ The index is shared among multiple repositories. The source repository is recorded in the index record. The
+ discovery/conversion/reporting mechanisms are expected to deal with duplicates before they reach the indexer, so if
+ the indexer encounters an artifact from a different repository than the one it was originally indexed from, it will
+ simply replace the record.
+
+ When indexing metadata from a POM, the POM should be loaded using the Maven project builder so that inheritance and
+ interpolation are performed. This ensures that the record is as complete as possible, and that searching by
+ fields that are inherited will reveal both the parent and the children in the search results.
+
+* Reduced Size Index
+
+ An additional index is maintained by the repository manager in the
+ {{{../apidocs/org/apache/maven/archiva/indexer/record/MinimalArtifactIndexRecord.html} MinimalArtifactIndexRecord}} class. This
+ indexes all of the same artifacts as the first index, but stores them with shorter field names and less information to
+ maintain a smaller size. This index is appropriate for use by certain clients such as IDE integration for fast
+ searching. For a fuller interface to the repository information, the integration should use the XMLRPC interface.
+
+ The following fields are in the reduced index:
+
+ * <<<j>>>: The JAR filename
+
+ * <<<s>>>: The JAR size
+
+ * <<<d>>>: The last modified timestamp
+
+ * <<<c>>>: A list of classes in the JAR (\n delimited)
+
+ * <<<m>>>: md5 checksum of the JAR
+
+ * <<<pk>>>: the primary key of the artifact
+
+ Only JARs are indexed at present. The JAR filename is used as the key for later deleting entries.
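+
+ The following is a rough, hypothetical sketch of how such a record could be converted to a Lucene document using the
+ short field names above. The store/tokenize choices and the <<<NumberTools>>> encoding of the numeric fields are
+ assumptions for illustration, not a description of the actual converter.
+
++------------------------------------------------------------------------------+
+import org.apache.commons.lang.StringUtils;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumberTools;
+
+import org.apache.maven.archiva.indexer.record.MinimalArtifactIndexRecord;
+import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
+
+public class MinimalRecordToDocumentSketch
+{
+    public static Document toDocument( MinimalArtifactIndexRecord record )
+    {
+        Document document = new Document();
+        // the filename is tokenized so that keyword searches on parts of the path will match
+        document.add( new Field( MinimalIndexRecordFields.FILENAME, record.getFilename(),
+                                 Field.Store.YES, Field.Index.TOKENIZED ) );
+        // numeric values are encoded as sortable strings
+        document.add( new Field( MinimalIndexRecordFields.FILE_SIZE, NumberTools.longToString( record.getSize() ),
+                                 Field.Store.YES, Field.Index.UN_TOKENIZED ) );
+        document.add( new Field( MinimalIndexRecordFields.LAST_MODIFIED, NumberTools.longToString( record.getLastModified() ),
+                                 Field.Store.YES, Field.Index.UN_TOKENIZED ) );
+        if ( record.getMd5Checksum() != null )
+        {
+            // untokenized so an exact TermQuery on the checksum will match
+            document.add( new Field( MinimalIndexRecordFields.MD5, record.getMd5Checksum(),
+                                     Field.Store.YES, Field.Index.UN_TOKENIZED ) );
+        }
+        if ( record.getClasses() != null )
+        {
+            // classes are flattened to a single newline-delimited value, as described above
+            document.add( new Field( MinimalIndexRecordFields.CLASSES, StringUtils.join( record.getClasses().iterator(), "\n" ),
+                                     Field.Store.YES, Field.Index.TOKENIZED ) );
+        }
+        return document;
+    }
+}
++------------------------------------------------------------------------------+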
+
+* Searching
+
+ Searching will be reasonably flexible, though the general use case will be to enter a single parsed query that is
+ applied to all fields in the index.
+
+ Some features that will be available:
+
+ * <Search through most fields for a particular keyword>: the general case described above.
+
+ * <Search by a particular field (exact match)>: This would be needed for search by checksum.
+
+ * <Search in a range of field values>: This would be needed for searching based on update time. Note that in
+ Lucene it may be better to search by other fields (or return all), and then filter the results by dates rather
+ than making dates part of a search query.
+
+ * <Limit search to particular fields>: It will be useful to search only Java classes and packages, for example.
+
+ Note also that, for performance reasons, it should be possible to compose the search results entirely from the index:
+ the search should not have to read any metadata files, or file properties such as size and checksum, from disk.
+ This enables searching a repository remotely without having the physical repository available, which is useful for
+ IDE integration among other things.
+
+ Note that to be able to do an exact match search, a field must be stored untokenized. For fields where it makes sense
+ to search both tokenized and untokenized, they will be stored twice. This currently includes: artifact ID, group ID,
+ and version.
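+
+ For example, a field needing both behaviours might be added to the document twice: once tokenized for keyword
+ queries, and once untokenized (the <<<_u>>> variant) for exact matching with a <<<TermQuery>>>. This is a
+ hypothetical sketch rather than the actual converter code.
+
++------------------------------------------------------------------------------+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
+
+public class ExactMatchFieldSketch
+{
+    public static void addGroupId( Document document, String groupId )
+    {
+        // tokenized copy: matches keyword queries such as "maven" or "apache"
+        document.add( new Field( StandardIndexRecordFields.GROUPID, groupId,
+                                 Field.Store.YES, Field.Index.TOKENIZED ) );
+        // untokenized copy: matches an exact TermQuery on the full group ID
+        document.add( new Field( StandardIndexRecordFields.GROUPID_EXACT, groupId,
+                                 Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+    }
+}
++------------------------------------------------------------------------------+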
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <body>
+ <menu name="Design Documentation">
+ <item name="Indexing Design" href="/design.html"/>
+ </menu>
+ </body>
+</project>
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Test the Lucene implementation of the artifact index search.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo would be nice to abstract some of the query away, but for now passing in a Lucene query directly is good enough
+ */
+public class LuceneMinimalArtifactIndexSearchTest
+ extends PlexusTestCase
+{
+ private RepositoryArtifactIndex index;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private File indexLocation;
+
+ private RepositoryIndexRecordFactory recordFactory;
+
+ private Map records = new HashMap();
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ indexLocation = getTestFile( "target/test-index" );
+
+ FileUtils.deleteDirectory( indexLocation );
+
+ index = factory.createMinimalIndex( indexLocation );
+
+ records.put( "test-jar", recordFactory.createRecord( createArtifact( "test-jar" ) ) );
+ records.put( "test-jar-jdk14",
+ recordFactory.createRecord( createArtifact( "test-jar", "1.0", "jar", "jdk14" ) ) );
+ records.put( "test-jar-and-pom",
+ recordFactory.createRecord( createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" ) ) );
+ records.put( "test-jar-and-pom-jdk14", recordFactory.createRecord(
+ createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" ) ) );
+ records.put( "test-child-pom",
+ recordFactory.createRecord( createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" ) ) );
+ records.put( "test-archetype", recordFactory.createRecord( createArtifact( "test-archetype" ) ) );
+ records.put( "test-plugin", recordFactory.createRecord( createArtifact( "test-plugin" ) ) );
+ records.put( "test-pom", recordFactory.createRecord( createArtifact( "test-pom", "1.0", "pom" ) ) );
+ records.put( "parent-pom", recordFactory.createRecord( createArtifact( "parent-pom", "1", "pom" ) ) );
+ records.put( "test-dll", recordFactory.createRecord( createArtifact( "test-dll", "1.0.1.34", "dll" ) ) );
+
+ index.indexRecords( records.values() );
+ }
+
+ public void testExactMatchMd5()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( MinimalIndexRecordFields.MD5, "3a0adc365f849366cd8b633cad155cb7" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( MinimalIndexRecordFields.MD5, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchFilename()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( MinimalIndexRecordFields.FILENAME, "maven" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 7, results.size() );
+
+ query = createMatchQuery( MinimalIndexRecordFields.FILENAME, "plugin" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createMatchQuery( MinimalIndexRecordFields.FILENAME, "test" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 7, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( MinimalIndexRecordFields.FILENAME, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchClass()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( MinimalIndexRecordFields.CLASSES, "b.c.C" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = createMatchQuery( MinimalIndexRecordFields.CLASSES, "C" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = createMatchQuery( MinimalIndexRecordFields.CLASSES, "MyMojo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( MinimalIndexRecordFields.CLASSES, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ private static Query createExactMatchQuery( String field, String value )
+ {
+ return new TermQuery( new Term( field, value ) );
+ }
+
+ private static Query createMatchQuery( String field, String value )
+ throws ParseException
+ {
+ return new QueryParser( field, LuceneRepositoryArtifactIndex.getAnalyzer() ).parse( value );
+ }
+
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar", null );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ return createArtifact( artifactId, version, type, null );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type, String classifier )
+ {
+ Artifact artifact = artifactFactory.createDependencyArtifact( "org.apache.maven.archiva.record", artifactId,
+ VersionRange.createFromVersion( version ), type,
+ classifier, Artifact.SCOPE_RUNTIME );
+ artifact.isSnapshot();
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.NumberTools;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+
+/**
+ * Test the Lucene implementation of the artifact index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneMinimalArtifactIndexTest
+ extends PlexusTestCase
+{
+ private RepositoryArtifactIndex index;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private File indexLocation;
+
+ private RepositoryIndexRecordFactory recordFactory;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ indexLocation = getTestFile( "target/test-index" );
+
+ FileUtils.deleteDirectory( indexLocation );
+
+ index = factory.createMinimalIndex( indexLocation );
+ }
+
+ public void testIndexExists()
+ throws IOException, RepositoryIndexException
+ {
+ assertFalse( "check index doesn't exist", index.exists() );
+
+ // create empty directory
+ indexLocation.mkdirs();
+ assertFalse( "check index doesn't exist even if directory does", index.exists() );
+
+ // create index, with no records
+ createEmptyIndex();
+ assertTrue( "check index is considered to exist", index.exists() );
+
+ // Test non-directory
+ FileUtils.deleteDirectory( indexLocation );
+ indexLocation.createNewFile();
+ try
+ {
+ index.exists();
+ fail( "Index operation should fail as the location is not valid" );
+ }
+ catch ( RepositoryIndexException e )
+ {
+ // great
+ }
+ finally
+ {
+ indexLocation.delete();
+ }
+ }
+
+ public void testAddRecordNoIndex()
+ throws IOException, RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertEquals( "Check document", repository.pathOf( artifact ),
+ document.get( MinimalIndexRecordFields.FILENAME ) );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddRecordExistingEmptyIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertRecord( document, artifact, "3a0adc365f849366cd8b633cad155cb7", "A\nb.B\nb.c.C" );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddRecordInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ // Do it again
+ record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertRecord( document, artifact, "3a0adc365f849366cd8b633cad155cb7", "A\nb.B\nb.c.C" );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ index.deleteRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordNotInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+
+ index.deleteRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordNoIndex()
+ throws IOException, RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.deleteRecords( Collections.singleton( record ) );
+
+ assertFalse( index.exists() );
+ }
+
+ public void testAddPomRecord()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
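+ // the minimal index only records artifacts with indexable content, so a plain POM is expected to produce no documents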
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddPlugin()
+ throws IOException, RepositoryIndexException, XmlPullParserException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-plugin" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertRecord( document, artifact, "3530896791670ebb45e17708e5d52c40",
+ "org.apache.maven.archiva.record.MyMojo" );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar" );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ Artifact artifact =
+ artifactFactory.createBuildArtifact( "org.apache.maven.archiva.record", artifactId, version, type );
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+
+ private void createEmptyIndex()
+ throws IOException
+ {
+ createIndex( Collections.EMPTY_LIST );
+ }
+
+ private void createIndex( List documents )
+ throws IOException
+ {
+ // the 'true' flag creates a new index, replacing any existing one at this location
+ IndexWriter writer = new IndexWriter( indexLocation, LuceneRepositoryArtifactIndex.getAnalyzer(), true );
+ for ( Iterator i = documents.iterator(); i.hasNext(); )
+ {
+ Document document = (Document) i.next();
+ writer.addDocument( document );
+ }
+ writer.optimize();
+ writer.close();
+ }
+
+ private void assertRecord( Document document, Artifact artifact, String expectedChecksum, String expectedClasses )
+ {
+ assertEquals( "Check document filename", repository.pathOf( artifact ),
+ document.get( MinimalIndexRecordFields.FILENAME ) );
+ assertEquals( "Check document timestamp", getLastModified( artifact.getFile() ),
+ document.get( MinimalIndexRecordFields.LAST_MODIFIED ) );
+ assertEquals( "Check document checksum", expectedChecksum, document.get( MinimalIndexRecordFields.MD5 ) );
+ assertEquals( "Check document size", artifact.getFile().length(),
+ NumberTools.stringToLong( document.get( MinimalIndexRecordFields.FILE_SIZE ) ) );
+ assertEquals( "Check document classes", expectedClasses, document.get( MinimalIndexRecordFields.CLASSES ) );
+ }
+
+ private String getLastModified( File file )
+ {
+ SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
+ dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
+ return dateFormat.format( new Date( file.lastModified() ) );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Test the Lucene implementation of the artifact index search.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo would be nice to abstract some of the query away, but for now passing in a Lucene query directly is good enough
+ */
+public class LuceneStandardArtifactIndexSearchTest
+ extends PlexusTestCase
+{
+ private RepositoryArtifactIndex index;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private File indexLocation;
+
+ private RepositoryIndexRecordFactory recordFactory;
+
+ private Map records = new HashMap();
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ indexLocation = getTestFile( "target/test-index" );
+
+ FileUtils.deleteDirectory( indexLocation );
+
+ index = factory.createStandardIndex( indexLocation );
+
+ records.put( "test-jar", recordFactory.createRecord( createArtifact( "test-jar" ) ) );
+ records.put( "test-jar-jdk14",
+ recordFactory.createRecord( createArtifact( "test-jar", "1.0", "jar", "jdk14" ) ) );
+ records.put( "test-jar-and-pom",
+ recordFactory.createRecord( createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" ) ) );
+ records.put( "test-jar-and-pom-jdk14", recordFactory.createRecord(
+ createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" ) ) );
+ records.put( "test-child-pom",
+ recordFactory.createRecord( createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" ) ) );
+ records.put( "test-archetype", recordFactory.createRecord( createArtifact( "test-archetype" ) ) );
+ records.put( "test-plugin", recordFactory.createRecord( createArtifact( "test-plugin" ) ) );
+ records.put( "test-pom", recordFactory.createRecord( createArtifact( "test-pom", "1.0", "pom" ) ) );
+ records.put( "parent-pom", recordFactory.createRecord( createArtifact( "parent-pom", "1", "pom" ) ) );
+ records.put( "test-dll", recordFactory.createRecord( createArtifact( "test-dll", "1.0.1.34", "dll" ) ) );
+
+ index.indexRecords( records.values() );
+ }
+
+ public void testExactMatchVersion()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "1.0" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "1.0-SNAPSHOT" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "1.0-snapshot" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "1.0-20060728.121314-1" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchBaseVersion()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-SNAPSHOT" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-snapshot" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-20060728.121314-1" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchGroupId()
+ throws RepositoryIndexSearchException
+ {
+ Query query =
+ createExactMatchQuery( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven.archiva.record" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+ // test partial match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.GROUPID_EXACT, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchArtifactId()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.ARTIFACTID_EXACT, "test-jar" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ // test partial match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.ARTIFACTID_EXACT, "test" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.ARTIFACTID_EXACT, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchType()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "maven-plugin" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "jar" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "dll" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "maven-archetype" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchPackaging()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "maven-plugin" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "jar" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 4, results.size() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "dll" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "maven-archetype" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchPluginPrefix()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.PLUGIN_PREFIX, "test" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.PLUGIN_PREFIX, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchRepository()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.REPOSITORY, "test" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.REPOSITORY, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchMd5()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.MD5, "3a0adc365f849366cd8b633cad155cb7" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.MD5, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchSha1()
+ throws RepositoryIndexSearchException
+ {
+ Query query =
+ createExactMatchQuery( StandardIndexRecordFields.SHA1, "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.SHA1, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchInceptionYear()
+ throws RepositoryIndexSearchException
+ {
+ Query query = createExactMatchQuery( StandardIndexRecordFields.INCEPTION_YEAR, "2005" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertEquals( "Check results size", 3, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.INCEPTION_YEAR, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchFilename()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( StandardIndexRecordFields.FILENAME, "maven" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.FILENAME, "plugin" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.FILENAME, "pLuGiN" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createMatchQuery( StandardIndexRecordFields.FILENAME, "test" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertEquals( "Check results size", 9, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.FILENAME, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchGroupId()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( StandardIndexRecordFields.GROUPID, "org.apache.maven.archiva.record" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.GROUPID, "maven" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.GROUPID, "Maven" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.GROUPID, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchArtifactId()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( StandardIndexRecordFields.ARTIFACTID, "plugin" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.ARTIFACTID, "test" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertEquals( "Check results size", 9, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.ARTIFACTID, "maven" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchVersion()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ // If partial matches are desired, the analyzer for versions would need to split tokens on '.'
+ Query query = createMatchQuery( StandardIndexRecordFields.VERSION, "1" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 4, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.VERSION, "1.0" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 8, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.VERSION, "snapshot" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createMatchQuery( StandardIndexRecordFields.VERSION, "SNAPSHOT" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createMatchQuery( StandardIndexRecordFields.VERSION, "alpha" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.VERSION, "1.0-alpha-1" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.VERSION, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchBaseVersion()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ // If partial matches are desired, the analyzer for versions would need to split tokens on '.'
+ Query query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "1" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 3, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "1.0" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 8, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "SNAPSHOT" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "SnApShOt" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "snapshot" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "alpha" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "1.0-alpha-1" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchClassifier()
+ throws RepositoryIndexSearchException, ParseException
+ {
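+ // match all documents but prohibit the jdk14 classifier, i.e. everything without that classifier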
+ BooleanQuery bQuery = new BooleanQuery();
+ bQuery.add( new MatchAllDocsQuery(), BooleanClause.Occur.MUST );
+ bQuery.add( createMatchQuery( StandardIndexRecordFields.CLASSIFIER, "jdk14" ), BooleanClause.Occur.MUST_NOT );
+ List results = index.search( new LuceneQuery( bQuery ) );
+
+ assertFalse( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 8, results.size() );
+
+ // TODO: can we search for "anything with no classifier" ?
+
+ Query query = createMatchQuery( StandardIndexRecordFields.CLASSIFIER, "jdk14" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.CLASSIFIER, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchClass()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( StandardIndexRecordFields.CLASSES, "b.c.C" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.CLASSES, "C" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.CLASSES, "MyMojo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.CLASSES, "MYMOJO" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.CLASSES, "mymojo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.CLASSES, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchFiles()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( StandardIndexRecordFields.FILES, "MANIFEST.MF" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 7, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.FILES, "META-INF" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 7, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.FILES, "plugin.xml" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.FILES, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testExactMatchDependency()
+ throws RepositoryIndexSearchException
+ {
+ Query query =
+ createExactMatchQuery( StandardIndexRecordFields.DEPENDENCIES, "org.apache.maven:maven-plugin-api:2.0" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = createExactMatchQuery( StandardIndexRecordFields.DEPENDENCIES, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchProjectName()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( StandardIndexRecordFields.PROJECT_NAME, "mojo" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = createMatchQuery( StandardIndexRecordFields.PROJECT_NAME, "maven" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.PROJECT_NAME, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchProjectDescription()
+ throws RepositoryIndexSearchException, ParseException
+ {
+ Query query = createMatchQuery( StandardIndexRecordFields.PROJECT_DESCRIPTION, "description" );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertEquals( "Check results size", 3, results.size() );
+
+ // test non-match fails
+ query = createMatchQuery( StandardIndexRecordFields.PROJECT_DESCRIPTION, "foo" );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ private static Query createExactMatchQuery( String field, String value )
+ {
+ return new TermQuery( new Term( field, value ) );
+ }
+
+ private static Query createMatchQuery( String field, String value )
+ throws ParseException
+ {
+ return new QueryParser( field, LuceneRepositoryArtifactIndex.getAnalyzer() ).parse( value );
+ }
+
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar", null );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ return createArtifact( artifactId, version, type, null );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type, String classifier )
+ {
+ Artifact artifact = artifactFactory.createDependencyArtifact( "org.apache.maven.archiva.record", artifactId,
+ VersionRange.createFromVersion( version ), type,
+ classifier, Artifact.SCOPE_RUNTIME );
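+ // isSnapshot() is invoked only for its side effect (it appears to force the base version to be derived for timestamped snapshots), not for the return value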
+ artifact.isSnapshot();
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.lucene;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.NumberTools;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+
+/**
+ * Test the Lucene implementation of the artifact index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneStandardArtifactIndexTest
+ extends PlexusTestCase
+{
+ private RepositoryArtifactIndex index;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private File indexLocation;
+
+ private RepositoryIndexRecordFactory recordFactory;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ indexLocation = getTestFile( "target/test-index" );
+
+ FileUtils.deleteDirectory( indexLocation );
+
+ index = factory.createStandardIndex( indexLocation );
+ }
+
+ public void testIndexExists()
+ throws IOException, RepositoryIndexException
+ {
+ assertFalse( "check index doesn't exist", index.exists() );
+
+ // create empty directory
+ indexLocation.mkdirs();
+ assertFalse( "check index doesn't exist even if directory does", index.exists() );
+
+ // create index, with no records
+ createEmptyIndex();
+ assertTrue( "check index is considered to exist", index.exists() );
+
+ // Test non-directory
+ FileUtils.deleteDirectory( indexLocation );
+ indexLocation.createNewFile();
+ try
+ {
+ index.exists();
+ fail( "Index operation should fail as the location is not valid" );
+ }
+ catch ( RepositoryIndexException e )
+ {
+ // great
+ }
+ finally
+ {
+ indexLocation.delete();
+ }
+ }
+
+ public void testAddRecordNoIndex()
+ throws IOException, RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertJarRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddRecordExistingEmptyIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertJarRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddRecordInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ // Do it again
+ record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertJarRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddPomRecord()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertPomRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddPlugin()
+ throws IOException, RepositoryIndexException, XmlPullParserException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-plugin" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertPluginRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ index.deleteRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordNotInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+
+ index.deleteRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordNoIndex()
+ throws IOException, RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.deleteRecords( Collections.singleton( record ) );
+
+ assertFalse( index.exists() );
+ }
+
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar" );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ Artifact artifact =
+ artifactFactory.createBuildArtifact( "org.apache.maven.archiva.record", artifactId, version, type );
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+
+ private void createEmptyIndex()
+ throws IOException
+ {
+ createIndex( Collections.EMPTY_LIST );
+ }
+
+ private void createIndex( List documents )
+ throws IOException
+ {
+ // the 'true' flag creates a new index, replacing any existing one at this location
+ IndexWriter writer = new IndexWriter( indexLocation, LuceneRepositoryArtifactIndex.getAnalyzer(), true );
+ for ( Iterator i = documents.iterator(); i.hasNext(); )
+ {
+ Document document = (Document) i.next();
+ writer.addDocument( document );
+ }
+ writer.optimize();
+ writer.close();
+ }
+
+ private void assertRecord( Artifact artifact, Document document, String expectedArtifactId, String expectedType,
+ String expectedMd5, String expectedSha1 )
+ {
+ assertEquals( "Check document filename", repository.pathOf( artifact ),
+ document.get( StandardIndexRecordFields.FILENAME ) );
+ assertEquals( "Check document groupId", "org.apache.maven.archiva.record",
+ document.get( StandardIndexRecordFields.GROUPID ) );
+ assertEquals( "Check document artifactId", expectedArtifactId,
+ document.get( StandardIndexRecordFields.ARTIFACTID ) );
+ assertEquals( "Check document version", "1.0", document.get( StandardIndexRecordFields.VERSION ) );
+ assertEquals( "Check document type", expectedType, document.get( StandardIndexRecordFields.TYPE ) );
+ assertEquals( "Check document repository", "test", document.get( StandardIndexRecordFields.REPOSITORY ) );
+ assertEquals( "Check document timestamp", getLastModified( artifact.getFile() ),
+ document.get( StandardIndexRecordFields.LAST_MODIFIED ) );
+ assertEquals( "Check document md5", expectedMd5, document.get( StandardIndexRecordFields.MD5 ) );
+ assertEquals( "Check document sha1", expectedSha1, document.get( StandardIndexRecordFields.SHA1 ) );
+ assertEquals( "Check document file size", artifact.getFile().length(),
+ NumberTools.stringToLong( document.get( StandardIndexRecordFields.FILE_SIZE ) ) );
+ assertNull( "Check document classifier", document.get( StandardIndexRecordFields.CLASSIFIER ) );
+ }
+
+ private void assertPomRecord( Artifact artifact, Document document )
+ {
+ assertRecord( artifact, document, "test-pom", "pom", "758e1ae96dff63dab7278a62e3eb174d",
+ "770fde06cd5c3dccb5f5e8c6754b8c4c77b98560" );
+ assertNull( "Check document classes", document.get( StandardIndexRecordFields.CLASSES ) );
+ assertNull( "Check document files", document.get( StandardIndexRecordFields.FILES ) );
+ assertNull( "Check document pluginPrefix", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
+ assertEquals( "Check document year", "2005", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
+ assertEquals( "Check document project name", "Maven Repository Manager Test POM",
+ document.get( StandardIndexRecordFields.PROJECT_NAME ) );
+ assertEquals( "Check document project description", "Description",
+ document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
+ assertEquals( "Check document packaging", "pom", document.get( StandardIndexRecordFields.PACKAGING ) );
+ }
+
+ private void assertJarRecord( Artifact artifact, Document document )
+ {
+ assertRecord( artifact, document, "test-jar", "jar", "3a0adc365f849366cd8b633cad155cb7",
+ "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ assertEquals( "Check document classes", "A\nb.B\nb.c.C", document.get( StandardIndexRecordFields.CLASSES ) );
+ assertEquals( "Check document files", "META-INF/MANIFEST.MF\nA.class\nb/B.class\nb/c/C.class",
+ document.get( StandardIndexRecordFields.FILES ) );
+ assertNull( "Check document inceptionYear", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
+ assertNull( "Check document projectName", document.get( StandardIndexRecordFields.PROJECT_NAME ) );
+ assertNull( "Check document projectDesc", document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
+ assertNull( "Check document pluginPrefix", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
+ assertNull( "Check document packaging", document.get( StandardIndexRecordFields.PACKAGING ) );
+ }
+
+ private void assertPluginRecord( Artifact artifact, Document document )
+ {
+ assertRecord( artifact, document, "test-plugin", "maven-plugin", "3530896791670ebb45e17708e5d52c40",
+ "2cd2619d59a684e82e97471d2c2e004144c8f24e" );
+ assertEquals( "Check document classes", "org.apache.maven.archiva.record.MyMojo",
+ document.get( StandardIndexRecordFields.CLASSES ) );
+ assertEquals( "Check document files", "META-INF/MANIFEST.MF\n" +
+ "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.properties\n" +
+ "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.xml\n" + "META-INF/maven/plugin.xml\n" +
+ "org/apache/maven/archiva/record/MyMojo.class", document.get( StandardIndexRecordFields.FILES ) );
+ assertEquals( "Check document pluginPrefix", "test", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
+ assertEquals( "Check document packaging", "maven-plugin", document.get( StandardIndexRecordFields.PACKAGING ) );
+ assertNull( "Check document inceptionYear", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
+ assertEquals( "Check document project name", "Maven Mojo Archetype",
+ document.get( StandardIndexRecordFields.PROJECT_NAME ) );
+ assertNull( "Check document projectDesc", document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
+ }
+
+ private String getLastModified( File file )
+ {
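+ // Format the file timestamp as UTC "yyyyMMddHHmmss", the same representation compared against the index's LAST_MODIFIED field above.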
+ SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
+ dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
+ return dateFormat.format( new Date( file.lastModified() ) );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.query;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.TestCase;
+
+import java.util.Iterator;
+
+/**
+ * @author Brett Porter
+ */
+public class QueryTest
+ extends TestCase
+{
+ private QueryTerm term1 = new QueryTerm( "field1", "value1" );
+
+ private QueryTerm term2 = new QueryTerm( "field2", "value2" );
+
+ private QueryTerm term3 = new QueryTerm( "field3", "value3" );
+
+ public void testQueryTerm()
+ {
+ QueryTerm query = new QueryTerm( "Field", "Value" );
+ assertEquals( "check field setting", "Field", query.getField() );
+ assertEquals( "check value setting", "Value", query.getValue() );
+ }
+
+ public void testSingleTermQuery()
+ {
+ SingleTermQuery query = new SingleTermQuery( "Field", "Value" );
+ assertEquals( "check field setting", "Field", query.getField() );
+ assertEquals( "check value setting", "Value", query.getValue() );
+
+ query = new SingleTermQuery( term1 );
+ assertEquals( "check field setting", "field1", query.getField() );
+ assertEquals( "check value setting", "value1", query.getValue() );
+ }
+
+ public void testRangeQueryOpen()
+ {
+ RangeQuery rangeQuery = RangeQuery.createOpenRange();
+ assertNull( "Check range has no start", rangeQuery.getBegin() );
+ assertNull( "Check range has no end", rangeQuery.getEnd() );
+ }
+
+ public void testRangeQueryExclusive()
+ {
+ RangeQuery rangeQuery = RangeQuery.createExclusiveRange( term1, term2 );
+ assertEquals( "Check range start", term1, rangeQuery.getBegin() );
+ assertEquals( "Check range end", term2, rangeQuery.getEnd() );
+ assertFalse( "Check exclusive", rangeQuery.isInclusive() );
+ }
+
+ public void testRangeQueryInclusive()
+ {
+ RangeQuery rangeQuery = RangeQuery.createInclusiveRange( term1, term2 );
+ assertEquals( "Check range start", term1, rangeQuery.getBegin() );
+ assertEquals( "Check range end", term2, rangeQuery.getEnd() );
+ assertTrue( "Check inclusive", rangeQuery.isInclusive() );
+ }
+
+ public void testRangeQueryOpenEnded()
+ {
+ RangeQuery rangeQuery = RangeQuery.createGreaterThanOrEqualToRange( term1 );
+ assertEquals( "Check range start", term1, rangeQuery.getBegin() );
+ assertNull( "Check range end", rangeQuery.getEnd() );
+ assertTrue( "Check inclusive", rangeQuery.isInclusive() );
+
+ rangeQuery = RangeQuery.createGreaterThanRange( term1 );
+ assertEquals( "Check range start", term1, rangeQuery.getBegin() );
+ assertNull( "Check range end", rangeQuery.getEnd() );
+ assertFalse( "Check exclusive", rangeQuery.isInclusive() );
+
+ rangeQuery = RangeQuery.createLessThanOrEqualToRange( term1 );
+ assertNull( "Check range start", rangeQuery.getBegin() );
+ assertEquals( "Check range end", term1, rangeQuery.getEnd() );
+ assertTrue( "Check inclusive", rangeQuery.isInclusive() );
+
+ rangeQuery = RangeQuery.createLessThanRange( term1 );
+ assertNull( "Check range start", rangeQuery.getBegin() );
+ assertEquals( "Check range end", term1, rangeQuery.getEnd() );
+ assertFalse( "Check exclusive", rangeQuery.isInclusive() );
+ }
+
+ public void testCompoundQuery()
+ {
+ CompoundQuery query = new CompoundQuery();
+ assertTrue( "check query is empty", query.getCompoundQueryTerms().isEmpty() );
+
+ query.and( term1 );
+ query.or( term2 );
+ query.not( term3 );
+
+ Iterator i = query.getCompoundQueryTerms().iterator();
+ CompoundQueryTerm term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check first term", "field1", getQuery( term ).getField() );
+ assertEquals( "Check first term", "value1", getQuery( term ).getValue() );
+ assertTrue( "Check first term", term.isRequired() );
+ assertFalse( "Check first term", term.isProhibited() );
+
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check second term", "field2", getQuery( term ).getField() );
+ assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
+ assertFalse( "Check second term", term.isRequired() );
+ assertFalse( "Check second term", term.isProhibited() );
+
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check third term", "field3", getQuery( term ).getField() );
+ assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
+ assertFalse( "Check third term", term.isRequired() );
+ assertTrue( "Check third term", term.isProhibited() );
+
+ CompoundQuery query2 = new CompoundQuery();
+ query2.and( query );
+ query2.or( new SingleTermQuery( term2 ) );
+ query2.not( new SingleTermQuery( term3 ) );
+
+ i = query2.getCompoundQueryTerms().iterator();
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check first term", query, term.getQuery() );
+ assertTrue( "Check first term", term.isRequired() );
+ assertFalse( "Check first term", term.isProhibited() );
+
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check second term", "field2", getQuery( term ).getField() );
+ assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
+ assertFalse( "Check second term", term.isRequired() );
+ assertFalse( "Check second term", term.isProhibited() );
+
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check third term", "field3", getQuery( term ).getField() );
+ assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
+ assertFalse( "Check third term", term.isRequired() );
+ assertTrue( "Check third term", term.isProhibited() );
+ }
+
+ private static SingleTermQuery getQuery( CompoundQueryTerm term )
+ {
+ return (SingleTermQuery) term.getQuery();
+ }
+}
+
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Test the minimal artifact index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class MinimalArtifactIndexRecordFactoryTest
+ extends PlexusTestCase
+{
+ private RepositoryIndexRecordFactory factory;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private static final String TEST_GROUP_ID = "org.apache.maven.archiva.record";
+
+ private static final List JAR_CLASS_LIST = Arrays.asList( new String[]{"A", "b.B", "b.c.C"} );
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ factory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+ }
+
+ public void testIndexedJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarWithClassifier()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar", "1.0", "jar", "jdk14" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarAndPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarAndPomWithClassifier()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+ }
+
+ public void testNonIndexedPom()
+ throws RepositoryIndexException
+ {
+ // If we pass in only the POM that belongs to a JAR, expect null rather than a record for the POM
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "pom" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-plugin", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-archetype", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-skin", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+ }
+
+ public void testIndexedPlugin()
+ throws RepositoryIndexException, IOException, XmlPullParserException
+ {
+ Artifact artifact = createArtifact( "test-plugin" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3530896791670ebb45e17708e5d52c40" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( Collections.singletonList( "org.apache.maven.archiva.record.MyMojo" ) );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testCorruptJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-corrupt-jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ public void testNonJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-dll", "1.0.1.34", "dll" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ public void testMissingFile()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-foo" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar" );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ return createArtifact( artifactId, version, type, null );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type, String classifier )
+ {
+ Artifact artifact = artifactFactory.createDependencyArtifact( TEST_GROUP_ID, artifactId,
+ VersionRange.createFromVersion( version ), type,
+ classifier, Artifact.SCOPE_RUNTIME );
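+ // Calling isSnapshot() appears to be done only for its side effect of resolving the snapshot base version before the path and record are built.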
+ artifact.isSnapshot();
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexer.record;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Test the standard artifact index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class StandardArtifactIndexRecordFactoryTest
+ extends PlexusTestCase
+{
+ private RepositoryIndexRecordFactory factory;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private static final String TEST_GROUP_ID = "org.apache.maven.archiva.record";
+
+ private static final List JAR_CLASS_LIST = Arrays.asList( new String[]{"A", "b.B", "b.c.C"} );
+
+ private static final List JAR_FILE_LIST =
+ Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "A.class", "b/B.class", "b/c/C.class"} );
+
+ private static final String JUNIT_DEPENDENCY = "junit:junit:3.8.1";
+
+ private static final String PLUGIN_API_DEPENDENCY = "org.apache.maven:maven-plugin-api:2.0";
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ factory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+ }
+
+ public void testIndexedJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-jar" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarWithClassifier()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar", "1.0", "jar", "jdk14" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-jar" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setClassifier( "jdk14" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarAndPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-jar-and-pom" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0-alpha-1" );
+ expectedRecord.setVersion( "1.0-alpha-1" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Test JAR and POM" );
+ expectedRecord.setDependencies( createDependencies() );
+ expectedRecord.setDevelopers( createDevelopers() );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarAndPomWithClassifier()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-jar-and-pom" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0-alpha-1" );
+ expectedRecord.setVersion( "1.0-alpha-1" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Test JAR and POM" );
+ expectedRecord.setClassifier( "jdk14" );
+ expectedRecord.setDependencies( createDependencies() );
+ expectedRecord.setDevelopers( createDevelopers() );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarWithParentPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-child-pom" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0-SNAPSHOT" );
+ expectedRecord.setVersion( "1.0-20060728.121314-1" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Child Project" );
+ expectedRecord.setProjectDescription( "Description" );
+ expectedRecord.setInceptionYear( "2005" );
+ expectedRecord.setDependencies( Collections.singletonList( JUNIT_DEPENDENCY ) );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "758e1ae96dff63dab7278a62e3eb174d" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-pom" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setSha1Checksum( "770fde06cd5c3dccb5f5e8c6754b8c4c77b98560" );
+ expectedRecord.setType( "pom" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setPackaging( "pom" );
+ expectedRecord.setInceptionYear( "2005" );
+ expectedRecord.setProjectName( "Maven Repository Manager Test POM" );
+ expectedRecord.setProjectDescription( "Description" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testNonIndexedPom()
+ throws RepositoryIndexException
+ {
+ // If we pass in only the POM that belongs to a JAR, expect null rather than a record for the POM
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-plugin", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-archetype", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-skin", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+ }
+
+ public void testIndexedPlugin()
+ throws RepositoryIndexException, IOException, XmlPullParserException
+ {
+ Artifact artifact = createArtifact( "test-plugin" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3530896791670ebb45e17708e5d52c40" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-plugin" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setSha1Checksum( "2cd2619d59a684e82e97471d2c2e004144c8f24e" );
+ expectedRecord.setType( "maven-plugin" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setClasses( Arrays.asList( new String[]{"org.apache.maven.archiva.record.MyMojo"} ) );
+ expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF",
+ "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.properties",
+ "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.xml", "META-INF/maven/plugin.xml",
+ "org/apache/maven/archiva/record/MyMojo.class"} ) );
+ expectedRecord.setPackaging( "maven-plugin" );
+ expectedRecord.setProjectName( "Maven Mojo Archetype" );
+ expectedRecord.setPluginPrefix( "test" );
+ expectedRecord.setDependencies( Arrays.asList( new String[]{JUNIT_DEPENDENCY, PLUGIN_API_DEPENDENCY} ) );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedArchetype()
+ throws RepositoryIndexException, IOException, XmlPullParserException
+ {
+ Artifact artifact = createArtifact( "test-archetype" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "52b7ea4b53818b8a5f4c329d88fd60d9" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-archetype" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setSha1Checksum( "05841f5e51c124f1729d86c1687438c36b9255d9" );
+ expectedRecord.setType( "maven-archetype" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "META-INF/maven/archetype.xml",
+ "META-INF/maven/org.apache.maven.archiva.record/test-archetype/pom.properties",
+ "META-INF/maven/org.apache.maven.archiva.record/test-archetype/pom.xml", "archetype-resources/pom.xml",
+ "archetype-resources/src/main/java/App.java", "archetype-resources/src/test/java/AppTest.java"} ) );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Archetype - test-archetype" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedSkin()
+ throws RepositoryIndexException, IOException, XmlPullParserException
+ {
+ Artifact artifact = createArtifact( "test-skin" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "ba2d8a722f763db2950ad63119585f45" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-skin" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setSha1Checksum( "44855e3e56c18ce766db315a2d4c114d7a8c8ab0" );
+ expectedRecord.setType( "maven-skin" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "css/maven-theme.css",
+ "META-INF/maven/org.apache.maven.skins/test-skin/pom.xml",
+ "META-INF/maven/org.apache.maven.skins/test-skin/pom.properties"} ) );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Skin - test-skin" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testCorruptJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-corrupt-jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ public void testDll()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-dll", "1.0.1.34", "dll" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "d41d8cd98f00b204e9800998ecf8427e" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-dll" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0.1.34" );
+ expectedRecord.setVersion( "1.0.1.34" );
+ expectedRecord.setSha1Checksum( "da39a3ee5e6b4b0d3255bfef95601890afd80709" );
+ expectedRecord.setType( "dll" );
+ expectedRecord.setRepository( "test" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testMissingFile()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-foo" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar" );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ return createArtifact( artifactId, version, type, null );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type, String classifier )
+ {
+ Artifact artifact = artifactFactory.createDependencyArtifact( TEST_GROUP_ID, artifactId,
+ VersionRange.createFromVersion( version ), type,
+ classifier, Artifact.SCOPE_RUNTIME );
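+ // Calling isSnapshot() appears to force resolution of the base version for timestamped snapshots (e.g. 1.0-SNAPSHOT from 1.0-20060728.121314-1), which testIndexedJarWithParentPom relies on.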
+ artifact.isSnapshot();
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+
+ private static List createDevelopers()
+ {
+ List developers = new ArrayList();
+ developers.add( "brett:Brett Porter:brett@apache.org" );
+ return developers;
+ }
+
+ private static List createDependencies()
+ {
+ List dependencies = new ArrayList();
+ dependencies.add( JUNIT_DEPENDENCY );
+ dependencies.add( "org.apache.maven:maven-project:2.0" );
+ return dependencies;
+ }
+}
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>parent-pom</artifactId>
+ <version>1</version>
+ <packaging>pom</packaging>
+ <name>Test Parent POM</name>
+ <description>Description</description>
+ <inceptionYear>2005</inceptionYear>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <modules>
+ <module>test-child-pom</module>
+ </modules>
+</project>
+
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-archetype</artifactId>
+ <version>1.0</version>
+ <name>Archetype - test-archetype</name>
+</project>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>parent-pom</artifactId>
+ <version>1</version>
+ </parent>
+ <artifactId>test-child-pom</artifactId>
+ <version>1.0-20060731-121314-1</version>
+ <name>Child Project</name>
+</project>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-jar-and-pom</artifactId>
+ <version>1.0-alpha-1</version>
+ <name>Test JAR and POM</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-project</artifactId>
+ <version>2.0</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <developers>
+ <developer>
+ <id>brett</id>
+ <name>Brett Porter</name>
+ <email>brett@apache.org</email>
+ <roles>
+ <role>Developer</role>
+ </roles>
+ </developer>
+ </developers>
+</project>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-plugin</artifactId>
+ <packaging>maven-plugin</packaging>
+ <version>1.0</version>
+ <name>Maven Mojo Archetype</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-plugin-api</artifactId>
+ <version>2.0</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-pom</artifactId>
+ <version>1.0</version>
+ <name>Maven Repository Manager Test POM</name>
+ <inceptionYear>2005</inceptionYear>
+ <description>Description</description>
+ <packaging>pom</packaging>
+</project>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-skin</artifactId>
+ <version>1.0</version>
+ <name>Skin - test-skin</name>
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <parent>
+ <artifactId>maven</artifactId>
+ <groupId>org.apache.maven</groupId>
+ <version>2.0.1</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-artifact</artifactId>
+ <name>Maven Artifact</name>
+ <version>2.0.1</version>
+ <licenses>
+ <license>
+ <name>The Apache Software License, Version 2.0</name>
+ <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <dependencies>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-utils</artifactId>
+ <version>1.0.5</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-container-default</artifactId>
+ <version>1.0-alpha-9</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <status>deployed</status>
+ </distributionManagement>
+ <reporting>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-checkstyle-plugin</artifactId>
+ <version>2.0</version>
+ </plugin>
+ </plugins>
+ </reporting>
+</project>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-artifact</artifactId>
+ <version>2.0.1</version>
+</metadata>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-artifact</artifactId>
+ <version>2.0.1</version>
+ <versioning>
+ <release>2.0.1</release>
+ <versions>
+ <version>2.0.1</version>
+ </versions>
+ <lastUpdated>20051212044643</lastUpdated>
+ </versioning>
+</metadata>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <parent>
+ <artifactId>maven</artifactId>
+ <groupId>org.apache.maven</groupId>
+ <version>2.0</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-corrupt-jar</artifactId>
+ <name>Maven Model</name>
+ <version>2.0</version>
+ <description>Maven Model</description>
+ <licenses>
+ <license>
+ <name>The Apache Software License, Version 2.0</name>
+ <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.modello</groupId>
+ <artifactId>modello-maven-plugin</artifactId>
+ <version>2.0</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>xpp3-writer</goal>
+ <goal>java</goal>
+ <goal>xpp3-reader</goal>
+ <goal>xsd</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <version>4.0.0</version>
+ <model>maven.mdo</model>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <profiles>
+ <profile>
+ <id>all-models</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.modello</groupId>
+ <artifactId>modello-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>v3</id>
+ <goals>
+ <goal>xpp3-writer</goal>
+ <goal>java</goal>
+ <goal>xpp3-reader</goal>
+ <goal>xsd</goal>
+ </goals>
+ <configuration>
+ <version>3.0.0</version>
+ <packageWithVersion>true</packageWithVersion>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ <configuration>
+ <classifier>all</classifier>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+ <dependencies>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-utils</artifactId>
+ <version>1.0.5</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <status>deployed</status>
+ </distributionManagement>
+</project>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven</groupId>
+ <plugins>
+ <plugin>
+ <prefix>org.apache.maven</prefix>
+ <artifactId>org.apache.maven-maven-plugin</artifactId>
+ </plugin>
+ </plugins>
+</metadata>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <parent>
+ <artifactId>maven</artifactId>
+ <groupId>org.apache.maven</groupId>
+ <version>2.0</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-model</artifactId>
+ <name>Maven Model</name>
+ <version>2.0</version>
+ <description>Maven Model</description>
+ <licenses>
+ <license>
+ <name>The Apache Software License, Version 2.0</name>
+ <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.modello</groupId>
+ <artifactId>modello-maven-plugin</artifactId>
+ <version>2.0</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>xpp3-writer</goal>
+ <goal>java</goal>
+ <goal>xpp3-reader</goal>
+ <goal>xsd</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <version>4.0.0</version>
+ <model>maven.mdo</model>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <profiles>
+ <profile>
+ <id>all-models</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.modello</groupId>
+ <artifactId>modello-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>v3</id>
+ <goals>
+ <goal>xpp3-writer</goal>
+ <goal>java</goal>
+ <goal>xpp3-reader</goal>
+ <goal>xsd</goal>
+ </goals>
+ <configuration>
+ <version>3.0.0</version>
+ <packageWithVersion>true</packageWithVersion>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ <configuration>
+ <classifier>all</classifier>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+ <dependencies>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-utils</artifactId>
+ <version>1.0.5</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <status>deployed</status>
+ </distributionManagement>
+</project>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>test.inherited</groupId>
+ <version>1.0.15</version>
+ <artifactId>test-inherited-parent</artifactId>
+ </parent>
+ <!-- groupId and version are inherited -->
+ <artifactId>test-inherited</artifactId>
+ <packaging>pom</packaging>
+</project>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>test</groupId>
+ <plugins>
+ <plugin>
+ <prefix></prefix>
+ <artifactId>test-test-plugin</artifactId>
+ </plugin>
+ </plugins>
+</metadata>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>test</groupId>
+ <artifactId>test-artifactId</artifactId>
+ <version>1.0</version>
+</project>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.apache.maven.archiva</groupId>
+ <artifactId>archiva</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>archiva-proxy</artifactId>
+ <name>Archiva Proxy</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.maven.archiva</groupId>
+ <artifactId>archiva-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-file</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-provider-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-digest</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>easymock</groupId>
+ <artifactId>easymock</artifactId>
+ <version>1.2_Java1.3</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.maven.archiva.common.artifact.builder.BuilderException;
+import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
+import org.apache.maven.model.DistributionManagement;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.Relocation;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.apache.maven.wagon.ConnectionException;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authentication.AuthenticationException;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.apache.maven.wagon.observers.ChecksumObserver;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.apache.maven.wagon.repository.Repository;
+import org.codehaus.plexus.digest.DigestUtils;
+import org.codehaus.plexus.digest.DigesterException;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Reader;
+import java.security.NoSuchAlgorithmException;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.TimeZone;
+
+/**
+ * An implementation of the proxy handler. This class is not thread safe (the class itself is, but the wagons it uses
+ * are not) - it is declared <code>per-lookup</code> for that reason.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component instantiation-strategy="per-lookup"
+ * @todo use wagonManager for the cache, using file:// as the URL
+ * @todo this currently duplicates a lot of the wagon manager, and doesn't do things like snapshot resolution, etc.
+ * The checksum handling is inconsistent with that of the wagon manager.
+ * Should we have a more artifact-based one? This will merge metadata so should behave correctly, and it is able to
+ * correct some limitations of the wagon manager (e.g., it can retrieve newer SNAPSHOT files without metadata).
+ */
+public class DefaultProxyRequestHandler
+ extends AbstractLogEnabled
+ implements ProxyRequestHandler
+{
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactFactory factory;
+
+ /**
+ * @plexus.requirement role-hint="default"
+ * @todo use a map, and have priorities in it.
+ */
+ private LayoutArtifactBuilder defaultArtifactBuilder;
+
+ /**
+ * @plexus.requirement role-hint="legacy"
+ */
+ private LayoutArtifactBuilder legacyArtifactBuilder;
+
+ /**
+ * @plexus.requirement role="org.apache.maven.wagon.Wagon"
+ */
+ private Map/*<String,Wagon>*/ wagons;
+
+ private static final TimeZone UTC_TIMEZONE = TimeZone.getTimeZone( "UTC" );
+
+ public File get( String path, List proxiedRepositories, ArtifactRepository managedRepository )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ return get( path, proxiedRepositories, managedRepository, null );
+ }
+
+ public File get( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ return get( managedRepository, path, proxiedRepositories, wagonProxy, false );
+ }
+
+ public File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ return getAlways( path, proxiedRepositories, managedRepository, null );
+ }
+
+ public File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository,
+ ProxyInfo wagonProxy )
+ throws ResourceDoesNotExistException, ProxyException
+ {
+ return get( managedRepository, path, proxiedRepositories, wagonProxy, true );
+ }
+
+ private File get( ArtifactRepository managedRepository, String path, List proxiedRepositories, ProxyInfo wagonProxy,
+ boolean force )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ File target = new File( managedRepository.getBasedir(), path );
+
+ if ( path.endsWith( "maven-metadata.xml" ) )
+ {
+ // Request for managed repository metadata
+ getMetadata( path, target, proxiedRepositories, managedRepository, wagonProxy, force );
+ }
+ else
+ {
+ boolean checksum = false;
+ String checksumExtension = null;
+ String artifactPath = path;
+ if ( path.endsWith( ".md5" ) || path.endsWith( ".sha1" ) )
+ {
+ int index = path.lastIndexOf( '.' );
+ checksumExtension = path.substring( index + 1 );
+ checksum = true;
+ artifactPath = path.substring( 0, index );
+ }
+
+ String msg = "";
+
+ // Request for artifact: parse the requested path to build an Artifact.
+ Artifact artifact = null;
+ try
+ {
+ artifact = defaultArtifactBuilder.build( artifactPath );
+ getLogger().debug( "Artifact requested is: " + artifact );
+ }
+ catch ( BuilderException e )
+ {
+ msg = "Failed to build artifact from path:\n\tfrom default: " + e.getMessage();
+ }
+
+ if ( artifact == null )
+ {
+ try
+ {
+ artifact = legacyArtifactBuilder.build( artifactPath );
+ getLogger().debug( "Artifact requested is: " + artifact );
+ }
+ catch ( BuilderException e )
+ {
+ getLogger().debug( msg + "\n\tfrom legacy: " + e.getMessage() );
+ }
+ }
+
+ if ( artifact != null )
+ {
+ applyRelocation( managedRepository, artifact, proxiedRepositories, wagonProxy, force );
+
+ if ( !checksum )
+ {
+ // Build the target file name
+ target = new File( managedRepository.getBasedir(), managedRepository.pathOf( artifact ) );
+
+ // Get the requested artifact from proxiedRepositories
+ getArtifactFromRepository( managedRepository, target, artifact, proxiedRepositories, wagonProxy,
+ force );
+ }
+ else
+ {
+ // Just adjust the filename for relocation, don't actually get it
+ target = new File( managedRepository.getBasedir(),
+ managedRepository.pathOf( artifact ) + "." + checksumExtension );
+ }
+ }
+ else if ( !checksum )
+ {
+ // Some other unknown file in the repository, proxy as is, unless it was a checksum
+ if ( force || !target.exists() )
+ {
+ getFileFromRepository( managedRepository, target, path, proxiedRepositories, wagonProxy, force );
+ }
+ }
+ }
+
+ if ( !target.exists() )
+ {
+ throw new ResourceDoesNotExistException( "Could not find " + path + " in any of the repositories." );
+ }
+
+ return target;
+ }
+
+ private void getFileFromRepository( ArtifactRepository managedRepository, File target, String path,
+ List proxiedRepositories, ProxyInfo wagonProxy, boolean force )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
+ {
+ ProxiedArtifactRepository repository = (ProxiedArtifactRepository) i.next();
+
+ if ( !force && repository.isCachedFailure( path ) )
+ {
+ processCachedRepositoryFailure( repository, "Cached failure found for: " + path );
+ }
+ else
+ {
+ ArtifactRepositoryPolicy policy = repository.getRepository().getReleases();
+ getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, target, policy,
+ force );
+ }
+ }
+ }
+
+ private void getArtifactFromRepository( ArtifactRepository managedRepository, File target, Artifact artifact,
+ List proxiedRepositories, ProxyInfo wagonProxy, boolean force )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
+ {
+ ProxiedArtifactRepository repository = (ProxiedArtifactRepository) i.next();
+ String path = repository.getRepository().getLayout().pathOf( artifact );
+
+ if ( !force && repository.isCachedFailure( path ) )
+ {
+ processCachedRepositoryFailure( repository, "Cached failure found for: " + path );
+ }
+ else
+ {
+ get( artifact, target, repository, managedRepository, wagonProxy, force );
+ }
+ }
+ }
+
+ private void applyRelocation( ArtifactRepository managedRepository, Artifact artifact, List proxiedRepositories,
+ ProxyInfo wagonProxy, boolean force )
+ {
+ Artifact pomArtifact =
+ factory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
+
+ File pomFile = new File( managedRepository.getBasedir(), managedRepository.pathOf( pomArtifact ) );
+ try
+ {
+ getArtifactFromRepository( managedRepository, pomFile, pomArtifact, proxiedRepositories, wagonProxy,
+ force );
+ }
+ catch ( ProxyException e )
+ {
+ getLogger().warn( "Error getting POM for artifact - not relocating: " + e.getMessage() );
+ getLogger().debug( "Cause", e );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ getLogger().debug( "Remote POM not found for artifact - not relocating" );
+ }
+
+ if ( pomFile.exists() )
+ {
+ Model model = null;
+ Reader reader = null;
+ try
+ {
+ // Parse the pom and look at relocation metadata
+ reader = new FileReader( pomFile );
+ model = new MavenXpp3Reader().read( reader );
+ }
+ catch ( IOException e )
+ {
+ getLogger().warn( "Error reading POM for artifact - not relocating: " + e.getMessage() );
+ getLogger().debug( "Cause", e );
+ }
+ catch ( XmlPullParserException e )
+ {
+ getLogger().warn( "Error parsing POM for artifact - not relocating: " + e.getMessage() );
+ getLogger().debug( "Cause", e );
+ }
+ finally
+ {
+ // always close the POM reader to avoid leaking file handles
+ IOUtils.closeQuietly( reader );
+ }
+
+ if ( model != null )
+ {
+ DistributionManagement dist;
+ dist = model.getDistributionManagement();
+
+ if ( dist != null )
+ {
+ Relocation relocation = dist.getRelocation();
+ if ( relocation != null )
+ {
+ String requestedId =
+ artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion();
+
+ // artifact is relocated : update the artifact
+ if ( relocation.getGroupId() != null )
+ {
+ artifact.setGroupId( relocation.getGroupId() );
+ }
+ if ( relocation.getArtifactId() != null )
+ {
+ artifact.setArtifactId( relocation.getArtifactId() );
+ }
+ if ( relocation.getVersion() != null )
+ {
+ artifact.setVersion( relocation.getVersion() );
+ }
+
+ String relocatedId =
+ artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion();
+
+ getLogger().debug( "Artifact " + requestedId + " has been relocated to " + relocatedId +
+ ( relocation.getMessage() != null ? ": " + relocation.getMessage() : "" ) );
+
+ applyRelocation( managedRepository, artifact, proxiedRepositories, wagonProxy, force );
+ }
+ }
+ }
+ }
+ }
+
+ private void getMetadata( String path, File target, List proxiedRepositories, ArtifactRepository managedRepository,
+ ProxyInfo wagonProxy, boolean force )
+ throws ProxyException
+ {
+ for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
+ {
+ ProxiedArtifactRepository repository = (ProxiedArtifactRepository) i.next();
+ File metadataFile = new File( target.getParentFile(), ".metadata-" + repository.getRepository().getId() );
+
+ ArtifactRepositoryPolicy policy = repository.getRepository().getReleases();
+
+ // if it is snapshot metadata, use a different policy
+ if ( path.endsWith( "-SNAPSHOT/maven-metadata.xml" ) )
+ {
+ policy = repository.getRepository().getSnapshots();
+ }
+
+ if ( force || !metadataFile.exists() || isOutOfDate( policy, metadataFile ) )
+ {
+ getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, metadataFile,
+ policy, force );
+
+ mergeMetadataFiles( target, metadataFile );
+ }
+ }
+ }
+
+ private void get( Artifact artifact, File target, ProxiedArtifactRepository repository,
+ ArtifactRepository managedRepository, ProxyInfo wagonProxy, boolean force )
+ throws ProxyException
+ {
+ ArtifactRepository artifactRepository = repository.getRepository();
+
+ // we use the release policy for tracking failures, but only check for updates on snapshots
+ // also, we don't look for updates on timestamp snapshot files, only non-unique-version ones
+ ArtifactRepositoryPolicy policy =
+ artifact.isSnapshot() ? artifactRepository.getSnapshots() : artifactRepository.getReleases();
+
+ boolean needsUpdate = false;
+ if ( artifact.getVersion().endsWith( "-SNAPSHOT" ) && isOutOfDate( policy, target ) )
+ {
+ needsUpdate = true;
+ }
+
+ if ( needsUpdate || force || !target.exists() )
+ {
+ getFileFromRepository( artifactRepository.pathOf( artifact ), repository, managedRepository.getBasedir(),
+ wagonProxy, target, policy, force );
+ }
+ }
+
+ private void mergeMetadataFiles( File target, File metadataFile )
+ throws ProxyException
+ {
+ MetadataXpp3Reader reader = new MetadataXpp3Reader();
+ if ( metadataFile.exists() )
+ {
+ Metadata metadata = null;
+ if ( target.exists() )
+ {
+ FileReader fileReader = null;
+ try
+ {
+ fileReader = new FileReader( target );
+ metadata = reader.read( fileReader );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new ProxyException( "Unable to parse existing metadata: " + e.getMessage(), e );
+ }
+ catch ( IOException e )
+ {
+ throw new ProxyException( "Unable to read existing metadata: " + e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtils.closeQuietly( fileReader );
+ }
+ }
+
+ FileReader fileReader = null;
+ boolean changed = false;
+ try
+ {
+ fileReader = new FileReader( metadataFile );
+ Metadata newMetadata = reader.read( fileReader );
+
+ if ( metadata != null )
+ {
+ setLastUpdatedIfEmpty( newMetadata, metadataFile );
+ setLastUpdatedIfEmpty( metadata, target );
+
+ changed = metadata.merge( newMetadata );
+ }
+ else
+ {
+ metadata = newMetadata;
+ changed = true;
+ }
+ }
+ catch ( IOException e )
+ {
+ // ignore the merged file
+ getLogger().warn( "Unable to read new metadata: " + e.getMessage() );
+ }
+ catch ( XmlPullParserException e )
+ {
+ // ignore the merged file
+ getLogger().warn( "Unable to parse new metadata: " + e.getMessage() );
+ }
+ finally
+ {
+ IOUtils.closeQuietly( fileReader );
+ }
+
+ if ( changed )
+ {
+ FileWriter fileWriter = null;
+ try
+ {
+ fileWriter = new FileWriter( target );
+ new MetadataXpp3Writer().write( fileWriter, metadata );
+ }
+ catch ( IOException e )
+ {
+ getLogger().warn( "Unable to store new metadata: " + e.getMessage() );
+ }
+ finally
+ {
+ IOUtils.closeQuietly( fileWriter );
+ }
+ }
+ }
+ }
+
+ private void setLastUpdatedIfEmpty( Metadata metadata, File metadataFile )
+ {
+ if ( metadata.getVersioning() == null )
+ {
+ metadata.setVersioning( new Versioning() );
+ }
+ if ( metadata.getVersioning().getLastUpdated() == null )
+ {
+ DateFormat fmt = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
+ fmt.setTimeZone( UTC_TIMEZONE );
+ metadata.getVersioning().setLastUpdated( fmt.format( new Date( metadataFile.lastModified() ) ) );
+ }
+ }
+
+ private void getFileFromRepository( String path, ProxiedArtifactRepository repository, String repositoryCachePath,
+ ProxyInfo httpProxy, File target, ArtifactRepositoryPolicy policy,
+ boolean force )
+ throws ProxyException
+ {
+ if ( !policy.isEnabled() )
+ {
+ getLogger().debug( "Skipping disabled repository " + repository.getName() );
+ return;
+ }
+
+ Map checksums = null;
+ Wagon wagon = null;
+
+ File temp = new File( target.getAbsolutePath() + ".tmp" );
+ temp.deleteOnExit();
+
+ boolean connected = false;
+ try
+ {
+ String protocol = repository.getRepository().getProtocol();
+ wagon = (Wagon) wagons.get( protocol );
+ if ( wagon == null )
+ {
+ throw new ProxyException( "Unsupported remote protocol: " + protocol );
+ }
+
+ //@todo configure wagon (ssh settings, etc)
+
+ checksums = prepareChecksumListeners( wagon );
+
+ connected = connectToRepository( wagon, repository, httpProxy );
+ if ( connected )
+ {
+ int tries = 0;
+ boolean success;
+
+ do
+ {
+ tries++;
+
+ boolean downloaded = true;
+ if ( force || !target.exists() )
+ {
+ getLogger().debug( "Retrieving " + path + " from " + repository.getName() );
+ wagon.get( path, temp );
+ }
+ else
+ {
+ getLogger().debug( "Retrieving " + path + " from " + repository.getName() + " if updated" );
+ downloaded = wagon.getIfNewer( path, temp, target.lastModified() );
+ }
+
+ if ( downloaded )
+ {
+ success = checkChecksum( checksums, path, wagon, repositoryCachePath );
+
+ if ( tries > 1 && !success )
+ {
+ processRepositoryFailure( repository,
+ "Checksum failures occurred while downloading " + path, path,
+ policy );
+ return;
+ }
+ }
+ else
+ {
+ // getIfNewer determined we were up to date
+ success = true;
+ }
+ }
+ while ( !success );
+
+ // temp won't exist if we called getIfNewer and it was older, but it's still a successful return
+ if ( temp.exists() )
+ {
+ moveTempToTarget( temp, target );
+ }
+
+ getLogger().debug( "Successfully downloaded" );
+ }
+ //try next repository
+ }
+ catch ( TransferFailedException e )
+ {
+ processRepositoryFailure( repository, e, path, policy );
+ }
+ catch ( AuthorizationException e )
+ {
+ processRepositoryFailure( repository, e, path, policy );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // hard failure setting doesn't affect "not found".
+ getLogger().debug( "Artifact not found in repository: " + repository.getName() + ": " + e.getMessage() );
+ }
+ finally
+ {
+ temp.delete();
+
+ if ( wagon != null && checksums != null )
+ {
+ releaseChecksumListeners( wagon, checksums );
+ }
+
+ if ( connected )
+ {
+ disconnectWagon( wagon );
+ }
+ }
+ }
+
+ private static boolean isOutOfDate( ArtifactRepositoryPolicy policy, File target )
+ {
+ return policy != null && policy.checkOutOfDate( new Date( target.lastModified() ) );
+ }
+
+ /**
+ * Used to add checksum observers as transfer listeners to the wagon object.
+ *
+ * @param wagon the wagon to attach the checksum observers to
+ * @return map of ChecksumObservers added as transfer listeners on the wagon
+ */
+ private Map prepareChecksumListeners( Wagon wagon )
+ {
+ Map checksums = new LinkedHashMap();
+ try
+ {
+ ChecksumObserver checksum = new ChecksumObserver( "SHA-1" );
+ wagon.addTransferListener( checksum );
+ checksums.put( "sha1", checksum );
+
+ checksum = new ChecksumObserver( "MD5" );
+ wagon.addTransferListener( checksum );
+ checksums.put( "md5", checksum );
+ }
+ catch ( NoSuchAlgorithmException e )
+ {
+ getLogger().error( "An error occurred while preparing checksum observers: " + e.getMessage() );
+ }
+ return checksums;
+ }
+
+ private void releaseChecksumListeners( Wagon wagon, Map checksumMap )
+ {
+ for ( Iterator checksums = checksumMap.values().iterator(); checksums.hasNext(); )
+ {
+ ChecksumObserver listener = (ChecksumObserver) checksums.next();
+ wagon.removeTransferListener( listener );
+ }
+ }
+
+ private boolean connectToRepository( Wagon wagon, ProxiedArtifactRepository repository, ProxyInfo httpProxy )
+ {
+ boolean connected = false;
+ try
+ {
+ ArtifactRepository artifactRepository = repository.getRepository();
+ Repository wagonRepository = new Repository( artifactRepository.getId(), artifactRepository.getUrl() );
+ if ( repository.isUseNetworkProxy() && httpProxy != null )
+ {
+ wagon.connect( wagonRepository, httpProxy );
+ }
+ else
+ {
+ wagon.connect( wagonRepository );
+ }
+ connected = true;
+ }
+ catch ( ConnectionException e )
+ {
+ getLogger().info( "Could not connect to " + repository.getName() + ": " + e.getMessage() );
+ }
+ catch ( AuthenticationException e )
+ {
+ getLogger().info( "Could not connect to " + repository.getName() + ": " + e.getMessage() );
+ }
+
+ return connected;
+ }
+
+ private boolean checkChecksum( Map checksumMap, String path, Wagon wagon, String repositoryCachePath )
+ throws ProxyException
+ {
+ releaseChecksumListeners( wagon, checksumMap );
+
+ boolean correctChecksum = false;
+
+ boolean allNotFound = true;
+
+ for ( Iterator i = checksumMap.keySet().iterator(); i.hasNext() && !correctChecksum; )
+ {
+ String checksumExt = (String) i.next();
+ ChecksumObserver checksum = (ChecksumObserver) checksumMap.get( checksumExt );
+ String checksumPath = path + "." + checksumExt;
+ File checksumFile = new File( repositoryCachePath, checksumPath );
+
+ File tempChecksumFile = new File( checksumFile.getAbsolutePath() + ".tmp" );
+ tempChecksumFile.deleteOnExit();
+
+ try
+ {
+ wagon.get( checksumPath, tempChecksumFile );
+
+ allNotFound = false;
+
+ String remoteChecksum = DigestUtils.cleanChecksum( FileUtils.readFileToString( tempChecksumFile, null ),
+ checksumExt.toUpperCase(),
+ path.substring( path.lastIndexOf( '/' ) + 1 ) );
+
+ String actualChecksum = checksum.getActualChecksum();
+
+ remoteChecksum = remoteChecksum.toUpperCase();
+
+ if ( actualChecksum != null && remoteChecksum.equals( actualChecksum.toUpperCase() ) )
+ {
+ moveTempToTarget( tempChecksumFile, checksumFile );
+
+ correctChecksum = true;
+ }
+ else
+ {
+ getLogger().warn(
+ "The checksum '" + actualChecksum + "' did not match the remote value: " + remoteChecksum );
+ }
+ }
+ catch ( TransferFailedException e )
+ {
+ getLogger().warn( "An error occurred during the download of " + checksumPath + ": " + e.getMessage() );
+ // do nothing try the next checksum
+
+ allNotFound = false;
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ getLogger().debug( "The checksum did not exist: " + checksumPath + "; " + e.getMessage() );
+ // do nothing try the next checksum
+ // remove it if it is present locally in case there is an old incorrect one
+ if ( checksumFile.exists() )
+ {
+ checksumFile.delete();
+ }
+ }
+ catch ( AuthorizationException e )
+ {
+ getLogger().warn( "An error occurred during the download of " + checksumPath + ": " + e.getMessage() );
+ // do nothing try the next checksum
+
+ allNotFound = false;
+ }
+ catch ( IOException e )
+ {
+ getLogger().warn( "An error occurred while reading the temporary checksum file: " + e.getMessage() );
+ // do nothing try the next checksum
+
+ allNotFound = false;
+ }
+ catch ( DigesterException e )
+ {
+ getLogger().warn( "The checksum was invalid: " + checksumPath + ": " + e.getMessage() );
+ // do nothing try the next checksum
+
+ allNotFound = false;
+ }
+ finally
+ {
+ tempChecksumFile.delete();
+ }
+ }
+ return correctChecksum || allNotFound;
+ }
+
+ /**
+ * Used to move the temporary file to its real destination. This is patterned from the way WagonManager handles
+ * its downloaded files.
+ *
+ * @param temp The completed download file
+ * @param target The final location of the downloaded file
+ * @throws ProxyException when the temp file cannot replace the target file
+ */
+ private void moveTempToTarget( File temp, File target )
+ throws ProxyException
+ {
+ if ( target.exists() && !target.delete() )
+ {
+ throw new ProxyException( "Unable to overwrite existing target file: " + target.getAbsolutePath() );
+ }
+
+ if ( !temp.renameTo( target ) )
+ {
+ getLogger().warn( "Unable to rename tmp file to its final name... resorting to copy command." );
+
+ try
+ {
+ FileUtils.copyFile( temp, target );
+ }
+ catch ( IOException e )
+ {
+ throw new ProxyException( "Cannot copy tmp file to its final location", e );
+ }
+ finally
+ {
+ temp.delete();
+ }
+ }
+ }
+
+ /**
+ * Used to disconnect the wagon from its repository.
+ *
+ * @param wagon the connected wagon object
+ */
+ private void disconnectWagon( Wagon wagon )
+ {
+ try
+ {
+ wagon.disconnect();
+ }
+ catch ( ConnectionException e )
+ {
+ getLogger().error( "Problem disconnecting from wagonManager - ignoring: " + e.getMessage() );
+ }
+ }
+
+ private void processRepositoryFailure( ProxiedArtifactRepository repository, Throwable t, String path,
+ ArtifactRepositoryPolicy policy )
+ throws ProxyException
+ {
+ repository.addFailure( path, policy );
+
+ String message = t.getMessage();
+ if ( repository.isHardFail() )
+ {
+ throw new ProxyException(
+ "An error occurred in hardfailing repository " + repository.getName() + "...\n " + message, t );
+ }
+
+ getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
+ getLogger().debug( "Cause", t );
+ }
+
+ private void processRepositoryFailure( ProxiedArtifactRepository repository, String message, String path,
+ ArtifactRepositoryPolicy policy )
+ throws ProxyException
+ {
+ repository.addFailure( path, policy );
+
+ processCachedRepositoryFailure( repository, message );
+ }
+
+ private void processCachedRepositoryFailure( ProxiedArtifactRepository repository, String message )
+ throws ProxyException
+ {
+ if ( repository.isHardFail() )
+ {
+ throw new ProxyException(
+ "An error occurred in hardfailing repository " + repository.getName() + "...\n " + message );
+ }
+
+ getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
+
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A proxied artifact repository - contains the artifact repository and additional information about
+ * the proxied repository.
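+ *
+ * <p>A minimal failure-caching sketch (illustrative only; <code>artifactRepository</code>, <code>path</code>
+ * and <code>policy</code> are assumed to be supplied by the caller):</p>
+ * <pre>
+ * ProxiedArtifactRepository proxied = new ProxiedArtifactRepository( artifactRepository );
+ * proxied.setCacheFailures( true );
+ * if ( !proxied.isCachedFailure( path ) )
+ * {
+ *     // attempt the download; if it fails, record it so the expiry is derived from the policy
+ *     proxied.addFailure( path, policy );
+ * }
+ * </pre>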
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class ProxiedArtifactRepository
+{
+ /**
+ * Whether to cache failures or not.
+ */
+ private boolean cacheFailures;
+
+ /**
+ * Whether failures on this repository cause the whole group to fail.
+ */
+ private boolean hardFail;
+
+ /**
+ * Whether to use the network proxy for any requests.
+ */
+ private boolean useNetworkProxy;
+
+ /**
+ * The artifact repository on the other end of the proxy.
+ */
+ private final ArtifactRepository repository;
+
+ /**
+ * Cache of failures that have already occurred, containing paths from the repository root. The value given
+ * specifies when the failure should expire.
+ */
+ private Map/*<String,Long>*/ failureCache = new HashMap/*<String,Long>*/();
+
+ /**
+ * A user friendly name for the repository.
+ */
+ private String name;
+
+ public ProxiedArtifactRepository( ArtifactRepository repository )
+ {
+ this.repository = repository;
+ }
+
+ public boolean isHardFail()
+ {
+ return hardFail;
+ }
+
+ public boolean isUseNetworkProxy()
+ {
+ return useNetworkProxy;
+ }
+
+ public boolean isCacheFailures()
+ {
+ return cacheFailures;
+ }
+
+ public ArtifactRepository getRepository()
+ {
+ return repository;
+ }
+
+ /**
+ * Check if there is a previously cached failure for requesting the given path.
+ *
+ * @param path the path
+ * @return whether there is a failure
+ */
+ public boolean isCachedFailure( String path )
+ {
+ boolean failed = false;
+ if ( cacheFailures )
+ {
+ Long time = (Long) failureCache.get( path );
+ if ( time != null )
+ {
+ if ( System.currentTimeMillis() < time.longValue() )
+ {
+ failed = true;
+ }
+ else
+ {
+ clearFailure( path );
+ }
+ }
+ }
+ return failed;
+ }
+
+ /**
+ * Add a failure to the cache.
+ *
+ * @param path the path that failed
+ * @param policy the policy for when the failure should expire
+ */
+ public void addFailure( String path, ArtifactRepositoryPolicy policy )
+ {
+ failureCache.put( path, new Long( calculateExpiryTime( policy ) ) );
+ }
+
+ private long calculateExpiryTime( ArtifactRepositoryPolicy policy )
+ {
+ String updatePolicy = policy.getUpdatePolicy();
+ long time;
+ if ( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS.equals( updatePolicy ) )
+ {
+ time = 0;
+ }
+ else if ( ArtifactRepositoryPolicy.UPDATE_POLICY_DAILY.equals( updatePolicy ) )
+ {
+ // Get midnight boundary
+ Calendar cal = Calendar.getInstance();
+ cal.set( Calendar.HOUR_OF_DAY, 0 );
+ cal.set( Calendar.MINUTE, 0 );
+ cal.set( Calendar.SECOND, 0 );
+ cal.set( Calendar.MILLISECOND, 0 );
+ cal.add( Calendar.DAY_OF_MONTH, 1 );
+ time = cal.getTime().getTime();
+ }
+ else if ( updatePolicy.startsWith( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL ) )
+ {
+ String s = updatePolicy.substring( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL.length() + 1 );
+ int minutes = Integer.valueOf( s ).intValue();
+ Calendar cal = Calendar.getInstance();
+ cal.add( Calendar.MINUTE, minutes );
+ time = cal.getTime().getTime();
+ }
+ else
+ {
+ // else assume "never"
+ time = Long.MAX_VALUE;
+ }
+ return time;
+ }
+
+ /**
+ * Remove a failure.
+ *
+ * @param path the path that had previously failed
+ */
+ public void clearFailure( String path )
+ {
+ failureCache.remove( path );
+ }
+
+ public String getName()
+ {
+ return name;
+ }
+
+ public void setCacheFailures( boolean cacheFailures )
+ {
+ this.cacheFailures = cacheFailures;
+ }
+
+ public void setHardFail( boolean hardFail )
+ {
+ this.hardFail = hardFail;
+ }
+
+ public void setUseNetworkProxy( boolean useNetworkProxy )
+ {
+ this.useNetworkProxy = useNetworkProxy;
+ }
+
+ public void setName( String name )
+ {
+ this.name = name;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * @author Edwin Punzalan
+ */
+public class ProxyException
+ extends Exception
+{
+ public ProxyException( String message )
+ {
+ super( message );
+ }
+
+ public ProxyException( String message, Throwable t )
+ {
+ super( message, t );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * An individual request handler for the proxy.
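+ *
+ * <p>A minimal usage sketch (illustrative only; it assumes a Plexus container reference named
+ * <code>container</code> and repository lists built elsewhere):</p>
+ * <pre>
+ * ProxyRequestHandler handler = (ProxyRequestHandler) container.lookup( ProxyRequestHandler.ROLE );
+ * File cached = handler.get( path, proxiedRepositories, managedRepository );
+ * </pre>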
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface ProxyRequestHandler
+{
+ /**
+ * The Plexus role of the component.
+ */
+ String ROLE = ProxyRequestHandler.class.getName();
+
+ /**
+ * Used to retrieve an artifact at a particular path, giving the cached version if it exists.
+ *
+ * @param path the expected repository path
+ * @param proxiedRepositories the repositories being proxied to
+ * @param managedRepository the locally managed repository to cache artifacts in
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File get( String path, List proxiedRepositories, ArtifactRepository managedRepository )
+ throws ProxyException, ResourceDoesNotExistException;
+
+ /**
+ * Used to retrieve an artifact at a particular path, giving the cached version if it exists.
+ *
+ * @param path the expected repository path
+ * @param proxiedRepositories the repositories being proxied to
+ * @param managedRepository the locally managed repository to cache artifacts in
+ * @param wagonProxy a network proxy to use when transferring files if needed
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File get( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
+ throws ProxyException, ResourceDoesNotExistException;
+
+ /**
+ * Used to force remote download of the requested path from any of the configured repositories. This method will
+ * only bypass the cache for searching, but the requested path will still be cached.
+ *
+ * @param path the expected repository path
+ * @param proxiedRepositories the repositories being proxied to
+ * @param managedRepository the locally managed repository to cache artifacts in
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository )
+ throws ProxyException, ResourceDoesNotExistException;
+
+ /**
+ * Used to force remote download of the requested path from any of the configured repositories. This method will
+ * only bypass the cache for searching, but the requested path will still be cached.
+ *
+ * @param path the expected repository path
+ * @param proxiedRepositories the repositories being proxied to
+ * @param managedRepository the locally managed repository to cache artifacts in
+ * @param wagonProxy a network proxy to use when transferring files if needed
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
+ throws ProxyException, ResourceDoesNotExistException;
+}
--- /dev/null
+~~ Copyright 2006 The Apache Software Foundation.\r
+~~\r
+~~ Licensed under the Apache License, Version 2.0 (the "License");\r
+~~ you may not use this file except in compliance with the License.\r
+~~ You may obtain a copy of the License at\r
+~~\r
+~~ http://www.apache.org/licenses/LICENSE-2.0\r
+~~\r
+~~ Unless required by applicable law or agreed to in writing, software\r
+~~ distributed under the License is distributed on an "AS IS" BASIS,\r
+~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+~~ See the License for the specific language governing permissions and\r
+~~ limitations under the License.\r
+\r
+~~ NOTE: For help with the syntax of this file, see:\r
+~~ http://maven.apache.org/guides/mini/guide-apt-format.html\r
+\r
+ProxyManager\r
+\r
+ The ProxyManager is designed to be used as a simple object or bean by\r
+ a command-line application or a web application.\r
+\r
+Configuration\r
+\r
+ An instance of a ProxyManager requires a configuration object, called\r
+ ProxyConfiguration, that defines its behavior. The ProxyConfiguration is a\r
+ plexus component and can be looked up to get an instance of it. Below is a\r
+ sample plexus lookup statement:\r
+\r
+----------\r
+ ProxyConfiguration config = (ProxyConfiguration) container.lookup( ProxyConfiguration.ROLE );\r
+----------\r
+\r
+ Currently, a ProxyConfiguration lookup returns an empty instance of the\r
+ ProxyConfiguration, which means it does not yet contain any definitions of\r
+ how the ProxyManager should behave. So the next step is to explicitly define\r
+ its behavior.\r
+\r
+----------\r
+ ProxyConfiguration config = (ProxyConfiguration) container.lookup( ProxyConfiguration.ROLE );\r
+\r
+ config.setRepositoryCachePath( "/user/proxy-cache" );\r
+\r
+ ArtifactRepositoryLayout defLayout = new DefaultRepositoryLayout();\r
+\r
+ File repo1File = new File( "src/test/remote-repo1" );\r
+\r
+ ProxyRepository repo1 = new ProxyRepository( "central", "http://www.ibiblio.org/maven2", defLayout );\r
+\r
+ config.addRepository( repo1 );\r
+----------\r
+\r
+ The above statements set up the ProxyConfiguration to use the directory\r
+ <<</user/proxy-cache>>> as the location of the proxy's repository cache.\r
+ They then create a ProxyRepository instance with an id of <<<central>>> that\r
+ looks for remote files on ibiblio.org.\r
+\r
+Instantiation\r
+\r
+ To create or retrieve an instance of a ProxyManager, one will need to use the\r
+ ProxyManagerFactory.\r
+\r
+----------\r
+ ProxyManagerFactory factory = (ProxyManagerFactory) container.lookup( ProxyManagerFactory.ROLE );\r
+ proxy = factory.getProxyManager( "default", config );\r
+----------\r
+\r
+ The factory requires two parameters. The first parameter is the proxy_type\r
+ that you want to use, and the second parameter is the ProxyConfiguration\r
+ that we created above. The proxy_type defines the client that the\r
+ ProxyManager is expected to service. Currently, only the <<<default>>>\r
+ ProxyManager type is available, and it is defined to be for Maven 2.x clients.\r
+\r
+Usage\r
+\r
+* The get() method\r
+\r
+ The ProxyManager get( target ) method is used to retrieve the file at a given\r
+ repository path. This method first checks whether the target already exists in\r
+ the cache. If it does not, the ProxyManager searches all the ProxyRepositories\r
+ present in its ProxyConfiguration. When the target path is found, the\r
+ ProxyManager creates a copy of it in its cache and returns a File instance of\r
+ the cached copy.\r
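+\r
+ For example, a minimal sketch (assuming the <<<proxy>>> instance obtained from the factory\r
+ above; the artifact path shown is illustrative only):\r
+\r
+----------\r
+ File cached = proxy.get( "org/apache/maven/maven-model/2.0/maven-model-2.0.jar" );\r
+----------\r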
+\r
+* The getRemoteFile() method\r
+\r
+ The ProxyManager getRemoteFile( path ) method is used to force the\r
+ ProxyManager to ignore the contents of its cache, search all the\r
+ ProxyRepository objects for the specified path, and retrieve it when\r
+ available. When successful, the ProxyManager creates a copy of the remote\r
+ file in its cache and then returns a File instance of the cached copy.\r
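+\r
+ A similar sketch (again assuming the <<<proxy>>> instance from the factory above; the path is\r
+ illustrative only):\r
+\r
+----------\r
+ File refreshed = proxy.getRemoteFile( "org/apache/maven/maven-model/2.0/maven-model-2.0.jar" );\r
+----------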
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.codehaus.plexus.PlexusTestCase;
+import org.easymock.MockControl;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.net.MalformedURLException;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+
+/**
+ * Test the proxy handler.
+ *
+ * @author Brett Porter
+ */
+public class ProxyRequestHandlerTest
+ extends PlexusTestCase
+{
+ private ProxyRequestHandler requestHandler;
+
+ private List proxiedRepositories;
+
+ private List legacyProxiedRepositories;
+
+ private ArtifactRepository defaultManagedRepository;
+
+ private ArtifactRepository legacyManagedRepository;
+
+ private ArtifactRepository proxiedRepository1;
+
+ private ArtifactRepository proxiedRepository2;
+
+ private ArtifactRepository legacyProxiedRepository;
+
+ private ArtifactRepositoryLayout defaultLayout;
+
+ private ArtifactRepositoryFactory factory;
+
+ private MockControl wagonMockControl;
+
+ private Wagon wagonMock;
+
+ private static final ArtifactRepositoryPolicy DEFAULT_POLICY =
+ new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER, null );
+
+ private static final ArtifactRepositoryPolicy ALWAYS_UPDATE_POLICY =
+ new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS, null );
+
+ private static final TimeZone UTC_TIMEZONE = TimeZone.getTimeZone( "UTC" );
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ requestHandler = (ProxyRequestHandler) lookup( ProxyRequestHandler.ROLE );
+
+ factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ File repoLocation = getTestFile( "target/test-repository/managed" );
+ // faster to only delete this one before copying; the others are handled case by case
+ FileUtils.deleteDirectory( new File( repoLocation, "org/apache/maven/test/get-merged-metadata" ) );
+ copyDirectoryStructure( getTestFile( "src/test/repositories/managed" ), repoLocation );
+
+ defaultLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ defaultManagedRepository = createRepository( "managed-repository", repoLocation );
+
+ repoLocation = getTestFile( "target/test-repository/legacy-managed" );
+ FileUtils.deleteDirectory( repoLocation );
+ copyDirectoryStructure( getTestFile( "src/test/repositories/legacy-managed" ), repoLocation );
+
+ ArtifactRepositoryLayout legacyLayout =
+ (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
+
+ legacyManagedRepository = createRepository( "managed-repository", repoLocation, legacyLayout );
+
+ File location = getTestFile( "src/test/repositories/proxied1" );
+ proxiedRepository1 = createRepository( "proxied1", location );
+
+ location = getTestFile( "src/test/repositories/proxied2" );
+ proxiedRepository2 = createRepository( "proxied2", location );
+
+ proxiedRepositories = new ArrayList( 2 );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+ location = getTestFile( "src/test/repositories/legacy-proxied" );
+ legacyProxiedRepository = createRepository( "legacy-proxied", location, legacyLayout );
+
+ legacyProxiedRepositories = Collections.singletonList( createProxiedRepository( legacyProxiedRepository ) );
+
+ wagonMockControl = MockControl.createNiceControl( Wagon.class );
+ wagonMock = (Wagon) wagonMockControl.getMock();
+ WagonDelegate delegate = (WagonDelegate) lookup( Wagon.ROLE, "test" );
+ delegate.setDelegate( wagonMock );
+ }
+
+ public void testGetDefaultLayoutNotPresent()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ // TODO: timestamp preservation requires support for that in wagon
+// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
+ }
+
+ public void testGetDefaultLayoutAlreadyPresent()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+ long originalModificationTime = expectedFile.lastModified();
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
+ assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
+ file.lastModified() );
+ }
+
+ public void testGetDefaultLayoutRemoteUpdate()
+ throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
+ {
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
+ expectedFile.setLastModified( getPastDate().getTime() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetWhenInBothProxiedRepos()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetInSecondProxiedRepo()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ }
+
+ public void testNotFoundInAnyProxies()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/does-not-exist/1.0/does-not-exist-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "File returned was: " + file + "; should have got a not found exception" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // expected, but check file was not created
+ assertFalse( expectedFile.exists() );
+ }
+ }
+
+ public void testGetInSecondProxiedRepoFirstFails()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
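+ // replace the first proxied repository with one backed by the mock wagon ("test://") so its transfer fails;
+ // the handler should fall back to the second proxy and cache the failure against the first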
+ proxiedRepository1 = createRepository( "proxied1", "test://..." );
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+ proxiedRepositories.add( proxiedArtifactRepository );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+ wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+ wagonMockControl.replay();
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ wagonMockControl.verify();
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+ }
+
+ public void testGetButAllRepositoriesFail()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
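+ // back both proxied repositories with the failing mock wagon so every remote transfer fails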
+ proxiedRepository1 = createRepository( "proxied1", "test://..." );
+ proxiedRepository2 = createRepository( "proxied2", "test://..." );
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
+ proxiedRepositories.add( proxiedArtifactRepository1 );
+ ProxiedArtifactRepository proxiedArtifactRepository2 = createProxiedRepository( proxiedRepository2 );
+ proxiedRepositories.add( proxiedArtifactRepository2 );
+
+ wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+ wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+ wagonMockControl.replay();
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // as expected
+ wagonMockControl.verify();
+ assertTrue( "Check failure", proxiedArtifactRepository1.isCachedFailure( path ) );
+ assertTrue( "Check failure", proxiedArtifactRepository2.isCachedFailure( path ) );
+
+ // TODO: failures should not be presented to the client as a "not found" result
+ // TODO: How much information on each failure should we pass back to the user vs. logging in the proxy?
+ }
+ }
+
+ public void testGetInSecondProxiedRepoFirstHardFails()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
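+ // the first repository is set to hard-fail, so the transfer error should surface as a
+ // ProxyException rather than falling through to the second repository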
+ proxiedRepository1 = createRepository( "proxied1", "test://..." );
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository = createHardFailProxiedRepository( proxiedRepository1 );
+ proxiedRepositories.add( proxiedArtifactRepository );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+ wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+ TransferFailedException failedException = new TransferFailedException( "transfer failed" );
+ wagonMockControl.setThrowable( failedException );
+
+ wagonMockControl.replay();
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ProxyException e )
+ {
+ // expect a failure
+ wagonMockControl.verify();
+
+ assertEquals( "Check cause", failedException, e.getCause() );
+ assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+ }
+ }
+
+ public void testGetInSecondProxiedRepoFirstFailsFromCache()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ // a previously cached failure should cause the first repository to be skipped, even though the artifact is now present there
+
+ String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+ proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
+ proxiedRepositories.add( proxiedArtifactRepository );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetInSecondProxiedRepoFirstHardFailsFromCache()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ // a previously cached failure should cause the first repository to be skipped, even though the artifact is now present there
+
+ String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository = createHardFailProxiedRepository( proxiedRepository1 );
+ proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
+ proxiedRepositories.add( proxiedArtifactRepository );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ProxyException e )
+ {
+ // expect a failure
+ assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+ }
+ }
+
+ public void testGetInSecondProxiedRepoFirstFailsDisabledCacheFailure()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
+
+ assertFalse( expectedFile.exists() );
+
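+ // pre-record a failure but disable failure caching; the first repository should still be attempted
+ // and the new failure must not be cached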
+ proxiedRepository1 = createRepository( "proxied1", "test://..." );
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+ proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
+ proxiedArtifactRepository.setCacheFailures( false );
+ proxiedRepositories.add( proxiedArtifactRepository );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+ wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+ wagonMockControl.replay();
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ wagonMockControl.verify();
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+ }
+
+ public void testGetWhenInBothProxiedReposFirstHasExpiredCacheFailure()
+ throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
+ {
+ String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
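+ // the failure is cached with the always-update policy, so it counts as expired and the first repository is consulted again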
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+ proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
+ proxiedRepositories.add( proxiedArtifactRepository );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+
+ assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+ }
+
+ public void testGetAlwaysAlreadyPresent()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
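+ // getAlways() forces a re-download, replacing the managed copy with the proxied one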
+ File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetAlwaysAlreadyPresentRemovedFromProxies()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-removed-from-proxies/1.0/get-removed-from-proxies-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ }
+
+ public void testGetAlwaysWithCachedFailure()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
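+ // getAlways() should ignore the cached failure and download from the first proxied repository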
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+ proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
+ proxiedRepositories.add( proxiedArtifactRepository );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+ File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetRemovesTemporaryFileOnSuccess()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File tempFile = new File( file.getParentFile(), file.getName() + ".tmp" );
+ assertFalse( "Check temporary file removed", tempFile.exists() );
+ }
+
+ public void testGetRemovesTemporaryFileOnError()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
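+ // make the transfer fail via the mock wagon and verify the temporary download file gets removed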
+ proxiedRepository1 = createRepository( "proxied1", "test://..." );
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
+ proxiedRepositories.add( proxiedArtifactRepository1 );
+
+ wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+ wagonMockControl.replay();
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // as expected
+ wagonMockControl.verify();
+
+ File tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" );
+ assertFalse( "Check temporary file removed", tempFile.exists() );
+ }
+ }
+
+ public void testGetRemovesTemporaryChecksumFileOnSuccess()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File tempFile = new File( file.getParentFile(), file.getName() + ".sha1.tmp" );
+ assertFalse( "Check temporary file removed", tempFile.exists() );
+ }
+
+ public void testGetRemovesTemporaryChecksumFileOnError()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ proxiedRepository1 = createRepository( "proxied1", "test://..." );
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
+ proxiedRepositories.add( proxiedArtifactRepository1 );
+
+ wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+
+ mockFailedChecksums( path, expectedFile );
+
+ wagonMockControl.replay();
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // as expected
+ wagonMockControl.verify();
+
+ File tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" );
+ assertFalse( "Check temporary file removed", tempFile.exists() );
+
+ tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" );
+ assertFalse( "Check temporary file removed", tempFile.exists() );
+ }
+ }
+
+ public void testGetChecksumBothCorrect()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
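+ // both remote checksums verify; only the SHA-1 checksum file should end up in the managed repository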
+ String path = "org/apache/maven/test/get-checksum-both-right/1.0/get-checksum-both-right-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File checksumFile = getChecksumFile( file, "sha1" );
+ assertTrue( "Check file created", checksumFile.exists() );
+ assertEquals( "Check checksum", "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar",
+ FileUtils.readFileToString( checksumFile, null ).trim() );
+
+ assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
+ }
+
+ public void testGetCorrectSha1NoMd5()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File checksumFile = getChecksumFile( file, "sha1" );
+ assertTrue( "Check file created", checksumFile.exists() );
+ assertEquals( "Check checksum", "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
+ FileUtils.readFileToString( checksumFile, null ).trim() );
+
+ assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
+ }
+
+ public void testGetCorrectSha1BadMd5()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-checksum-sha1-bad-md5/1.0/get-checksum-sha1-bad-md5-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File checksumFile = getChecksumFile( file, "sha1" );
+ assertTrue( "Check file created", checksumFile.exists() );
+ assertEquals( "Check checksum", "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
+ FileUtils.readFileToString( checksumFile, null ).trim() );
+
+ assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
+ }
+
+ public void testGetCorrectMd5NoSha1()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-checksum-md5-only/1.0/get-checksum-md5-only-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File checksumFile = getChecksumFile( file, "md5" );
+ assertTrue( "Check file created", checksumFile.exists() );
+ assertEquals( "Check checksum", "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar",
+ FileUtils.readFileToString( checksumFile, null ).trim() );
+
+ assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
+ }
+
+ public void testGetCorrectMd5BadSha1()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-checksum-md5-bad-sha1/1.0/get-checksum-md5-bad-sha1-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File checksumFile = getChecksumFile( file, "md5" );
+ assertTrue( "Check file created", checksumFile.exists() );
+ assertEquals( "Check checksum", "8a02aa67549d27b2a03cd4547439c6d3 *get-checksum-md5-bad-sha1-1.0.jar",
+ FileUtils.readFileToString( checksumFile, null ).trim() );
+
+ assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
+ }
+
+ public void testGetWithNoChecksums()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
+ assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
+ }
+
+ public void testGetBadMd5BadSha1()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-checksum-both-bad/1.0/get-checksum-both-bad-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // expect a failure
+ assertFalse( "Check file not created", expectedFile.exists() );
+
+ assertFalse( "Check file not created", getChecksumFile( expectedFile, "md5" ).exists() );
+ assertFalse( "Check file not created", getChecksumFile( expectedFile, "sha1" ).exists() );
+ }
+ }
+
+ public void testGetChecksumTransferFailed()
+ throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+ AuthorizationException
+ {
+ String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ proxiedRepository1 = createRepository( "proxied1", "test://..." );
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
+ proxiedRepositories.add( proxiedArtifactRepository1 );
+
+ wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+
+ mockFailedChecksums( path, expectedFile );
+
+ wagonMockControl.replay();
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // as expected
+ wagonMockControl.verify();
+
+ assertFalse( "Check file not created", expectedFile.exists() );
+
+ assertFalse( "Check file not created", getChecksumFile( expectedFile, "md5" ).exists() );
+ assertFalse( "Check file not created", getChecksumFile( expectedFile, "sha1" ).exists() );
+ }
+ }
+
+ public void testGetAlwaysBadChecksumPresentLocallyAbsentRemote()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-bad-local-checksum/1.0/get-bad-local-checksum-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
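+ // getAlways() re-downloads over the copy that has a bad local checksum and removes the stale checksum files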
+ File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+
+ assertFalse( "Check checksum removed", new File( file.getParentFile(), file.getName() + ".sha1" ).exists() );
+ assertFalse( "Check checksum removed", new File( file.getParentFile(), file.getName() + ".md5" ).exists() );
+ }
+
+ public void testGetChecksumPresentInManagedRepo()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path =
+ "org/apache/maven/test/get-checksum-from-managed-repo/1.0/get-checksum-from-managed-repo-1.0.jar.sha1";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetAlwaysChecksumPresentInManagedRepo()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path =
+ "org/apache/maven/test/get-checksum-from-managed-repo/1.0/get-checksum-from-managed-repo-1.0.jar.sha1";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetChecksumNotPresentInManagedRepo()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar.sha1";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // expected
+
+ assertFalse( expectedFile.exists() );
+ }
+ }
+
+ public void testGetAlwaysChecksumNotPresentInManagedRepo()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar.sha1";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ try
+ {
+ File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // expected
+
+ assertFalse( expectedFile.exists() );
+ }
+ }
+
+ public void testGetMetadataNotPresent()
+ throws ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/dummy-artifact/1.0/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Found file: " + file + "; but was expecting a failure" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // expected
+
+ assertFalse( expectedFile.exists() );
+ }
+ }
+
+ public void testGetMetadataProxied()
+ throws ProxyException, ResourceDoesNotExistException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-metadata/1.0/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ FileUtils.deleteDirectory( expectedFile.getParentFile() );
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ String expectedContents = getExpectedMetadata( "get-default-metadata", "1.0" );
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+ }
+
+/* TODO: test keeps failing in the reactor - needs to be made more robust before re-enabling
+ public void testGetMetadataMergeRepos()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org/apache/maven/test/get-merged-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents = getExpectedMetadata( "get-merged-metadata", getVersioning(
+ Arrays.asList( new String[]{"0.9", "1.0", "2.0", "3.0", "5.0", "4.0"} ), file ) );
+
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+ }
+*/
+
+ public void testGetMetadataRemovedFromProxies()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-removed-metadata/1.0/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents =
+ FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+ }
+
+ public void testGetReleaseMetadataNotExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents =
+ FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
+
+ assertTrue( expectedFile.exists() );
+
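+ // age the proxied1 metadata marker; with the release policy set to "never" the managed metadata should not be updated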
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ String unexpectedContents =
+ FileUtils.readFileToString( new File( proxiedRepository1.getBasedir(), path ), null );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetSnapshotMetadataNotExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents =
+ FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ String unexpectedContents =
+ FileUtils.readFileToString( new File( proxiedRepository1.getBasedir(), path ), null );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetReleaseMetadataExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents =
+ FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents = getExpectedMetadata( "get-updated-metadata", getVersioning(
+ Arrays.asList( new String[]{"1.0", "2.0"} ), file ) );
+
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+/* TODO: test keeps failing in the reactor - needs to be made more robust before re-enabling
+ public void testGetSnapshotMetadataExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents =
+ FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents =
+ getExpectedMetadata( "get-updated-metadata", "1.0-SNAPSHOT", getVersioning( "20050831.111213", 2, file ) );
+
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetMetadataNotUpdated()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents =
+ FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
+
+ assertTrue( expectedFile.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( proxiedFile.lastModified() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetMetadataUpdated()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents =
+ FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents = getExpectedMetadata( "get-updated-metadata", getVersioning(
+ Arrays.asList( new String[]{"1.0", "2.0"} ), file ) );
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check content doesn't match old version",
+ unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testGetAlwaysMetadata()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents =
+ FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents = getExpectedMetadata( "get-updated-metadata", getVersioning(
+ Arrays.asList( new String[]{"1.0", "2.0"} ), file ) );
+
+ assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
+ assertFalse( "Check content doesn't match old version",
+ unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+*/
+
+ public void testSnapshotNonExistent()
+ throws ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/does-not-exist/1.0-SNAPSHOT/does-not-exist-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
+ try
+ {
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "File returned was: " + file + "; should have got a not found exception" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // expected, but check file was not created
+ assertFalse( expectedFile.exists() );
+ }
+ }
+
+ public void testTimestampDrivenSnapshotNotPresentAlready()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path =
+ "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ }
+
+ public void testNewerTimestampDrivenSnapshotOnFirstRepo()
+ throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
+ {
+ String path =
+ "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertTrue( expectedFile.exists() );
+
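+ // age the managed copy so the newer proxied snapshot is downloaded in its place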
+ expectedFile.setLastModified( getPastDate().getTime() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ }
+
+ public void testOlderTimestampDrivenSnapshotOnFirstRepo()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path =
+ "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
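+ // mark the managed copy as newer than the proxied snapshot; even with the "always" policy it should not be overwritten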
+ expectedFile.setLastModified( getFutureDate().getTime() );
+
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+/* TODO: won't pass until Wagon preserves timestamp on download
+ public void testNewerTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
+ throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
+ {
+ String path =
+ "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
+ File repoLocation = getTestFile( "target/test-repository/proxied1" );
+ FileUtils.deleteDirectory( repoLocation );
+ copyDirectoryStructure( getTestFile( "src/test/repositories/proxied1" ), repoLocation );
+ proxiedRepository1 = createRepository( "proxied1", repoLocation );
+
+ new File( proxiedRepository1.getBasedir(), path ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepositories.clear();
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+*/
+
+ public void testOlderTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
+ throws ParseException, ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path =
+ "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File repoLocation = getTestFile( "target/test-repository/proxied2" );
+ FileUtils.deleteDirectory( repoLocation );
+ copyDirectoryStructure( getTestFile( "src/test/repositories/proxied2" ), repoLocation );
+ proxiedRepository2 = createRepository( "proxied2", repoLocation );
+
+ new File( proxiedRepository2.getBasedir(), path ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepositories.clear();
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testTimestampDrivenSnapshotNotExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path =
+ "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertTrue( expectedFile.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ proxiedFile.setLastModified( getFutureDate().getTime() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testTimestampDrivenSnapshotNotUpdated()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path =
+ "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ expectedFile.setLastModified( proxiedFile.lastModified() );
+
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testTimestampDrivenSnapshotNotPresentAlreadyExpiredCacheFailure()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path =
+ "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
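+ // the cached failure uses the always-update policy, so it is treated as expired and the artifact is fetched from the first repository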
+ proxiedRepositories.clear();
+ ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+ proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
+ proxiedRepositories.add( proxiedArtifactRepository );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+
+ assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+ }
+
+ public void testMetadataDrivenSnapshotNotPresentAlready()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path =
+ "org/apache/maven/test/get-metadata-snapshot/1.0-SNAPSHOT/get-metadata-snapshot-1.0-20050831.101112-1.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ }
+
+ public void testGetMetadataDrivenSnapshotRemoteUpdate()
+ throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
+ {
+ // Metadata-driven snapshots (using a full timestamp) are treated like releases. The timing of updates to the
+ // metadata files determines which artifact will be downloaded
+
+ String path =
+ "org/apache/maven/test/get-present-metadata-snapshot/1.0-SNAPSHOT/get-present-metadata-snapshot-1.0-20050831.101112-1.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+
+ assertTrue( expectedFile.exists() );
+
+ expectedFile.setLastModified( getPastDate().getTime() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ }
+
+ public void testLegacyManagedRepoGetNotPresent()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
+ File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(),
+ "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar" );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ // TODO: timestamp preservation requires support for that in wagon
+// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
+ }
+
+ public void testLegacyManagedRepoGetAlreadyPresent()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+ long originalModificationTime = expectedFile.lastModified();
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(),
+ "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar" );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
+ assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
+ file.lastModified() );
+ }
+
+ public void testLegacyProxyRepoGetNotPresent()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile =
+ new File( legacyProxiedRepository.getBasedir(), "org.apache.maven.test/jars/get-default-layout-1.0.jar" );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ // TODO: timestamp preservation requires support for that in wagon
+// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
+ }
+
+ public void testLegacyProxyRepoGetAlreadyPresent()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+ long originalModificationTime = expectedFile.lastModified();
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ File proxiedFile = new File( legacyProxiedRepository.getBasedir(),
+ "org.apache.maven.test/jars/get-default-layout-present-1.0.jar" );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
+ assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
+ file.lastModified() );
+ }
+
+ public void testLegacyManagedAndProxyRepoGetNotPresent()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
+ File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ // TODO: timestamp preservation requires support for that in wagon
+// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
+ }
+
+ public void testLegacyManagedAndProxyRepoGetAlreadyPresent()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.readFileToString( expectedFile, null );
+ long originalModificationTime = expectedFile.lastModified();
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
+ File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
+ String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
+ assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
+ assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
+ file.lastModified() );
+ }
+
+ public void testLegacyRequestConvertedToDefaultPathInManagedRepo()
+ throws Exception
+ {
+ // Check that a Maven 1 legacy request is translated to a Maven 2 (default layout) path in
+ // the managed repository.
+
+ String legacyPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( legacyPath, legacyProxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ }
+
+ public void testDefaultRequestConvertedToLegacyPathInManagedRepo()
+ throws Exception
+ {
+ // Check that a Maven 2 default-layout request is translated to a Maven 1 legacy path in
+ // the managed repository.
+
+ String legacyPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( legacyManagedRepository.getBasedir(), legacyPath );
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ }
+
+ public void testRelocateMaven1Request()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar";
+ String relocatedPath =
+ "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ }
+
+ public void testDoublyRelocateMaven1Request()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/jars/get-doubly-relocated-artefact-1.0.jar";
+ String relocatedPath =
+ "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ }
+
+ public void testRelocateMaven1PomRequest()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/poms/get-relocated-artefact-with-pom-1.0.pom";
+ String relocatedPath =
+ "org/apache/maven/test/get-default-layout-present-with-pom/1.0/get-default-layout-present-with-pom-1.0.pom";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+
+ assertTrue( expectedFile.exists() );
+ }
+
+ public void testRelocateMaven1PomRequestMissingTarget()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/poms/get-relocated-artefact-1.0.pom";
+ String relocatedPath =
+ "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.pom";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
+
+ assertFalse( expectedFile.exists() );
+
+ try
+ {
+ requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Should have failed to find target POM" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // expected: the relocation target POM does not exist
+ assertTrue( true );
+ }
+ }
+
+ public void testRelocateMaven1ChecksumRequest()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar.md5";
+ String relocatedPath =
+ "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar.md5";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+
+ assertTrue( expectedFile.exists() );
+
+ path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar.sha1";
+ relocatedPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar.sha1";
+ expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
+
+ assertFalse( expectedFile.exists() );
+
+ try
+ {
+ requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ fail( "Checksum was not present, should not be found" );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // expected: the checksum is not present in any repository, so the request should fail
+ assertTrue( true );
+ }
+ }
+
+ public void testRelocateMaven2Request()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org/apache/maven/test/get-relocated-artefact/1.0/get-relocated-artefact-1.0.jar";
+ String relocatedPath =
+ "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ }
+
+ public void testRelocateMaven2RequestInLegacyManagedRepo()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org/apache/maven/test/get-relocated-artefact/1.0/get-relocated-artefact-1.0.jar";
+ String relocatedPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( legacyManagedRepository.getBasedir(), relocatedPath );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ }
+
+ private static Versioning getVersioning( List versions, File file )
+ {
+ Versioning versioning = new Versioning();
+ for ( Iterator i = versions.iterator(); i.hasNext(); )
+ {
+ String v = (String) i.next();
+ versioning.addVersion( v );
+ }
+ setLastUpdatedTimestamp( versioning, file );
+ return versioning;
+ }
+
+ private static String getExpectedMetadata( String artifactId, Versioning versioning )
+ throws IOException
+ {
+ return getExpectedMetadata( artifactId, null, versioning );
+ }
+
+ private static String getExpectedMetadata( String artifactId, String version, Versioning versioning )
+ throws IOException
+ {
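+ // The expected string built here is the MetadataXpp3Writer serialisation of the model, roughly
+ // <metadata><groupId/><artifactId/>[<version/>]<versioning>...<lastUpdated>yyyyMMddHHmmss</lastUpdated></versioning></metadata>,
+ // so the assertions compare whole metadata documents rather than individual fields.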
+ StringWriter expectedContents = new StringWriter();
+
+ Metadata m = new Metadata();
+ m.setGroupId( "org.apache.maven.test" );
+ m.setArtifactId( artifactId );
+ m.setVersion( version );
+ m.setVersioning( versioning );
+ m.setModelEncoding( null );
+
+ new MetadataXpp3Writer().write( expectedContents, m );
+ return expectedContents.toString();
+ }
+
+ private static String getExpectedMetadata( String artifactId, String version )
+ throws IOException
+ {
+ return getExpectedMetadata( artifactId, version, null );
+ }
+
+ private static Versioning getVersioning( String timestamp, int buildNumber, File file )
+ {
+ Versioning versioning = new Versioning();
+ versioning.setSnapshot( new Snapshot() );
+ versioning.getSnapshot().setTimestamp( timestamp );
+ versioning.getSnapshot().setBuildNumber( buildNumber );
+ setLastUpdatedTimestamp( versioning, file );
+ return versioning;
+ }
+
+ private static void setLastUpdatedTimestamp( Versioning versioning, File file )
+ {
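+ // lastUpdated is the UTC yyyyMMddHHmmss rendering of the file's modification time, which is (presumably)
+ // the same stamp the request handler writes when it creates or merges metadata in the managed repository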
+ DateFormat fmt = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
+ fmt.setTimeZone( UTC_TIMEZONE );
+ versioning.setLastUpdated( fmt.format( new Date( file.lastModified() ) ) );
+ }
+
+ private static Date getPastDate()
+ throws ParseException
+ {
+ return new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" );
+ }
+
+ private static Date getFutureDate()
+ {
+ Calendar cal = Calendar.getInstance();
+ cal.add( Calendar.YEAR, 1 );
+ return cal.getTime();
+ }
+
+ private void mockFailedChecksums( String path, File expectedFile )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ // must do it twice as it will re-attempt it
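+ // (EasyMock record/replay: each wagonMock.get(...) call records an expected invocation, and the
+ // setThrowable(...) that follows makes that invocation fail once the control is switched to replay)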
+ wagonMock.get( path + ".sha1", new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+ wagonMock.get( path + ".md5", new File( expectedFile.getParentFile(), expectedFile.getName() + ".md5.tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+ wagonMock.get( path + ".sha1", new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+ wagonMock.get( path + ".md5", new File( expectedFile.getParentFile(), expectedFile.getName() + ".md5.tmp" ) );
+ wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+ }
+
+ private File getChecksumFile( File file, String algorithm )
+ {
+ return new File( file.getParentFile(), file.getName() + "." + algorithm );
+ }
+
+ /**
+ * A faster recursive copy that omits .svn directories.
+ *
+ * @param sourceDirectory the source directory to copy
+ * @param destDirectory the target location
+ * @throws java.io.IOException if there is a copying problem
+ * @todo get back into plexus-utils, share with converter module
+ */
+ private static void copyDirectoryStructure( File sourceDirectory, File destDirectory )
+ throws IOException
+ {
+ if ( !sourceDirectory.exists() )
+ {
+ throw new IOException( "Source directory doesn't exists (" + sourceDirectory.getAbsolutePath() + ")." );
+ }
+
+ File[] files = sourceDirectory.listFiles();
+
+ String sourcePath = sourceDirectory.getAbsolutePath();
+
+ for ( int i = 0; i < files.length; i++ )
+ {
+ File file = files[i];
+
+ String dest = file.getAbsolutePath();
+
+ dest = dest.substring( sourcePath.length() + 1 );
+
+ File destination = new File( destDirectory, dest );
+
+ if ( file.isFile() )
+ {
+ destination = destination.getParentFile();
+
+ FileUtils.copyFile( file, new File( destination, file.getName() ), false );
+ // TODO: Change when there is a FileUtils.copyFileToDirectory(file, destination, boolean) option
+ //FileUtils.copyFileToDirectory( file, destination );
+ }
+ else if ( file.isDirectory() )
+ {
+ if ( !".svn".equals( file.getName() ) )
+ {
+ if ( !destination.exists() && !destination.mkdirs() )
+ {
+ throw new IOException(
+ "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
+ }
+
+ copyDirectoryStructure( file, destination );
+ }
+ }
+ else
+ {
+ throw new IOException( "Unknown file type: " + file.getAbsolutePath() );
+ }
+ }
+ }
+
+ private static ProxiedArtifactRepository createProxiedRepository( ArtifactRepository repository )
+ {
+ ProxiedArtifactRepository proxiedArtifactRepository = new ProxiedArtifactRepository( repository );
+ proxiedArtifactRepository.setName( repository.getId() );
+ proxiedArtifactRepository.setCacheFailures( true );
+ return proxiedArtifactRepository;
+ }
+
+ private static ProxiedArtifactRepository createHardFailProxiedRepository( ArtifactRepository repository )
+ {
+ ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( repository );
+ proxiedArtifactRepository.setHardFail( true );
+ return proxiedArtifactRepository;
+ }
+
+ private ArtifactRepository createRepository( String id, File repoLocation )
+ throws MalformedURLException
+ {
+ return createRepository( id, repoLocation.toURI().toURL().toExternalForm() );
+ }
+
+ private ArtifactRepository createRepository( String id, File location, ArtifactRepositoryLayout layout )
+ throws MalformedURLException
+ {
+ return createRepository( id, location.toURI().toURL().toExternalForm(), layout );
+ }
+
+ private ArtifactRepository createRepository( String id, String url )
+ {
+ return createRepository( id, url, defaultLayout );
+ }
+
+ private ArtifactRepository createRepository( String id, String url, ArtifactRepositoryLayout repositoryLayout )
+ {
+ return factory.createArtifactRepository( id, url, repositoryLayout, null, null );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.maven.wagon.ConnectionException;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authentication.AuthenticationException;
+import org.apache.maven.wagon.authentication.AuthenticationInfo;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.apache.maven.wagon.events.SessionListener;
+import org.apache.maven.wagon.events.TransferListener;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.apache.maven.wagon.repository.Repository;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * A dummy wagon implementation that delegates to a real wagon, but lets tests override the content that ends
+ * up in the destination file (see {@link #setContentToGet(String)}).
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class WagonDelegate
+ implements Wagon
+{
+ private Wagon delegate;
+
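+ // optional canned content; when set, create() writes it to the destination instead of just touching an empty file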
+ private String contentToGet;
+
+ public void get( String resourceName, File destination )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ delegate.get( resourceName, destination );
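+ // then overwrite the destination with the canned content, if any (a no-op when none is configured and the file exists)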
+ create( destination );
+ }
+
+ public boolean getIfNewer( String resourceName, File destination, long timestamp )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ boolean result = delegate.getIfNewer( resourceName, destination, timestamp );
+ createIfMissing( destination );
+ return result;
+ }
+
+ public void put( File source, String destination )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ delegate.put( source, destination );
+ }
+
+ public void putDirectory( File sourceDirectory, String destinationDirectory )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ delegate.putDirectory( sourceDirectory, destinationDirectory );
+ }
+
+ public boolean resourceExists( String resourceName )
+ throws TransferFailedException, AuthorizationException
+ {
+ return delegate.resourceExists( resourceName );
+ }
+
+ public List getFileList( String destinationDirectory )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ return delegate.getFileList( destinationDirectory );
+ }
+
+ public boolean supportsDirectoryCopy()
+ {
+ return delegate.supportsDirectoryCopy();
+ }
+
+ public Repository getRepository()
+ {
+ return delegate.getRepository();
+ }
+
+ public void connect( Repository source )
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.connect( source );
+ }
+
+ public void connect( Repository source, ProxyInfo proxyInfo )
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.connect( source, proxyInfo );
+ }
+
+ public void connect( Repository source, AuthenticationInfo authenticationInfo )
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.connect( source, authenticationInfo );
+ }
+
+ public void connect( Repository source, AuthenticationInfo authenticationInfo, ProxyInfo proxyInfo )
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.connect( source, authenticationInfo, proxyInfo );
+ }
+
+ public void openConnection()
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.openConnection();
+ }
+
+ public void disconnect()
+ throws ConnectionException
+ {
+ delegate.disconnect();
+ }
+
+ public void addSessionListener( SessionListener listener )
+ {
+ delegate.addSessionListener( listener );
+ }
+
+ public void removeSessionListener( SessionListener listener )
+ {
+ delegate.removeSessionListener( listener );
+ }
+
+ public boolean hasSessionListener( SessionListener listener )
+ {
+ return delegate.hasSessionListener( listener );
+ }
+
+ public void addTransferListener( TransferListener listener )
+ {
+ delegate.addTransferListener( listener );
+ }
+
+ public void removeTransferListener( TransferListener listener )
+ {
+ delegate.removeTransferListener( listener );
+ }
+
+ public boolean hasTransferListener( TransferListener listener )
+ {
+ return delegate.hasTransferListener( listener );
+ }
+
+ public boolean isInteractive()
+ {
+ return delegate.isInteractive();
+ }
+
+ public void setInteractive( boolean interactive )
+ {
+ delegate.setInteractive( interactive );
+ }
+
+ public void setDelegate( Wagon delegate )
+ {
+ this.delegate = delegate;
+ }
+
+ void setContentToGet( String content )
+ {
+ contentToGet = content;
+ }
+
+ private void createIfMissing( File destination )
+ {
+ // since the mock won't actually copy a file, create an empty one to simulate file existence
+ if ( !destination.exists() )
+ {
+ create( destination );
+ }
+ }
+
+ private void create( File destination )
+ {
+ try
+ {
+ destination.getParentFile().mkdirs();
+ if ( contentToGet == null )
+ {
+ destination.createNewFile();
+ }
+ else
+ {
+ FileUtils.writeStringToFile( new File( destination.getAbsolutePath() ), contentToGet, null );
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RuntimeException( e.getMessage(), e );
+ }
+ }
+}
--- /dev/null
+get-default-layout-present-1.0.jar\r
+(managed)\r
--- /dev/null
+7dfb7ade9a8fa90bfbfac52d3090b8c2 *get-default-layout-present-1.0.jar
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-relocated-artefact</artifactId>
+ <version>1.0</version>
+
+ <distributionManagement>
+ <relocation>
+ <artifactId>get-default-layout-present</artifactId>
+ </relocation>
+ </distributionManagement>
+
+</project>
\ No newline at end of file
--- /dev/null
+get-default-layout-1.0.jar
--- /dev/null
+get-default-layout-present-1.0.jar\r
+(proxied)\r
--- /dev/null
+get-bad-local-checksum-1.0.jar\r
+(managed)\r
+\r
--- /dev/null
+invalid checksum file
\ No newline at end of file
--- /dev/null
+invalid checksum file
\ No newline at end of file
--- /dev/null
+066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-from-managed-repo-1.0.jar\r
--- /dev/null
+get-default-layout-present-1.0.jar
+(managed)
+
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-default-layout-present-with-pom</artifactId>
+ <version>1.0</version>
+</project>
--- /dev/null
+get-default-layout-present-1.0.jar
+(managed)
+
--- /dev/null
+7dfb7ade9a8fa90bfbfac52d3090b8c2 *get-default-layout-present-1.0.jar
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-doubly-relocated-artefact</artifactId>
+ <version>1.0</version>
+
+ <distributionManagement>
+ <relocation>
+ <artifactId>get-relocated-artefact</artifactId>
+ </relocation>
+ </distributionManagement>
+
+</project>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-merged-metadata</artifactId>
+ <versioning>
+ <versions>
+ <version>0.9</version>
+ <!-- unique -->
+ <version>1.0</version>
+ <!-- merged with proxied2 -->
+ <version>2.0</version>
+ <!-- merged with proxied1 -->
+ </versions>
+ </versioning>
+</metadata>
\ No newline at end of file
--- /dev/null
+get-present-metadata-snapshot-1.0-20050831.101112-1.jar\r
+(managed)
\ No newline at end of file
--- /dev/null
+get-present-timestamped-snapshot-1.0-SNAPSHOT.jar\r
+(managed)
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-relocated-artefact-with-pom</artifactId>
+ <version>1.0</version>
+
+ <distributionManagement>
+ <relocation>
+ <artifactId>get-default-layout-present-with-pom</artifactId>
+ </relocation>
+ </distributionManagement>
+
+</project>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-relocated-artefact</artifactId>
+ <version>1.0</version>
+
+ <distributionManagement>
+ <relocation>
+ <artifactId>get-default-layout-present</artifactId>
+ </relocation>
+ </distributionManagement>
+
+</project>
\ No newline at end of file
--- /dev/null
+get-removed-from-proxies-1.0.jar
+(managed)
+
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+
+</metadata>
\ No newline at end of file
--- /dev/null
+<!--\r
+ ~ Copyright 2005-2006 The Apache Software Foundation.\r
+ ~\r
+ ~ Licensed under the Apache License, Version 2.0 (the "License");\r
+ ~ you may not use this file except in compliance with the License.\r
+ ~ You may obtain a copy of the License at\r
+ ~\r
+ ~ http://www.apache.org/licenses/LICENSE-2.0\r
+ ~\r
+ ~ Unless required by applicable law or agreed to in writing, software\r
+ ~ distributed under the License is distributed on an "AS IS" BASIS,\r
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ ~ See the License for the specific language governing permissions and\r
+ ~ limitations under the License.\r
+ -->\r
+\r
+<metadata>\r
+ <groupId>org.apache.maven.test</groupId>\r
+ <artifactId>get-updated-metadata</artifactId>\r
+ <versioning>\r
+ <versions>\r
+ <version>1.0</version>\r
+ </versions>\r
+ </versioning>\r
+</metadata>\r
--- /dev/null
+<!--\r
+ ~ Copyright 2005-2006 The Apache Software Foundation.\r
+ ~\r
+ ~ Licensed under the Apache License, Version 2.0 (the "License");\r
+ ~ you may not use this file except in compliance with the License.\r
+ ~ You may obtain a copy of the License at\r
+ ~\r
+ ~ http://www.apache.org/licenses/LICENSE-2.0\r
+ ~\r
+ ~ Unless required by applicable law or agreed to in writing, software\r
+ ~ distributed under the License is distributed on an "AS IS" BASIS,\r
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ ~ See the License for the specific language governing permissions and\r
+ ~ limitations under the License.\r
+ -->\r
+\r
+<metadata>\r
+ <groupId>org.apache.maven.test</groupId>\r
+ <artifactId>get-updated-metadata</artifactId>\r
+ <version>1.0-SNAPSHOT</version>\r
+ <versioning>\r
+ <snapshot>\r
+ <timestamp>20050831.1011112</timestamp>\r
+ <buildNumber>1</buildNumber>\r
+ </snapshot>\r
+ </versioning>\r
+</metadata>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-updated-metadata</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ <versioning>
+ <snapshot>
+ <timestamp>20050831.1011112</timestamp>
+ <buildNumber>1</buildNumber>
+ </snapshot>
+ </versioning>
+</metadata>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-updated-metadata</artifactId>
+ <versioning>
+ <versions>
+ <version>1.0</version>
+ </versions>
+ </versioning>
+</metadata>
\ No newline at end of file
--- /dev/null
+get-bad-local-checksum-1.0.jar\r
+(proxied 1)\r
+\r
--- /dev/null
+get-checksum-both-bad-1.0.jar\r
+\r
--- /dev/null
+invalid checksum file
\ No newline at end of file
--- /dev/null
+invalid checksum file\r
--- /dev/null
+get-checksum-both-right-1.0.jar\r
--- /dev/null
+e58f30c6a150a2e843552438d18e15cb *get-checksum-both-right-1.0.jar
\ No newline at end of file
--- /dev/null
+066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar\r
--- /dev/null
+066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-from-managed-repo-1.0.jar\r
+(proxied 1)\r
--- /dev/null
+get-checksum-md5-bad-sha1-1.0.jar\r
+\r
--- /dev/null
+8a02aa67549d27b2a03cd4547439c6d3 *get-checksum-md5-bad-sha1-1.0.jar
\ No newline at end of file
--- /dev/null
+invalid checksum file\r
--- /dev/null
+get-checksum-md5-only-1.0.jar
+
--- /dev/null
+f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar\r
--- /dev/null
+get-checksum-sha1-bad-md5-1.0.jar
+
--- /dev/null
+invalid checksum file\r
--- /dev/null
+3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar\r
--- /dev/null
+get-checksum-sha1-only-1.0.jar
+
--- /dev/null
+748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar
--- /dev/null
+get-default-layout-present-1.0.jar
+(proxied 1)
+
--- /dev/null
+get-default-layout-1.0.jar
+
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-default-metadata</artifactId>
+ <version>1.0</version>
+</metadata>
\ No newline at end of file
--- /dev/null
+get-in-both-proxies-1.0.jar
+(proxied 1)
+
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-merged-metadata</artifactId>
+ <versioning>
+ <versions>
+ <version>2.0</version>
+ <!-- merge with managed -->
+ <version>3.0</version>
+ <!-- merge with proxied2 -->
+ <version>5.0</version>
+ <!-- unique -->
+ </versions>
+ </versioning>
+</metadata>
\ No newline at end of file
--- /dev/null
+get-metadata-snapshot-1.0-SNAPSHOT.jar
\ No newline at end of file
--- /dev/null
+get-present-metadata-snapshot-1.0-20050831.101112-1.jar\r
+(proxied 1)
\ No newline at end of file
--- /dev/null
+get-present-timestamped-snapshot-1.0-SNAPSHOT.jar\r
+(proxied 1)
\ No newline at end of file
--- /dev/null
+get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar\r
+(proxied 1)
\ No newline at end of file
--- /dev/null
+get-timestamped-snapshot-1.0-SNAPSHOT.jar
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-updated-metadata</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ <versioning>
+ <snapshot>
+ <timestamp>20050831.111213</timestamp>
+ <buildNumber>2</buildNumber>
+ </snapshot>
+ </versioning>
+</metadata>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-updated-metadata</artifactId>
+ <versioning>
+ <versions>
+ <version>1.0</version>
+ <version>2.0</version>
+ </versions>
+ </versioning>
+</metadata>
\ No newline at end of file
--- /dev/null
+get-in-both-proxies-1.0.jar
+(proxied 2)
+
--- /dev/null
+get-in-second-proxy-1.0.jar
+
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.test</groupId>
+ <artifactId>get-merged-metadata</artifactId>
+ <versioning>
+ <versions>
+ <version>1.0</version>
+ <!-- merged with managed -->
+ <version>3.0</version>
+ <!-- merged with proxied1 -->
+ <version>4.0</version>
+ <!-- unique -->
+ </versions>
+ </versioning>
+</metadata>
\ No newline at end of file
--- /dev/null
+get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar\r
+(proxied 2)
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.wagon.Wagon</role>
+ <role-hint>test</role-hint>
+ <implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
+ </component>
+ <component>
+ <role>org.codehaus.plexus.logging.LoggerManager</role>
+ <implementation>org.codehaus.plexus.logging.console.ConsoleLoggerManager</implementation>
+ <lifecycle-handler>basic</lifecycle-handler>
+ <configuration>
+ <threshold>ERROR</threshold>
+ </configuration>
+ </component>
+ </components>
+</component-set>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <parent>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva</artifactId>
- <version>1.0-SNAPSHOT</version>
- </parent>
- <modelVersion>4.0.0</modelVersion>
- <artifactId>archiva-indexer</artifactId>
- <name>Archiva Indexer</name>
- <dependencies>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-artifact</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-artifact-manager</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-project</artifactId>
- <exclusions>
- <exclusion>
- <groupId>org.codehaus.plexus.cache</groupId>
- <artifactId>plexus-cache-hashmap</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-model</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.lucene</groupId>
- <artifactId>lucene-core</artifactId>
- <version>2.0.0</version>
- </dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-utils</artifactId>
- </dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-container-default</artifactId>
- </dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-digest</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-repository-metadata</artifactId>
- </dependency>
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- </dependency>
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>cobertura-maven-plugin</artifactId>
- <configuration>
- <check>
- <!-- TODO: increase coverage -->
- <totalLineRate>80</totalLineRate>
- <totalBranchRate>80</totalBranchRate>
- </check>
- </configuration>
- </plugin>
- </plugins>
- </build>
-</project>
+++ /dev/null
-package org.apache.maven.archiva.indexer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.indexer.query.Query;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.artifact.Artifact;
-
-import java.util.Collection;
-import java.util.List;
-
-/**
- * Maintain an artifact index on the repository.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryArtifactIndex
-{
- /**
- * Indexes the artifacts found within the specified list of index records. If the artifacts are already in the
- * repository they are updated.
- *
- * @param records the records to index
- * @throws RepositoryIndexException if there is a problem indexing the records
- */
- void indexRecords( Collection records )
- throws RepositoryIndexException;
-
- /**
- * Search the index based on the search criteria specified. Returns a list of index records.
- *
- * @param query The query that contains the search criteria
- * @return the index records found
- * @throws RepositoryIndexSearchException if there is a problem searching
- * @todo should it return "SearchResult" instances that contain the index record and other search data (like score?)
- */
- List search( Query query )
- throws RepositoryIndexSearchException;
-
- /**
- * Check if the index already exists.
- *
- * @return true if the index already exists
- * @throws RepositoryIndexException if the index location is not valid
- */
- boolean exists()
- throws RepositoryIndexException;
-
- /**
- * Delete records from the index. Simply ignore the request any did not exist.
- *
- * @param records the records to delete
- * @throws RepositoryIndexException if there is a problem removing the record
- */
- void deleteRecords( Collection records )
- throws RepositoryIndexException;
-
- /**
- * Retrieve all records in the index.
- *
- * @return the records
- * @throws RepositoryIndexSearchException if there was an error searching the index
- */
- Collection getAllRecords()
- throws RepositoryIndexSearchException;
-
- /**
- * Retrieve all primary keys of records in the index.
- *
- * @return the keys
- * @throws RepositoryIndexException if there was an error searching the index
- */
- Collection getAllRecordKeys()
- throws RepositoryIndexException;
-
- /**
- * Indexes the artifact specified. If the artifact is already in the repository they it is updated.
- * This method should use less memory than indexRecords as the records can be created and disposed of on the fly.
- *
- * @param artifact the artifact to index
- * @param factory the artifact to record factory
- * @throws RepositoryIndexException if there is a problem indexing the artifacts
- */
- void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
- throws RepositoryIndexException;
-
- /**
- * Indexes the artifacts found within the specified list. If the artifacts are already in the
- * repository they are updated. This method should use less memory than indexRecords as the records can be
- * created and disposed of on the fly.
- *
- * @param artifacts the artifacts to index
- * @param factory the artifact to record factory
- * @throws RepositoryIndexException if there is a problem indexing the artifacts
- */
- void indexArtifacts( List artifacts, RepositoryIndexRecordFactory factory )
- throws RepositoryIndexException;
-
- /**
- * Get all the group IDs in the index.
- *
- * @return list of groups as strings
- * @throws RepositoryIndexException if there is a problem searching for the group ID
- */
- List getAllGroupIds()
- throws RepositoryIndexException;
-
- /**
- * Get the list of artifact IDs in a group in the index.
- *
- * @param groupId the group ID to search
- * @return the list of artifact ID strings
- * @throws RepositoryIndexSearchException if there is a problem searching for the group ID
- */
- List getArtifactIds( String groupId )
- throws RepositoryIndexSearchException;
-
- /**
- * Get the list of available versions for a given artifact.
- *
- * @param groupId the group ID to search for
- * @param artifactId the artifact ID to search for
- * @return the list of version strings
- * @throws RepositoryIndexSearchException if there is a problem searching for the artifact
- */
- List getVersions( String groupId, String artifactId )
- throws RepositoryIndexSearchException;
-
- /**
- * Get the time when the index was last updated. Note that this does not monitor external processes.
- *
- * @return the last updated time, or 0 if it has not been updated since the class was instantiated.
- */
- long getLastUpdatedTime();
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-
-/**
- * Obtain an index instance.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryArtifactIndexFactory
-{
- /**
- * Plexus role.
- */
- String ROLE = RepositoryArtifactIndexFactory.class.getName();
-
- /**
- * Method to create an instance of the standard index.
- *
- * @param indexPath the path where the index will be created/updated
- * @return the index instance
- */
- RepositoryArtifactIndex createStandardIndex( File indexPath );
-
- /**
- * Method to create an instance of the minimal index.
- *
- * @param indexPath the path where the index will be created/updated
- * @return the index instance
- */
- RepositoryArtifactIndex createMinimalIndex( File indexPath );
-}
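// Not part of the original patch: a sketch of obtaining index instances through the
// factory above. The Plexus lookup is only implied; the index paths are placeholders.
import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;

import java.io.File;

public class IndexFactoryUsageSketch
{
    public RepositoryArtifactIndex createIndexes( RepositoryArtifactIndexFactory factory )
    {
        // in a Plexus container the factory would be looked up by RepositoryArtifactIndexFactory.ROLE
        File standardPath = new File( "target/index" );
        File minimalPath = new File( "target/index-minimal" );

        RepositoryArtifactIndex minimal = factory.createMinimalIndex( minimalPath );
        // the minimal index only carries filename, size, checksum and class data

        return factory.createStandardIndex( standardPath );
    }
}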
+++ /dev/null
-package org.apache.maven.archiva.indexer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class RepositoryIndexException
- extends Exception
-{
- public RepositoryIndexException( String message, Throwable cause )
- {
- super( message, cause );
- }
-
- public RepositoryIndexException( String message )
- {
- super( message );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Brett Porter
- */
-public class RepositoryIndexSearchException
- extends Exception
-{
- public RepositoryIndexSearchException( String message, Throwable cause )
- {
- super( message, cause );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.lucene.document.Document;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
-
-import java.text.ParseException;
-
-/**
- * Converts repository records to Lucene documents.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface LuceneIndexRecordConverter
-{
- /**
- * Convert an index record to a Lucene document.
- *
- * @param record the record
- * @return the document
- */
- Document convert( RepositoryIndexRecord record );
-
- /**
- * Convert a Lucene document to an index record.
- *
- * @param document the document
- * @return the record
- * @throws java.text.ParseException if there is a problem parsing a field (specifically, dates)
- */
- RepositoryIndexRecord convert( Document document )
- throws ParseException;
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.lucene.document.DateTools;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.NumberTools;
-import org.apache.maven.archiva.indexer.record.MinimalArtifactIndexRecord;
-import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
-
-import java.text.ParseException;
-import java.util.Arrays;
-
-/**
- * Convert the minimal index record to a Lucene document.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneMinimalIndexRecordConverter
- implements LuceneIndexRecordConverter
-{
- public Document convert( RepositoryIndexRecord record )
- {
- MinimalArtifactIndexRecord rec = (MinimalArtifactIndexRecord) record;
-
- Document document = new Document();
- addTokenizedField( document, MinimalIndexRecordFields.FILENAME, rec.getFilename() );
- addUntokenizedField( document, MinimalIndexRecordFields.LAST_MODIFIED,
- DateTools.timeToString( rec.getLastModified(), DateTools.Resolution.SECOND ) );
- addUntokenizedField( document, MinimalIndexRecordFields.FILE_SIZE, NumberTools.longToString( rec.getSize() ) );
- addUntokenizedField( document, MinimalIndexRecordFields.MD5, rec.getMd5Checksum() );
- addTokenizedField( document, MinimalIndexRecordFields.CLASSES,
- StringUtils.join( rec.getClasses().iterator(), "\n" ) );
-
- return document;
- }
-
- public RepositoryIndexRecord convert( Document document )
- throws ParseException
- {
- MinimalArtifactIndexRecord record = new MinimalArtifactIndexRecord();
-
- record.setFilename( document.get( MinimalIndexRecordFields.FILENAME ) );
- record.setLastModified( DateTools.stringToTime( document.get( MinimalIndexRecordFields.LAST_MODIFIED ) ) );
- record.setSize( NumberTools.stringToLong( document.get( MinimalIndexRecordFields.FILE_SIZE ) ) );
- record.setMd5Checksum( document.get( MinimalIndexRecordFields.MD5 ) );
- record.setClasses( Arrays.asList( document.get( MinimalIndexRecordFields.CLASSES ).split( "\n" ) ) );
-
- return record;
- }
-
- private static void addUntokenizedField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
- }
- }
-
- private static void addTokenizedField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.YES, Field.Index.TOKENIZED ) );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.indexer.query.Query;
-
-/**
- * A holder for a lucene query to pass to the indexer API.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneQuery
- implements Query
-{
- private final org.apache.lucene.search.Query query;
-
- public LuceneQuery( org.apache.lucene.search.Query query )
- {
- this.query = query;
- }
-
- org.apache.lucene.search.Query getLuceneQuery()
- {
- return query;
- }
-}
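// Not part of the original patch: a sketch of wrapping a raw Lucene query in LuceneQuery
// before passing it to RepositoryArtifactIndex.search(). The field constant comes from
// StandardIndexRecordFields; the search value "archiva" is a placeholder.
import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;

import java.util.List;

public class LuceneQueryUsageSketch
{
    public List searchByArtifactId( RepositoryArtifactIndex index )
        throws RepositoryIndexSearchException
    {
        TermQuery luceneQuery =
            new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID, "archiva" ) );

        // the wrapper keeps Lucene types out of the indexer's public API
        return index.search( new LuceneQuery( luceneQuery ) );
    }
}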
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.CharTokenizer;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexModifier;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermEnum;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.Hits;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.indexer.query.Query;
-import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-import org.apache.maven.artifact.Artifact;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.Reader;
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Set;
-
-/**
- * Lucene implementation of a repository index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneRepositoryArtifactIndex
- implements RepositoryArtifactIndex
-{
- /**
- * The location of the index on the file system.
- */
- private File indexLocation;
-
- /**
- * Convert repository records to Lucene documents.
- */
- private LuceneIndexRecordConverter converter;
-
- private static final String FLD_PK = "pk";
-
- private static Analyzer luceneAnalyzer = new LuceneAnalyzer();
-
- private static long lastUpdatedTime = 0;
-
- public LuceneRepositoryArtifactIndex( File indexPath, LuceneIndexRecordConverter converter )
- {
- this.indexLocation = indexPath;
- this.converter = converter;
- }
-
- public void indexRecords( Collection records )
- throws RepositoryIndexException
- {
- deleteRecords( records );
-
- addRecords( records );
- }
-
- private void addRecords( Collection records )
- throws RepositoryIndexException
- {
- IndexWriter indexWriter;
- try
- {
- indexWriter = new IndexWriter( indexLocation, getAnalyzer(), !exists() );
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Unable to open index", e );
- }
-
- try
- {
- for ( Iterator i = records.iterator(); i.hasNext(); )
- {
- RepositoryIndexRecord record = (RepositoryIndexRecord) i.next();
-
- if ( record != null )
- {
- Document document = converter.convert( record );
- document.add(
- new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
-
- indexWriter.addDocument( document );
- }
- }
-
- indexWriter.optimize();
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Failed to add an index document", e );
- }
- finally
- {
- closeQuietly( indexWriter );
- lastUpdatedTime = System.currentTimeMillis();
- }
- }
-
- public static Analyzer getAnalyzer()
- {
- return luceneAnalyzer;
- }
-
- private static class LuceneAnalyzer
- extends Analyzer
- {
- private static final Analyzer STANDARD = new StandardAnalyzer();
-
- public TokenStream tokenStream( String field, final Reader reader )
- {
- // tokenize the dependencies field on newlines only
- if ( StandardIndexRecordFields.DEPENDENCIES.equals( field ) )
- {
- return new CharTokenizer( reader )
- {
- protected boolean isTokenChar( char c )
- {
- return c != '\n';
- }
- };
- }
- else if ( StandardIndexRecordFields.FILES.equals( field ) )
- {
- return new CharTokenizer( reader )
- {
- protected boolean isTokenChar( char c )
- {
- return c != '\n' && c != '/';
- }
- };
- }
- else
- if ( StandardIndexRecordFields.CLASSES.equals( field ) || MinimalIndexRecordFields.CLASSES.equals( field ) )
- {
- return new CharTokenizer( reader )
- {
- protected boolean isTokenChar( char c )
- {
- return c != '\n' && c != '.';
- }
-
- protected char normalize( char c )
- {
- return Character.toLowerCase( c );
- }
- };
- }
- else if ( StandardIndexRecordFields.GROUPID.equals( field ) )
- {
- return new CharTokenizer( reader )
- {
- protected boolean isTokenChar( char c )
- {
- return c != '.';
- }
-
- protected char normalize( char c )
- {
- return Character.toLowerCase( c );
- }
- };
- }
- else if ( StandardIndexRecordFields.VERSION.equals( field ) ||
- StandardIndexRecordFields.BASE_VERSION.equals( field ) )
- {
- return new CharTokenizer( reader )
- {
- protected boolean isTokenChar( char c )
- {
- return c != '-';
- }
- };
- }
- else if ( StandardIndexRecordFields.FILENAME.equals( field ) ||
- MinimalIndexRecordFields.FILENAME.equals( field ) )
- {
- return new CharTokenizer( reader )
- {
- protected boolean isTokenChar( char c )
- {
- return c != '-' && c != '.' && c != '/';
- }
- };
- }
- else
- {
- // use standard analyzer
- return STANDARD.tokenStream( field, reader );
- }
- }
- }
-
- public void deleteRecords( Collection records )
- throws RepositoryIndexException
- {
- if ( exists() )
- {
- IndexReader indexReader = null;
- try
- {
- indexReader = IndexReader.open( indexLocation );
-
- for ( Iterator i = records.iterator(); i.hasNext(); )
- {
- RepositoryIndexRecord record = (RepositoryIndexRecord) i.next();
-
- if ( record != null )
- {
- Term term = new Term( FLD_PK, record.getPrimaryKey() );
-
- indexReader.deleteDocuments( term );
- }
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( indexReader );
- }
- }
- }
-
- public Collection getAllRecords()
- throws RepositoryIndexSearchException
- {
- return search( new LuceneQuery( new MatchAllDocsQuery() ) );
- }
-
- public Collection getAllRecordKeys()
- throws RepositoryIndexException
- {
- return getAllFieldValues( FLD_PK );
- }
-
- private List getAllFieldValues( String fieldName )
- throws RepositoryIndexException
- {
- List keys = new ArrayList();
-
- if ( exists() )
- {
- IndexReader indexReader = null;
- TermEnum terms = null;
- try
- {
- indexReader = IndexReader.open( indexLocation );
-
- terms = indexReader.terms( new Term( fieldName, "" ) );
- while ( fieldName.equals( terms.term().field() ) )
- {
- keys.add( terms.term().text() );
-
- if ( !terms.next() )
- {
- break;
- }
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( indexReader );
- closeQuietly( terms );
- }
- }
- return keys;
- }
-
- public void indexArtifacts( List artifacts, RepositoryIndexRecordFactory factory )
- throws RepositoryIndexException
- {
- IndexModifier indexModifier = null;
- try
- {
- indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- if ( record != null )
- {
- Term term = new Term( FLD_PK, record.getPrimaryKey() );
-
- indexModifier.deleteDocuments( term );
-
- Document document = converter.convert( record );
- document.add(
- new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
-
- indexModifier.addDocument( document );
- }
- }
- indexModifier.optimize();
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( indexModifier );
- lastUpdatedTime = System.currentTimeMillis();
- }
- }
-
- public void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
- throws RepositoryIndexException
- {
- IndexModifier indexModifier = null;
- try
- {
- indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- if ( record != null )
- {
- Term term = new Term( FLD_PK, record.getPrimaryKey() );
-
- indexModifier.deleteDocuments( term );
-
- Document document = converter.convert( record );
- document.add( new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
-
- indexModifier.addDocument( document );
- }
- indexModifier.optimize();
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( indexModifier );
- lastUpdatedTime = System.currentTimeMillis();
- }
- }
-
- public List getAllGroupIds()
- throws RepositoryIndexException
- {
- return getAllFieldValues( StandardIndexRecordFields.GROUPID_EXACT );
- }
-
- public List getArtifactIds( String groupId )
- throws RepositoryIndexSearchException
- {
- return searchField( new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) ),
- StandardIndexRecordFields.ARTIFACTID );
- }
-
- public List getVersions( String groupId, String artifactId )
- throws RepositoryIndexSearchException
- {
- BooleanQuery query = new BooleanQuery();
- query.add( new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) ),
- BooleanClause.Occur.MUST );
- query.add( new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, artifactId ) ),
- BooleanClause.Occur.MUST );
-
- return searchField( query, StandardIndexRecordFields.VERSION );
- }
-
- public long getLastUpdatedTime()
- {
- return lastUpdatedTime;
- }
-
- private List searchField( org.apache.lucene.search.Query luceneQuery, String fieldName )
- throws RepositoryIndexSearchException
- {
- Set results = new LinkedHashSet();
-
- IndexSearcher searcher;
- try
- {
- searcher = new IndexSearcher( indexLocation.getAbsolutePath() );
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexSearchException( "Unable to open index: " + e.getMessage(), e );
- }
-
- try
- {
- Hits hits = searcher.search( luceneQuery );
- for ( int i = 0; i < hits.length(); i++ )
- {
- Document doc = hits.doc( i );
-
- results.add( doc.get( fieldName ) );
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( searcher );
- }
- return new ArrayList( results );
- }
-
- public boolean exists()
- throws RepositoryIndexException
- {
- if ( IndexReader.indexExists( indexLocation ) )
- {
- return true;
- }
- else if ( !indexLocation.exists() )
- {
- return false;
- }
- else if ( indexLocation.isDirectory() )
- {
- if ( indexLocation.listFiles().length > 1 )
- {
- throw new RepositoryIndexException( indexLocation + " is not a valid index directory." );
- }
- else
- {
- return false;
- }
- }
- else
- {
- throw new RepositoryIndexException( indexLocation + " is not a directory." );
- }
- }
-
- public List search( Query query )
- throws RepositoryIndexSearchException
- {
- LuceneQuery lQuery = (LuceneQuery) query;
-
- org.apache.lucene.search.Query luceneQuery = lQuery.getLuceneQuery();
-
- IndexSearcher searcher;
- try
- {
- searcher = new IndexSearcher( indexLocation.getAbsolutePath() );
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexSearchException( "Unable to open index: " + e.getMessage(), e );
- }
-
- List records = new ArrayList();
- try
- {
- Hits hits = searcher.search( luceneQuery );
- for ( int i = 0; i < hits.length(); i++ )
- {
- Document doc = hits.doc( i );
-
- records.add( converter.convert( doc ) );
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
- }
- catch ( ParseException e )
- {
- throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( searcher );
- }
-
- return records;
- }
-
- private static void closeQuietly( IndexSearcher searcher )
- {
- try
- {
- if ( searcher != null )
- {
- searcher.close();
- }
- }
- catch ( IOException e )
- {
- // ignore
- }
- }
-
- private static void closeQuietly( TermEnum terms )
- throws RepositoryIndexException
- {
- if ( terms != null )
- {
- try
- {
- terms.close();
- }
- catch ( IOException e )
- {
- // ignore
- }
- }
- }
-
- private static void closeQuietly( IndexWriter indexWriter )
- throws RepositoryIndexException
- {
- try
- {
- if ( indexWriter != null )
- {
- indexWriter.close();
- }
- }
- catch ( IOException e )
- {
- // the writer should complain if it cannot be closed; data was probably not persisted
- throw new RepositoryIndexException( e.getMessage(), e );
- }
- }
-
- private static void closeQuietly( IndexModifier indexModifier )
- {
- if ( indexModifier != null )
- {
- try
- {
- indexModifier.close();
- }
- catch ( IOException e )
- {
- // ignore
- }
- }
- }
-
- private static void closeQuietly( IndexReader reader )
- {
- try
- {
- if ( reader != null )
- {
- reader.close();
- }
- }
- catch ( IOException e )
- {
- // ignore
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-
-import java.io.File;
-
-/**
- * Factory for Lucene artifact index instances.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory" role-hint="lucene"
- */
-public class LuceneRepositoryArtifactIndexFactory
- implements RepositoryArtifactIndexFactory
-{
- public RepositoryArtifactIndex createStandardIndex( File indexPath )
- {
- return new LuceneRepositoryArtifactIndex( indexPath, new LuceneStandardIndexRecordConverter() );
- }
-
- public RepositoryArtifactIndex createMinimalIndex( File indexPath )
- {
- return new LuceneRepositoryArtifactIndex( indexPath, new LuceneMinimalIndexRecordConverter() );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.lucene.document.DateTools;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.NumberTools;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
-import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
-import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-
-import java.text.ParseException;
-import java.util.Arrays;
-
-/**
- * Convert the standard index record to a Lucene document.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneStandardIndexRecordConverter
- implements LuceneIndexRecordConverter
-{
- public Document convert( RepositoryIndexRecord record )
- {
- StandardArtifactIndexRecord rec = (StandardArtifactIndexRecord) record;
-
- Document document = new Document();
- addTokenizedField( document, StandardIndexRecordFields.FILENAME, rec.getFilename() );
- addTokenizedField( document, StandardIndexRecordFields.GROUPID, rec.getGroupId() );
- addExactField( document, StandardIndexRecordFields.GROUPID_EXACT, rec.getGroupId() );
- addTokenizedField( document, StandardIndexRecordFields.ARTIFACTID, rec.getArtifactId() );
- addExactField( document, StandardIndexRecordFields.ARTIFACTID_EXACT, rec.getArtifactId() );
- addTokenizedField( document, StandardIndexRecordFields.VERSION, rec.getVersion() );
- addExactField( document, StandardIndexRecordFields.VERSION_EXACT, rec.getVersion() );
- addTokenizedField( document, StandardIndexRecordFields.BASE_VERSION, rec.getBaseVersion() );
- addExactField( document, StandardIndexRecordFields.BASE_VERSION_EXACT, rec.getBaseVersion() );
- addUntokenizedField( document, StandardIndexRecordFields.TYPE, rec.getType() );
- addTokenizedField( document, StandardIndexRecordFields.CLASSIFIER, rec.getClassifier() );
- addUntokenizedField( document, StandardIndexRecordFields.PACKAGING, rec.getPackaging() );
- addUntokenizedField( document, StandardIndexRecordFields.REPOSITORY, rec.getRepository() );
- addUntokenizedField( document, StandardIndexRecordFields.LAST_MODIFIED,
- DateTools.timeToString( rec.getLastModified(), DateTools.Resolution.SECOND ) );
- addUntokenizedField( document, StandardIndexRecordFields.FILE_SIZE, NumberTools.longToString( rec.getSize() ) );
- addUntokenizedField( document, StandardIndexRecordFields.MD5, rec.getMd5Checksum() );
- addUntokenizedField( document, StandardIndexRecordFields.SHA1, rec.getSha1Checksum() );
- if ( rec.getClasses() != null )
- {
- addTokenizedField( document, StandardIndexRecordFields.CLASSES,
- StringUtils.join( rec.getClasses().iterator(), "\n" ) );
- }
- if ( rec.getFiles() != null )
- {
- addTokenizedField( document, StandardIndexRecordFields.FILES,
- StringUtils.join( rec.getFiles().iterator(), "\n" ) );
- }
- addUntokenizedField( document, StandardIndexRecordFields.PLUGIN_PREFIX, rec.getPluginPrefix() );
- addUntokenizedField( document, StandardIndexRecordFields.INCEPTION_YEAR, rec.getInceptionYear() );
- addTokenizedField( document, StandardIndexRecordFields.PROJECT_NAME, rec.getProjectName() );
- addTokenizedField( document, StandardIndexRecordFields.PROJECT_DESCRIPTION, rec.getProjectDescription() );
- if ( rec.getDependencies() != null )
- {
- addTokenizedField( document, StandardIndexRecordFields.DEPENDENCIES,
- StringUtils.join( rec.getDependencies().iterator(), "\n" ) );
- }
- if ( rec.getDevelopers() != null )
- {
- addTokenizedField( document, StandardIndexRecordFields.DEVELOPERS,
- StringUtils.join( rec.getDevelopers().iterator(), "\n" ) );
- }
-/* TODO: add later
- document.add( Field.Keyword( StandardIndexRecordFields.FLD_LICENSE_URLS, "" ) );
- document.add( Field.Keyword( StandardIndexRecordFields.FLD_PLUGINS_REPORT, "" ) );
- document.add( Field.Keyword( StandardIndexRecordFields.FLD_PLUGINS_BUILD, "" ) );
-*/
-
- return document;
- }
-
- public RepositoryIndexRecord convert( Document document )
- throws ParseException
- {
- StandardArtifactIndexRecord record = new StandardArtifactIndexRecord();
-
- record.setFilename( document.get( StandardIndexRecordFields.FILENAME ) );
- record.setGroupId( document.get( StandardIndexRecordFields.GROUPID ) );
- record.setArtifactId( document.get( StandardIndexRecordFields.ARTIFACTID ) );
- record.setVersion( document.get( StandardIndexRecordFields.VERSION ) );
- record.setBaseVersion( document.get( StandardIndexRecordFields.BASE_VERSION ) );
- record.setType( document.get( StandardIndexRecordFields.TYPE ) );
- record.setClassifier( document.get( StandardIndexRecordFields.CLASSIFIER ) );
- record.setPackaging( document.get( StandardIndexRecordFields.PACKAGING ) );
- record.setRepository( document.get( StandardIndexRecordFields.REPOSITORY ) );
- record.setLastModified( DateTools.stringToTime( document.get( StandardIndexRecordFields.LAST_MODIFIED ) ) );
- record.setSize( NumberTools.stringToLong( document.get( StandardIndexRecordFields.FILE_SIZE ) ) );
- record.setMd5Checksum( document.get( StandardIndexRecordFields.MD5 ) );
- record.setSha1Checksum( document.get( StandardIndexRecordFields.SHA1 ) );
- String classes = document.get( StandardIndexRecordFields.CLASSES );
- if ( classes != null )
- {
- record.setClasses( Arrays.asList( classes.split( "\n" ) ) );
- }
- String files = document.get( StandardIndexRecordFields.FILES );
- if ( files != null )
- {
- record.setFiles( Arrays.asList( files.split( "\n" ) ) );
- }
- String dependencies = document.get( StandardIndexRecordFields.DEPENDENCIES );
- if ( dependencies != null )
- {
- record.setDependencies( Arrays.asList( dependencies.split( "\n" ) ) );
- }
- String developers = document.get( StandardIndexRecordFields.DEVELOPERS );
- if ( developers != null )
- {
- record.setDevelopers( Arrays.asList( developers.split( "\n" ) ) );
- }
- record.setPluginPrefix( document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
- record.setInceptionYear( document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
- record.setProjectName( document.get( StandardIndexRecordFields.PROJECT_NAME ) );
- record.setProjectDescription( document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
-
- return record;
- }
-
- private static void addUntokenizedField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
- }
- }
-
- private static void addExactField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.NO, Field.Index.UN_TOKENIZED ) );
- }
- }
-
- private static void addTokenizedField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.YES, Field.Index.TOKENIZED ) );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Class to hold multiple SingleTermQueries and/or other CompoundQueries.
- *
- * @author Edwin Punzalan
- */
-public class CompoundQuery
- implements Query
-{
- /**
- * The query terms.
- */
- private final List compoundQueryTerms = new ArrayList();
-
- /**
- * Appends a required term to this query.
- *
- * @param term the term to be appended to this query
- */
- public void and( QueryTerm term )
- {
- compoundQueryTerms.add( CompoundQueryTerm.and( new SingleTermQuery( term ) ) );
- }
-
- /**
- * Appends an optional term to this query.
- *
- * @param term the term to be appended to this query
- */
- public void or( QueryTerm term )
- {
- compoundQueryTerms.add( CompoundQueryTerm.or( new SingleTermQuery( term ) ) );
- }
-
- /**
- * Appends a prohibited term to this query.
- *
- * @param term the term to be appended to this query
- */
- public void not( QueryTerm term )
- {
- compoundQueryTerms.add( CompoundQueryTerm.not( new SingleTermQuery( term ) ) );
- }
-
- /**
- * Appends a required subquery to this query.
- *
- * @param query the subquery to be appended to this query
- */
- public void and( Query query )
- {
- compoundQueryTerms.add( CompoundQueryTerm.and( query ) );
- }
-
- /**
- * Appends an optional subquery to this query.
- *
- * @param query the subquery to be appended to this query
- */
- public void or( Query query )
- {
- compoundQueryTerms.add( CompoundQueryTerm.or( query ) );
- }
-
- /**
- * Appends a prohibited subquery to this query.
- *
- * @param query the subquery to be appended to this query
- */
- public void not( Query query )
- {
- compoundQueryTerms.add( CompoundQueryTerm.not( query ) );
- }
-
- /**
- * Method to get the List of Queries appended into this
- *
- * @return List of all Queries added to this Query
- */
- public List getCompoundQueryTerms()
- {
- return compoundQueryTerms;
- }
-
-}
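// Not part of the original patch: a sketch of composing a CompoundQuery from QueryTerm
// instances. Field constants are from StandardIndexRecordFields; the values are placeholders.
import org.apache.maven.archiva.indexer.query.CompoundQuery;
import org.apache.maven.archiva.indexer.query.QueryTerm;
import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;

public class CompoundQueryUsageSketch
{
    public CompoundQuery buildQuery()
    {
        CompoundQuery query = new CompoundQuery();

        // required clause: the groupId must match
        query.and( new QueryTerm( StandardIndexRecordFields.GROUPID, "org.apache.maven" ) );

        // optional clauses: either artifactId may match
        query.or( new QueryTerm( StandardIndexRecordFields.ARTIFACTID, "maven-model" ) );
        query.or( new QueryTerm( StandardIndexRecordFields.ARTIFACTID, "maven-project" ) );

        // prohibited clause: exclude source attachments
        query.not( new QueryTerm( StandardIndexRecordFields.CLASSIFIER, "sources" ) );

        return query;
    }
}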
+++ /dev/null
-package org.apache.maven.archiva.indexer.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * A single term of a compound query, holding a subquery and whether it is required or prohibited.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class CompoundQueryTerm
-{
- /**
- * The query to add to the compound query.
- */
- private final Query query;
-
- /**
- * Whether the term is required (an AND).
- */
- private final boolean required;
-
- /**
- * Whether the term is prohibited (a NOT).
- */
- private final boolean prohibited;
-
- /**
- * Class constructor
- *
- * @param query the subquery to add
- * @param required whether the term is required (an AND)
- * @param prohibited whether the term is prohibited (a NOT)
- */
- private CompoundQueryTerm( Query query, boolean required, boolean prohibited )
- {
- this.query = query;
- this.prohibited = prohibited;
- this.required = required;
- }
-
- /**
- * Method to test if the Query is a search requirement
- *
- * @return true if this Query is a search requirement, otherwise returns false
- */
- public boolean isRequired()
- {
- return required;
- }
-
- /**
- * Method to test if the Query is prohibited in the search result
- *
- * @return true if this Query is prohibited in the search result
- */
- public boolean isProhibited()
- {
- return prohibited;
- }
-
-
- /**
- * The subquery to execute.
- *
- * @return the query
- */
- public Query getQuery()
- {
- return query;
- }
-
- static CompoundQueryTerm and( Query query )
- {
- return new CompoundQueryTerm( query, true, false );
- }
-
- static CompoundQueryTerm or( Query query )
- {
- return new CompoundQueryTerm( query, false, false );
- }
-
- static CompoundQueryTerm not( Query query )
- {
- return new CompoundQueryTerm( query, false, true );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Marker interface implemented by all query classes.
- *
- * @author Edwin Punzalan
- */
-public interface Query
-{
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Class to hold a single field search condition
- *
- * @author Edwin Punzalan
- */
-public class QueryTerm
-{
- private String field;
-
- private String value;
-
- /**
- * Class constructor
- *
- * @param field the index field to search
- * @param value the index value requirement
- */
- public QueryTerm( String field, String value )
- {
- this.field = field;
- this.value = value;
- }
-
- /**
- * Method to retrieve the name of the index field searched
- *
- * @return the name of the index field
- */
- public String getField()
- {
- return field;
- }
-
- /**
- * Method to retrieve the value used in searching the index field
- *
- * @return the value corresponding to the index field
- */
- public String getValue()
- {
- return value;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Query object that handles range queries (presently used for dates).
- *
- * @author Maria Odea Ching
- * @author Brett Porter
- */
-public class RangeQuery
- implements Query
-{
- /**
- * Whether values equal to the boundaries are included in the query results.
- */
- private final boolean inclusive;
-
- /**
- * The lower bound.
- */
- private final QueryTerm begin;
-
- /**
- * The upper bound.
- */
- private final QueryTerm end;
-
- /**
- * Constructor.
- *
- * @param begin the lower bound
- * @param end the upper bound
- * @param inclusive whether to include the boundaries in the query
- */
- private RangeQuery( QueryTerm begin, QueryTerm end, boolean inclusive )
- {
- this.begin = begin;
- this.end = end;
- this.inclusive = inclusive;
- }
-
- /**
- * Create an open range, including all results.
- *
- * @return the query object
- */
- public static RangeQuery createOpenRange()
- {
- return new RangeQuery( null, null, false );
- }
-
- /**
- * Create a bounded range, excluding the endpoints.
- *
- * @param begin the lower bound value to compare to
- * @param end the upper bound value to compare to
- * @return the query object
- */
- public static RangeQuery createExclusiveRange( QueryTerm begin, QueryTerm end )
- {
- return new RangeQuery( begin, end, false );
- }
-
- /**
- * Create a bounded range, including the endpoints.
- *
- * @param begin the lower bound value to compare to
- * @param end the upper bound value to compare to
- * @return the query object
- */
- public static RangeQuery createInclusiveRange( QueryTerm begin, QueryTerm end )
- {
- return new RangeQuery( begin, end, true );
- }
-
- /**
- * Create a range that is greater than or equal to a given term.
- *
- * @param begin the value to compare to
- * @return the query object
- */
- public static RangeQuery createGreaterThanOrEqualToRange( QueryTerm begin )
- {
- return new RangeQuery( begin, null, true );
- }
-
- /**
- * Create a range that is greater than a given term.
- *
- * @param begin the value to compare to
- * @return the query object
- */
- public static RangeQuery createGreaterThanRange( QueryTerm begin )
- {
- return new RangeQuery( begin, null, false );
- }
-
- /**
- * Create a range that is less than or equal to a given term.
- *
- * @param end the value to compare to
- * @return the query object
- */
- public static RangeQuery createLessThanOrEqualToRange( QueryTerm end )
- {
- return new RangeQuery( null, end, true );
- }
-
- /**
- * Create a range that is less than a given term.
- *
- * @param end the value to compare to
- * @return the query object
- */
- public static RangeQuery createLessThanRange( QueryTerm end )
- {
- return new RangeQuery( null, end, false );
- }
-
- public QueryTerm getBegin()
- {
- return begin;
- }
-
- public QueryTerm getEnd()
- {
- return end;
- }
-
- public boolean isInclusive()
- {
- return inclusive;
- }
-
-}
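// Not part of the original patch: a sketch of the RangeQuery factory methods above. The
// field constant is from StandardIndexRecordFields; how date values are encoded (assumed
// here to be DateTools-style strings, as the Lucene converters store LAST_MODIFIED) is an
// assumption for illustration only.
import org.apache.maven.archiva.indexer.query.QueryTerm;
import org.apache.maven.archiva.indexer.query.RangeQuery;
import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;

public class RangeQueryUsageSketch
{
    public RangeQuery lastModifiedBetween( String fromDate, String toDate )
    {
        QueryTerm begin = new QueryTerm( StandardIndexRecordFields.LAST_MODIFIED, fromDate );
        QueryTerm end = new QueryTerm( StandardIndexRecordFields.LAST_MODIFIED, toDate );

        // include artifacts modified exactly on either boundary
        return RangeQuery.createInclusiveRange( begin, end );
    }

    public RangeQuery modifiedAfter( String fromDate )
    {
        // lower bound only, boundary excluded
        return RangeQuery.createGreaterThanRange(
            new QueryTerm( StandardIndexRecordFields.LAST_MODIFIED, fromDate ) );
    }
}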
+++ /dev/null
-package org.apache.maven.archiva.indexer.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Query for a single term.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class SingleTermQuery
- implements Query
-{
- /**
- * The term to query for.
- */
- private final QueryTerm term;
-
- /**
- * Constructor.
- *
- * @param term the term to query
- */
- public SingleTermQuery( QueryTerm term )
- {
- this.term = term;
- }
-
- /**
- * Shorthand constructor - create a single term query from a field and value
- *
- * @param field the field name
- * @param value the value to check for
- */
- public SingleTermQuery( String field, String value )
- {
- this.term = new QueryTerm( field, value );
- }
-
- public String getField()
- {
- return term.getField();
- }
-
- public String getValue()
- {
- return term.getValue();
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.codehaus.plexus.digest.Digester;
-import org.codehaus.plexus.digest.DigesterException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.List;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
-
-/**
- * Base class for the index record factories.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractArtifactIndexRecordFactory
- extends AbstractLogEnabled
- implements RepositoryIndexRecordFactory
-{
- protected String readChecksum( File file, Digester digester )
- {
- String checksum;
- try
- {
- checksum = digester.calc( file ).toLowerCase();
- }
- catch ( DigesterException e )
- {
- getLogger().error( "Error getting checksum for artifact file, leaving empty in index: " + e.getMessage() );
- checksum = null;
- }
- return checksum;
- }
-
- protected List readFilesInArchive( File file )
- throws IOException
- {
- ZipFile zipFile = new ZipFile( file );
- List files;
- try
- {
- files = new ArrayList( zipFile.size() );
-
- for ( Enumeration entries = zipFile.entries(); entries.hasMoreElements(); )
- {
- ZipEntry entry = (ZipEntry) entries.nextElement();
-
- files.add( entry.getName() );
- }
- }
- finally
- {
- closeQuietly( zipFile );
- }
- return files;
- }
-
- protected static boolean isClass( String name )
- {
- // TODO: verify if class is public or protected (this might require the original ZipEntry)
- return name.endsWith( ".class" ) && name.lastIndexOf( "$" ) < 0;
- }
-
- protected static void closeQuietly( ZipFile zipFile )
- {
- try
- {
- if ( zipFile != null )
- {
- zipFile.close();
- }
- }
- catch ( IOException e )
- {
- // ignored
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.Collection;
-
-/**
- * Filter that removes artifacts already in the index.
- * TODO: we could do timestamp comparisons here
- */
-public class IndexRecordExistsArtifactFilter
- implements ArtifactFilter
-{
- private final Collection keys;
-
- public IndexRecordExistsArtifactFilter( Collection keys )
- {
- this.keys = keys;
- }
-
- public boolean include( Artifact artifact )
- {
- String artifactKey = artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion() +
- ( artifact.getClassifier() != null ? ":" + artifact.getClassifier() : "" );
- return !keys.contains( artifactKey );
- }
-}
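// Not part of the original patch: a sketch of using the filter above to skip artifacts
// that are already known. The keys collection is assumed to hold entries in the
// groupId:artifactId:version[:classifier] form that include() builds; where those keys
// come from is left out of this sketch.
import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

public class ExistsFilterUsageSketch
{
    public List selectNewArtifacts( Collection existingKeys, List discoveredArtifacts )
    {
        ArtifactFilter filter = new IndexRecordExistsArtifactFilter( existingKeys );

        List toIndex = new ArrayList();
        for ( Iterator i = discoveredArtifacts.iterator(); i.hasNext(); )
        {
            Artifact artifact = (Artifact) i.next();
            if ( filter.include( artifact ) )
            {
                toIndex.add( artifact );
            }
        }
        return toIndex;
    }
}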
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.Date;
-import java.util.List;
-
-/**
- * A record with the fields in the minimal index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class MinimalArtifactIndexRecord
- implements RepositoryIndexRecord
-{
- /**
- * The classes in the archive for the artifact, if it is a JAR.
- */
- private List classes;
-
- /**
- * The MD5 checksum of the artifact file.
- */
- private String md5Checksum;
-
- /**
- * The filename of the artifact file (no path).
- */
- private String filename;
-
- /**
- * The timestamp that the artifact file was last modified. Granularity is seconds.
- */
- private long lastModified;
-
- /**
- * The size of the artifact file in bytes.
- */
- private long size;
-
- private static final int MS_PER_SEC = 1000;
-
- public void setClasses( List classes )
- {
- this.classes = classes;
- }
-
- public void setMd5Checksum( String md5Checksum )
- {
- this.md5Checksum = md5Checksum;
- }
-
- public void setFilename( String filename )
- {
- this.filename = filename;
- }
-
- public void setLastModified( long lastModified )
- {
- this.lastModified = lastModified - lastModified % MS_PER_SEC;
- }
-
- public void setSize( long size )
- {
- this.size = size;
- }
-
- public List getClasses()
- {
- return classes;
- }
-
- public String getMd5Checksum()
- {
- return md5Checksum;
- }
-
- public String getFilename()
- {
- return filename;
- }
-
- public long getLastModified()
- {
- return lastModified;
- }
-
- public long getSize()
- {
- return size;
- }
-
- /**
- * @noinspection RedundantIfStatement
- */
- public boolean equals( Object obj )
- {
- if ( this == obj )
- {
- return true;
- }
- if ( obj == null || getClass() != obj.getClass() )
- {
- return false;
- }
-
- MinimalArtifactIndexRecord that = (MinimalArtifactIndexRecord) obj;
-
- if ( lastModified != that.lastModified )
- {
- return false;
- }
- if ( size != that.size )
- {
- return false;
- }
- if ( classes != null ? !classes.equals( that.classes ) : that.classes != null )
- {
- return false;
- }
- if ( !filename.equals( that.filename ) )
- {
- return false;
- }
- if ( md5Checksum != null ? !md5Checksum.equals( that.md5Checksum ) : that.md5Checksum != null )
- {
- return false;
- }
-
- return true;
- }
-
- /**
- * @noinspection UnnecessaryParentheses
- */
- public int hashCode()
- {
- int result = classes != null ? classes.hashCode() : 0;
- result = 31 * result + ( md5Checksum != null ? md5Checksum.hashCode() : 0 );
- result = 31 * result + filename.hashCode();
- result = 31 * result + (int) ( lastModified ^ ( lastModified >>> 32 ) );
- result = 31 * result + (int) ( size ^ ( size >>> 32 ) );
- return result;
- }
-
- public String toString()
- {
- return "Filename: " + filename + "; checksum: " + md5Checksum + "; size: " + size + "; lastModified: " +
- new Date( lastModified ) + "; classes: " + classes;
- }
-
- public String getPrimaryKey()
- {
- return filename;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.digest.Digester;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-
-/**
- * An index record factory for the minimal index.
- *
- * @author Edwin Punzalan
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory" role-hint="minimal"
- */
-public class MinimalArtifactIndexRecordFactory
- extends AbstractArtifactIndexRecordFactory
-{
- /* List of types to index. */
- private static final Set INDEXED_TYPES = new HashSet( Arrays.asList( new String[]{"jar", "maven-plugin"} ) );
-
- /**
- * @plexus.requirement role-hint="sha1"
- */
- protected Digester sha1Digester;
-
- /**
- * @plexus.requirement role-hint="md5"
- */
- protected Digester md5Digester;
-
- public RepositoryIndexRecord createRecord( Artifact artifact )
- {
- MinimalArtifactIndexRecord record = null;
-
- File file = artifact.getFile();
- if ( file != null && INDEXED_TYPES.contains( artifact.getType() ) && file.exists() )
- {
- String md5 = readChecksum( file, md5Digester );
-
- List files = null;
- try
- {
- files = readFilesInArchive( file );
- }
- catch ( IOException e )
- {
- getLogger().error( "Error reading artifact file, omitting from index: " + e.getMessage() );
- }
-
- if ( files != null )
- {
- record = new MinimalArtifactIndexRecord();
- record.setMd5Checksum( md5 );
- record.setFilename( artifact.getRepository().pathOf( artifact ) );
- record.setLastModified( file.lastModified() );
- record.setSize( file.length() );
- record.setClasses( getClassesFromFiles( files ) );
- }
- }
- return record;
- }
-
- private List getClassesFromFiles( List files )
- {
- List classes = new ArrayList();
-
- for ( Iterator i = files.iterator(); i.hasNext(); )
- {
- String name = (String) i.next();
-
- if ( isClass( name ) )
- {
- classes.add( name.substring( 0, name.length() - 6 ).replace( '/', '.' ) );
- }
- }
-
- return classes;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * The fields in a minimal artifact index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo should be an enum
- */
-public class MinimalIndexRecordFields
-{
- public static final String FILENAME = "j";
-
- public static final String LAST_MODIFIED = "d";
-
- public static final String FILE_SIZE = "s";
-
- public static final String MD5 = "m";
-
- public static final String CLASSES = "c";
-
- private MinimalIndexRecordFields()
- {
- // No touchy!
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * A repository index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryIndexRecord
-{
- /**
- * Get the primary key used to identify the record uniquely in the index.
- *
- * @return the primary key
- */
- String getPrimaryKey();
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.artifact.Artifact;
-
-/**
- * A factory for creating records for a repository index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryIndexRecordFactory
-{
- /**
- * The Plexus role.
- */
- String ROLE = RepositoryIndexRecordFactory.class.getName();
-
- /**
- * Create an index record from an artifact.
- *
- * @param artifact the artifact
- * @return the index record
- * @throws RepositoryIndexException if there is a problem constructing the record (due to not being able to read the artifact file as a POM)
- */
- RepositoryIndexRecord createRecord( Artifact artifact )
- throws RepositoryIndexException;
-
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * A record with the fields in the standard index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class StandardArtifactIndexRecord
- extends MinimalArtifactIndexRecord
-{
- /**
- * The SHA-1 checksum of the artifact file.
- */
- private String sha1Checksum;
-
- /**
- * The artifact's group.
- */
- private String groupId;
-
- /**
- * The artifact's identifier within the group.
- */
- private String artifactId;
-
- /**
- * The artifact's version.
- */
- private String version;
-
- /**
- * The classifier, if there is one.
- */
- private String classifier;
-
- /**
- * The artifact type (from the file).
- */
- private String type;
-
- /**
- * A list of files (separated by '\n') in the artifact if it is an archive.
- */
- private List files;
-
- /**
- * The identifier of the repository that the artifact came from.
- */
- private String repository;
-
- /**
- * The packaging specified in the POM for this artifact.
- */
- private String packaging;
-
- /**
- * The plugin prefix specified in the metadata if the artifact is a plugin.
- */
- private String pluginPrefix;
-
- /**
- * The year the project was started.
- */
- private String inceptionYear;
-
- /**
- * The description of the project.
- */
- private String projectDescription;
-
- /**
- * The name of the project.
- */
- private String projectName;
-
- /**
- * The base version (before the snapshot is determined).
- */
- private String baseVersion;
-
- /**
- * A list of dependencies for the artifact, each a string of the form <code>groupId:artifactId:version</code>.
- */
- private List dependencies;
-
- /**
- * A list of developers in the POM, each a string of the form <code>id:name:email</code>.
- */
- private List developers;
-
- public void setSha1Checksum( String sha1Checksum )
- {
- this.sha1Checksum = sha1Checksum;
- }
-
- public void setGroupId( String groupId )
- {
- this.groupId = groupId;
- }
-
- public void setArtifactId( String artifactId )
- {
- this.artifactId = artifactId;
- }
-
- public void setVersion( String version )
- {
- this.version = version;
- }
-
- public void setClassifier( String classifier )
- {
- this.classifier = classifier;
- }
-
- public void setType( String type )
- {
- this.type = type;
- }
-
- public void setFiles( List files )
- {
- this.files = files;
- }
-
- public void setRepository( String repository )
- {
- this.repository = repository;
- }
-
- /**
- * @noinspection RedundantIfStatement
- */
- public boolean equals( Object obj )
- {
- if ( this == obj )
- {
- return true;
- }
- if ( obj == null || getClass() != obj.getClass() )
- {
- return false;
- }
- if ( !super.equals( obj ) )
- {
- return false;
- }
-
- StandardArtifactIndexRecord that = (StandardArtifactIndexRecord) obj;
-
- if ( !artifactId.equals( that.artifactId ) )
- {
- return false;
- }
- if ( classifier != null ? !classifier.equals( that.classifier ) : that.classifier != null )
- {
- return false;
- }
-
- if ( dependencies != null && that.dependencies != null )
- {
- List sorted = new ArrayList( dependencies );
- Collections.sort( sorted );
-
- List sortedOther = new ArrayList( that.dependencies );
- Collections.sort( sortedOther );
-
- if ( !sorted.equals( sortedOther ) )
- {
- return false;
- }
- }
- else if ( !( dependencies == null && that.dependencies == null ) )
- {
- return false;
- }
-
- if ( developers != null ? !developers.equals( that.developers ) : that.developers != null )
- {
- return false;
- }
- if ( files != null ? !files.equals( that.files ) : that.files != null )
- {
- return false;
- }
- if ( !groupId.equals( that.groupId ) )
- {
- return false;
- }
- if ( repository != null ? !repository.equals( that.repository ) : that.repository != null )
- {
- return false;
- }
- if ( sha1Checksum != null ? !sha1Checksum.equals( that.sha1Checksum ) : that.sha1Checksum != null )
- {
- return false;
- }
- if ( type != null ? !type.equals( that.type ) : that.type != null )
- {
- return false;
- }
- if ( !version.equals( that.version ) )
- {
- return false;
- }
- if ( !baseVersion.equals( that.baseVersion ) )
- {
- return false;
- }
- if ( packaging != null ? !packaging.equals( that.packaging ) : that.packaging != null )
- {
- return false;
- }
- if ( pluginPrefix != null ? !pluginPrefix.equals( that.pluginPrefix ) : that.pluginPrefix != null )
- {
- return false;
- }
- if ( projectName != null ? !projectName.equals( that.projectName ) : that.projectName != null )
- {
- return false;
- }
- if ( inceptionYear != null ? !inceptionYear.equals( that.inceptionYear ) : that.inceptionYear != null )
- {
- return false;
- }
- if ( projectDescription != null ? !projectDescription.equals( that.projectDescription )
- : that.projectDescription != null )
- {
- return false;
- }
-
- return true;
- }
-
- public int hashCode()
- {
- int result = super.hashCode();
- result = 31 * result + ( sha1Checksum != null ? sha1Checksum.hashCode() : 0 );
- result = 31 * result + groupId.hashCode();
- result = 31 * result + artifactId.hashCode();
- result = 31 * result + version.hashCode();
- result = 31 * result + baseVersion.hashCode();
- result = 31 * result + ( classifier != null ? classifier.hashCode() : 0 );
- result = 31 * result + ( type != null ? type.hashCode() : 0 );
- result = 31 * result + ( files != null ? files.hashCode() : 0 );
- result = 31 * result + ( developers != null ? developers.hashCode() : 0 );
-
- if ( dependencies != null )
- {
- List sorted = new ArrayList( dependencies );
- Collections.sort( sorted );
-
- result = 31 * result + sorted.hashCode();
- }
-
- result = 31 * result + ( repository != null ? repository.hashCode() : 0 );
- result = 31 * result + ( packaging != null ? packaging.hashCode() : 0 );
- result = 31 * result + ( pluginPrefix != null ? pluginPrefix.hashCode() : 0 );
- result = 31 * result + ( inceptionYear != null ? inceptionYear.hashCode() : 0 );
- result = 31 * result + ( projectName != null ? projectName.hashCode() : 0 );
- result = 31 * result + ( projectDescription != null ? projectDescription.hashCode() : 0 );
- return result;
- }
-
- public String getSha1Checksum()
- {
- return sha1Checksum;
- }
-
- public String getGroupId()
- {
- return groupId;
- }
-
- public String getArtifactId()
- {
- return artifactId;
- }
-
- public String getVersion()
- {
- return version;
- }
-
- public String getClassifier()
- {
- return classifier;
- }
-
- public String getType()
- {
- return type;
- }
-
- public List getFiles()
- {
- return files;
- }
-
- public String getRepository()
- {
- return repository;
- }
-
- public String getPackaging()
- {
- return packaging;
- }
-
- public String getPluginPrefix()
- {
- return pluginPrefix;
- }
-
- public void setPackaging( String packaging )
- {
- this.packaging = packaging;
- }
-
- public void setPluginPrefix( String pluginPrefix )
- {
- this.pluginPrefix = pluginPrefix;
- }
-
- public void setInceptionYear( String inceptionYear )
- {
- this.inceptionYear = inceptionYear;
- }
-
- public void setProjectDescription( String description )
- {
- this.projectDescription = description;
- }
-
- public void setProjectName( String projectName )
- {
- this.projectName = projectName;
- }
-
- public String getInceptionYear()
- {
- return inceptionYear;
- }
-
- public String getProjectDescription()
- {
- return projectDescription;
- }
-
- public String getProjectName()
- {
- return projectName;
- }
-
- public void setBaseVersion( String baseVersion )
- {
- this.baseVersion = baseVersion;
- }
-
- public String getBaseVersion()
- {
- return baseVersion;
- }
-
- public void setDependencies( List dependencies )
- {
- this.dependencies = dependencies;
- }
-
- public void setDevelopers( List developers )
- {
- this.developers = developers;
- }
-
- public List getDevelopers()
- {
- return developers;
- }
-
- public List getDependencies()
- {
- return dependencies;
- }
-
- public String getPrimaryKey()
- {
- return groupId + ":" + artifactId + ":" + version + ( classifier != null ? ":" + classifier : "" );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.InvalidArtifactRTException;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Dependency;
-import org.apache.maven.model.Developer;
-import org.apache.maven.model.Model;
-import org.apache.maven.project.MavenProject;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.apache.maven.project.ProjectBuildingException;
-import org.codehaus.plexus.digest.Digester;
-import org.codehaus.plexus.util.xml.Xpp3Dom;
-import org.codehaus.plexus.util.xml.Xpp3DomBuilder;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipException;
-import java.util.zip.ZipFile;
-
-/**
- * An index record factory for the standard index.
- *
- * @author Edwin Punzalan
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory" role-hint="standard"
- */
-public class StandardArtifactIndexRecordFactory
- extends AbstractArtifactIndexRecordFactory
-{
- /**
- * A list of artifact types to treat as a zip archive.
- *
- * @todo this should be smarter (perhaps use the plexus archiver to look for an unarchiver, and make the ones treated as zip configurable, since sar, par, etc. can be added at random).
- */
- private static final Set ARCHIVE_TYPES =
- new HashSet( Arrays.asList( new String[]{"jar", "ejb", "par", "sar", "war", "ear", "rar"} ) );
-
- /**
- * @plexus.requirement
- */
- private ArtifactFactory artifactFactory;
-
- /**
- * @plexus.requirement
- */
- private MavenProjectBuilder projectBuilder;
-
- /**
- * @plexus.requirement role-hint="sha1"
- */
- protected Digester sha1Digester;
-
- /**
- * @plexus.requirement role-hint="md5"
- */
- protected Digester md5Digester;
-
- private static final String SITE_TEMPLATE_NAME = "META-INF/maven/site.vm";
-
- private static final String SITE_CSS_NAME = "css/maven-theme.css";
-
- private static final String PLUGIN_METADATA_NAME = "META-INF/maven/plugin.xml";
-
- private static final String ARCHETYPE_METADATA_NAME = "META-INF/maven/archetype.xml";
-
- // Some current/old archetypes have the archetype.xml at a different location.
- private static final String ARCHETYPE_METADATA_NAME_OLD = "META-INF/archetype.xml";
-
- public RepositoryIndexRecord createRecord( Artifact artifact )
- throws RepositoryIndexException
- {
- StandardArtifactIndexRecord record = null;
-
- File file = artifact.getFile();
-
- // TODO: is this condition really a possibility?
- if ( file != null && file.exists() )
- {
- String md5 = readChecksum( file, md5Digester );
- String sha1 = readChecksum( file, sha1Digester );
-
- List files = null;
- boolean archive = ARCHIVE_TYPES.contains( artifact.getType() );
- try
- {
- if ( archive )
- {
- files = readFilesInArchive( file );
- }
- }
- catch ( IOException e )
- {
- getLogger().error( "Error reading artifact file, omitting from index: " + e.getMessage() );
- }
-
- // If it's an archive with no files, don't create a record
- if ( !archive || files != null )
- {
- record = new StandardArtifactIndexRecord();
-
- record.setGroupId( artifact.getGroupId() );
- record.setArtifactId( artifact.getArtifactId() );
- record.setBaseVersion( artifact.getBaseVersion() );
- record.setVersion( artifact.getVersion() );
- record.setClassifier( artifact.getClassifier() );
- record.setType( artifact.getType() );
- record.setMd5Checksum( md5 );
- record.setSha1Checksum( sha1 );
- record.setFilename( artifact.getRepository().pathOf( artifact ) );
- record.setLastModified( file.lastModified() );
- record.setSize( file.length() );
- record.setRepository( artifact.getRepository().getId() );
-
- if ( files != null )
- {
- populateArchiveEntries( files, record, artifact.getFile() );
- }
-
- if ( !"pom".equals( artifact.getType() ) )
- {
- Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
- artifact.getArtifactId(),
- artifact.getVersion() );
- pomArtifact.isSnapshot(); // gross hack around bug in maven-artifact
- File pomFile = new File( artifact.getRepository().getBasedir(),
- artifact.getRepository().pathOf( pomArtifact ) );
- if ( pomFile.exists() )
- {
- try
- {
- populatePomEntries( readPom( pomArtifact, artifact.getRepository() ), record );
- }
- catch ( ProjectBuildingException e )
- {
- getLogger().error( "Error reading POM file [" + pomFile + "] for " + artifact +
- ", not populating in index: " + e.getMessage() );
- }
- }
- }
- else
- {
- Model model;
- try
- {
- model = readPom( artifact, artifact.getRepository() );
-
- if ( !"pom".equals( model.getPackaging() ) )
- {
- // Don't return a record for a POM that does not belong on its own
- record = null;
- }
- else
- {
- populatePomEntries( model, record );
- }
- }
- catch ( ProjectBuildingException e )
- {
- getLogger().error(
- "Error reading POM file for " + artifact + ", not populating in index: " + e.getMessage() );
- }
- }
- }
- }
-
- return record;
- }
-
- private void populatePomEntries( Model pom, StandardArtifactIndexRecord record )
- {
- record.setPackaging( pom.getPackaging() );
- record.setProjectName( pom.getName() );
- record.setProjectDescription( pom.getDescription() );
- record.setInceptionYear( pom.getInceptionYear() );
-
- List dependencies = populateDependencies( pom.getDependencies() );
- if ( !dependencies.isEmpty() )
- {
- record.setDependencies( dependencies );
- }
- List developers = populateDevelopers( pom.getDevelopers() );
- if ( !developers.isEmpty() )
- {
- record.setDevelopers( developers );
- }
-
-/* TODO: fields for later
- indexPlugins( doc, FLD_PLUGINS_BUILD, pom.getBuild().getPlugins().iterator() );
- indexReportPlugins( doc, FLD_PLUGINS_REPORT, pom.getReporting().getPlugins().iterator() );
- record.setLicenses( licenses );
-*/
- }
-
- private List populateDependencies( List dependencies )
- {
- List convertedDependencies = new ArrayList();
-
- for ( Iterator i = dependencies.iterator(); i.hasNext(); )
- {
- Dependency dependency = (Dependency) i.next();
-
- convertedDependencies.add(
- dependency.getGroupId() + ":" + dependency.getArtifactId() + ":" + dependency.getVersion() );
- }
-
- return convertedDependencies;
- }
-
- private List populateDevelopers( List developers )
- {
- List convertedDevelopers = new ArrayList();
-
- for ( Iterator i = developers.iterator(); i.hasNext(); )
- {
- Developer developer = (Developer) i.next();
-
- convertedDevelopers.add( developer.getId() + ":" + developer.getName() + ":" + developer.getEmail() );
- }
-
- return convertedDevelopers;
- }
-
- private Model readPom( Artifact artifact, ArtifactRepository repository )
- throws RepositoryIndexException, ProjectBuildingException
- {
- // TODO: this can create a -SNAPSHOT.pom when it didn't exist and a timestamped one did. This is harmless, but should be avoided
- // TODO: will this pollute with local repo metadata?
-
- try
- {
- MavenProject project = projectBuilder.buildFromRepository( artifact, Collections.EMPTY_LIST, repository );
- return project.getModel();
- }
- catch ( InvalidArtifactRTException e )
- {
- throw new ProjectBuildingException( artifact.getId(),
- "Unable to build project from invalid artifact [" + artifact + "]", e );
- }
- }
-
- private void populateArchiveEntries( List files, StandardArtifactIndexRecord record, File artifactFile )
- throws RepositoryIndexException
- {
- List classes = new ArrayList();
- List fileList = new ArrayList();
-
- for ( Iterator i = files.iterator(); i.hasNext(); )
- {
- String name = (String) i.next();
-
- // ignore directories
- if ( !name.endsWith( "/" ) )
- {
- fileList.add( name );
-
- if ( isClass( name ) )
- {
- classes.add( name.substring( 0, name.length() - 6 ).replace( '/', '.' ) );
- }
- else if ( PLUGIN_METADATA_NAME.equals( name ) )
- {
- populatePluginEntries( readXmlMetadataFileInJar( artifactFile, PLUGIN_METADATA_NAME ), record );
- }
- else if ( ARCHETYPE_METADATA_NAME.equals( name ) || ARCHETYPE_METADATA_NAME_OLD.equals( name ) )
- {
- populateArchetypeEntries( record );
- }
- else if ( SITE_TEMPLATE_NAME.equals( name ) || SITE_CSS_NAME.equals( name ) )
- {
- populateSkinEntries( record );
- }
- }
- }
-
- if ( !classes.isEmpty() )
- {
- record.setClasses( classes );
- }
- if ( !fileList.isEmpty() )
- {
- record.setFiles( fileList );
- }
- }
-
- private void populateArchetypeEntries( StandardArtifactIndexRecord record )
- {
- // Typically discovered as a JAR
- record.setType( "maven-archetype" );
- }
-
- private void populateSkinEntries( StandardArtifactIndexRecord record )
- {
- // Typically discovered as a JAR
- record.setType( "maven-skin" );
- }
-
- private Xpp3Dom readXmlMetadataFileInJar( File file, String name )
- throws RepositoryIndexException
- {
- // TODO: would be more efficient with original ZipEntry still around
-
- Xpp3Dom xpp3Dom;
- ZipFile zipFile = null;
- try
- {
- zipFile = new ZipFile( file );
- ZipEntry entry = zipFile.getEntry( name );
- xpp3Dom = Xpp3DomBuilder.build( new InputStreamReader( zipFile.getInputStream( entry ) ) );
- }
- catch ( ZipException e )
- {
- throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
- }
- catch ( XmlPullParserException e )
- {
- throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( zipFile );
- }
- return xpp3Dom;
- }
-
- public void populatePluginEntries( Xpp3Dom metadata, StandardArtifactIndexRecord record )
- {
- // Typically discovered as a JAR
- record.setType( "maven-plugin" );
-
- Xpp3Dom prefix = metadata.getChild( "goalPrefix" );
-
- if ( prefix != null )
- {
- record.setPluginPrefix( prefix.getValue() );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * The fields in a standard artifact index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo should be an enum
- */
-public class StandardIndexRecordFields
-{
- public static final String FILENAME = "filename";
-
- public static final String GROUPID = "groupId";
-
- public static final String GROUPID_EXACT = GROUPID + "_u";
-
- public static final String ARTIFACTID = "artifactId";
-
- public static final String ARTIFACTID_EXACT = ARTIFACTID + "_u";
-
- public static final String VERSION = "version";
-
- public static final String VERSION_EXACT = VERSION + "_u";
-
- public static final String BASE_VERSION = "baseVersion";
-
- public static final String BASE_VERSION_EXACT = BASE_VERSION + "_u";
-
- public static final String TYPE = "type";
-
- public static final String CLASSIFIER = "classifier";
-
- public static final String PACKAGING = "packaging";
-
- public static final String REPOSITORY = "repo";
-
- public static final String LAST_MODIFIED = "lastModified";
-
- public static final String FILE_SIZE = "fileSize";
-
- public static final String MD5 = "md5";
-
- public static final String SHA1 = "sha1";
-
- public static final String CLASSES = "classes";
-
- public static final String PLUGIN_PREFIX = "pluginPrefix";
-
- public static final String FILES = "files";
-
- public static final String INCEPTION_YEAR = "inceptionYear";
-
- public static final String PROJECT_NAME = "projectName";
-
- public static final String PROJECT_DESCRIPTION = "projectDesc";
-
- public static final String DEVELOPERS = "developers";
-
- public static final String DEPENDENCIES = "dependencies";
-
- private StandardIndexRecordFields()
- {
- // No touchy!
- }
-}
+++ /dev/null
- -----
- Indexer Design
- -----
- Brett Porter
- -----
- 25 July 2006
- -----
-
-~~ Copyright 2006 The Apache Software Foundation.
-~~
-~~ Licensed under the Apache License, Version 2.0 (the "License");
-~~ you may not use this file except in compliance with the License.
-~~ You may obtain a copy of the License at
-~~
-~~ http://www.apache.org/licenses/LICENSE-2.0
-~~
-~~ Unless required by applicable law or agreed to in writing, software
-~~ distributed under the License is distributed on an "AS IS" BASIS,
-~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-~~ See the License for the specific language governing permissions and
-~~ limitations under the License.
-
-~~ NOTE: For help with the syntax of this file, see:
-~~ http://maven.apache.org/guides/mini/guide-apt-format.html
-
-Indexer Design
-
- <<Note: The current indexer design is under review. This document will grow into what it should be, and the code and
- tests will be refactored to match.>>
-
- ~~TODO: separate API design from Lucene implementation design
-
-* Standard Artifact Index
-
- We currently want to index these elements from the repository:
-
- * for each artifact file: the artifact ID, version, group ID, classifier, type (extension), filename (including path
- from the repository base), checksums (md5, sha1) and size
-
- * for each artifact POM: the packaging, licenses, dependencies, build plugins, reporting plugins
-
- * plugin prefix
-
- * Java classes within a JAR artifact (delimited by \n)
-
- * filenames within an archive (delimited by \n)
-
- * the identifier of the source repository
-
- Each record in the index refers to an artifact. Since the content for a record can come from various sources, the
- record may need to be updated when different files related to the same artifact are discovered (i.e. the POM, or,
- for plugins, the metadata that contains their prefix).
-
- To simplify this, the process for discovery is as follows:
-
- * Discovered artifacts will read the related POM and metadata from the repository to index, rather than relying on
- them being discovered. This ensures that partial discovery still yields correct results in all cases, and it is
- possible to construct the entire record without having to read back from the index.
-
- * POMs that do not have a packaging of <<<pom>>> are not sent to the indexer.
-
- The result of this process is that updates to a POM or to repository metadata alone, without a corresponding update
- to the artifact(s), will not update the index. As POMs should not be modified, this is not a major concern. Likewise,
- updates to metadata will only accompany updates to the artifact itself, so will not cause a problem.
-
- The above case may be a problem if discovery happens in the middle of a deployment made outside of the
- repository manager (where the artifact is present, but the metadata or POM is not). To avoid such cases, the
- discoverer should only detect changes more than a minute old (this blackout period should be configurable).
-
- Other techniques were considered:
-
- * Processing each artifact file individually, updating each record as needed. This would result in having to read
- back each index record before writing. This is quite costly in Lucene as it would be "read, delete, add". You
- must have a reader and writer open for that process, and it greatly complicates the code.
-
- * Have three indices, one for each. This would complicate searching (and may affect ranking of results, though this
- was not analysed). While Lucene is
- {{{http://wiki.apache.org/jakarta-lucene/LuceneFAQ#head-b11296f9e7b2a5e7496d67118d0a5898f2fd9823} capable of
- searching multiple indices}}, it is expected that the results would be in the form of a list of separate records
- rather than the "table join" this effectively is. A similar derivative of this technique would be to store
- everything in one index, using a field (previously, doctype) to identify each record.
-
- Records in the index are keyed by their path from the repository root. While this is longer than using the
- dependency conflict ID, Lucene cannot delete by a combination of terms, so keying by the conflict ID would require
- storing an additional combined field in the index, whereas the filename already exists as a field.
-
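- Keying by a single path term also keeps deletion simple: removing a record is a one-term delete. The following is a
- minimal sketch against the Lucene 2.0 API; the helper method is hypothetical, and it assumes the filename key is
- stored as a single untokenized term:
-
-+--------------------------------------------------------------------------+
- // Hypothetical helper: remove the index record for an artifact by deleting on
- // the single term that holds its path from the repository root.
- private void deleteRecord( File indexLocation, Artifact artifact )
- throws IOException
- {
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- // Assumes the filename field is untokenized, so the full path is one term.
- reader.deleteDocuments( new Term( StandardIndexRecordFields.FILENAME,
- artifact.getRepository().pathOf( artifact ) ) );
- }
- finally
- {
- reader.close();
- }
- }
-+--------------------------------------------------------------------------+
-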
- The plugin prefix could be found either from inside the plugin JAR (<<<META-INF/maven/plugin.xml>>>), or from the
- repository metadata for the plugin's group. For simplicity, the first approach will be used. This means that at
- present there is no need to index the repository metadata; however, that may be considered in the future.
-
- Note that archetypes currently don't have a packaging associated with them in Maven, so it is not recorded in the POM.
- However, to be able to search by this type, the indexer will look for a <<<META-INF/maven/archetype.xml>>> file and,
- if found, set its packaging to <<<maven-archetype>>>. In the future, this handling will be deprecated as POMs
- can start using the appropriate packaging.
-
- The index is shared among multiple repositories. The source repository is recorded in the index record. The
- discovery/conversion/reporting mechanisms are expected to deal with duplicates before reaching the indexer, so if the
- indexer encounters an artifact from a different repository than the one it was already added from, it will simply
- replace the record.
-
- When indexing metadata from a POM, the POM should be loaded using the Maven project builder so that inheritance and
- interpolation are performed. This ensures that the record is as complete as possible, and that searching by
- fields that are inherited will reveal both the parent and the children in the search results.
-
-* Reduced Size Index
-
- An additional index is maintained by the repository manager in the
- {{{../apidocs/org/apache/maven/archiva/indexing/MinimalArtifactIndexRecord.html} MinimalArtifactIndexRecord}} class. This
- indexes all of the same artifacts as the first index, but stores them with shorter field names and less information to
- maintain a smaller size. This index is appropriate for use by certain clients such as IDE integration for fast
- searching. For a fuller interface to the repository information, the integration should use the XMLRPC interface.
-
- The following fields are in the reduced index:
-
- * <<<j>>>: The JAR filename
-
- * <<<s>>>: The JAR size
-
- * <<<d>>>: The last modified timestamp
-
- * <<<c>>>: A list of classes in the JAR (\n delimited)
-
- * <<<m>>>: md5 checksum of the JAR
-
- * <<<pk>>>: the primary key of the artifact
-
- Only JARs are indexed at present. The JAR filename is used as the key for later deleting entries.
-
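- To illustrate the shape of these records, the following sketch shows how one minimal record could be turned into a
- Lucene document using the short field names above. The converter method is hypothetical, and the per-field
- store/tokenize choices are assumptions:
-
-+--------------------------------------------------------------------------+
- // Hypothetical converter (Lucene 2.0 field API) using the short field names
- // listed above; the store/tokenize choice for each field is illustrative only.
- private Document toDocument( MinimalArtifactIndexRecord record )
- {
- Document document = new Document();
-
- document.add( new Field( MinimalIndexRecordFields.FILENAME, record.getFilename(),
- Field.Store.YES, Field.Index.TOKENIZED ) );
- document.add( new Field( MinimalIndexRecordFields.FILE_SIZE, NumberTools.longToString( record.getSize() ),
- Field.Store.YES, Field.Index.UN_TOKENIZED ) );
- document.add( new Field( MinimalIndexRecordFields.MD5, record.getMd5Checksum(),
- Field.Store.YES, Field.Index.UN_TOKENIZED ) );
- document.add( new Field( MinimalIndexRecordFields.CLASSES,
- StringUtils.join( record.getClasses().iterator(), "\n" ),
- Field.Store.YES, Field.Index.TOKENIZED ) );
-
- // The timestamp is written as a UTC yyyyMMddHHmmss string so that it sorts lexicographically.
- SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
- dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
- document.add( new Field( MinimalIndexRecordFields.LAST_MODIFIED,
- dateFormat.format( new Date( record.getLastModified() ) ),
- Field.Store.YES, Field.Index.UN_TOKENIZED ) );
-
- return document;
- }
-+--------------------------------------------------------------------------+
-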
-* Searching
-
- Searching will be reasonably flexible, though the general use case will be to enter a single parsed query that is
- applied to all fields in the index.
-
- Some features that will be available:
-
- * <Search through most fields for a particular keyword>: the general case described above.
-
- * <Search by a particular field (exact match)>: This would be needed for search by checksum.
-
- * <Search in a range of field values>: This would be needed for searching based on update time. Note that in
- Lucene it may be better to search by other fields (or return all), and then filter the results by dates rather
- than making dates part of a search query (see the sketch after this list).
-
- * <Limit search to particular fields>: It will be useful to only search Java classes and packages, for example.
-
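- As an illustration of the date filtering approach, a range filter can be applied on top of any query so that dates
- never become part of the parsed query itself. In the sketch below, the helper method and the timestamp format are
- assumptions:
-
-+--------------------------------------------------------------------------+
- // Hypothetical helper: filter any query by a last-modified range (Lucene 2.0 API).
- // Assumes the lastModified field is stored as a sortable yyyyMMddHHmmss string.
- private Hits searchUpdatedBetween( IndexSearcher searcher, Query query, String fromTimestamp, String toTimestamp )
- throws IOException
- {
- Filter dateFilter = new RangeFilter( StandardIndexRecordFields.LAST_MODIFIED,
- fromTimestamp, toTimestamp, true, true );
- return searcher.search( query, dateFilter );
- }
-+--------------------------------------------------------------------------+
-
- A <<<MatchAllDocsQuery>>> could be passed as the query to filter purely by date.
-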
- Another thing to note is that, for performance reasons, search results should be composable entirely from the index.
- The search should not have to read any metadata files, or file properties such as size and checksum, from disk.
- This enables searching a repository remotely without having the physical repository available, which is useful for
- IDE integration among other things.
-
- Note that to be able to do an exact match search, a field must be stored untokenized. For fields where it makes sense
- to search both tokenized and untokenized, they will be stored twice. This currently includes: artifact ID, group ID,
- and version.
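-
- For example, an exact match uses a plain term query against the untokenized <<<_u>>> variant of a field, while a
- keyword search is parsed with the same analyzer that was used at index time. A minimal sketch with a hypothetical
- helper method:
-
-+--------------------------------------------------------------------------+
- // Hypothetical helper contrasting exact and tokenized searches (Lucene 2.0 API).
- private List searchByGroupId( RepositoryArtifactIndex index, String groupId )
- throws ParseException, RepositoryIndexSearchException
- {
- // Exact match: a single term against the untokenized "_u" variant of the field.
- Query exact = new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) );
-
- // Keyword match: parsed with the analyzer used at index time, against the tokenized field.
- Query keyword = new QueryParser( StandardIndexRecordFields.GROUPID,
- LuceneRepositoryArtifactIndex.getAnalyzer() ).parse( groupId );
-
- // Run the exact query; the keyword query would be wrapped and run the same way.
- return index.search( new LuceneQuery( exact ) );
- }
-+--------------------------------------------------------------------------+
-
- The same pattern applies to the artifact ID and version fields, which also have untokenized <<<_u>>> variants.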
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <body>
- <menu name="Design Documentation">
- <item name="Indexing Design" href="/design.html"/>
- </menu>
- </body>
-</project>
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.queryParser.ParseException;
-import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the Lucene implementation of the artifact index search.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be nice to abstract some of the query away, but for now passing in a Lucene query directly is good enough
- */
-public class LuceneMinimalArtifactIndexSearchTest
- extends PlexusTestCase
-{
- private RepositoryArtifactIndex index;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private File indexLocation;
-
- private RepositoryIndexRecordFactory recordFactory;
-
- private Map records = new HashMap();
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- indexLocation = getTestFile( "target/test-index" );
-
- FileUtils.deleteDirectory( indexLocation );
-
- index = factory.createMinimalIndex( indexLocation );
-
- records.put( "test-jar", recordFactory.createRecord( createArtifact( "test-jar" ) ) );
- records.put( "test-jar-jdk14",
- recordFactory.createRecord( createArtifact( "test-jar", "1.0", "jar", "jdk14" ) ) );
- records.put( "test-jar-and-pom",
- recordFactory.createRecord( createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" ) ) );
- records.put( "test-jar-and-pom-jdk14", recordFactory.createRecord(
- createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" ) ) );
- records.put( "test-child-pom",
- recordFactory.createRecord( createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" ) ) );
- records.put( "test-archetype", recordFactory.createRecord( createArtifact( "test-archetype" ) ) );
- records.put( "test-plugin", recordFactory.createRecord( createArtifact( "test-plugin" ) ) );
- records.put( "test-pom", recordFactory.createRecord( createArtifact( "test-pom", "1.0", "pom" ) ) );
- records.put( "parent-pom", recordFactory.createRecord( createArtifact( "parent-pom", "1", "pom" ) ) );
- records.put( "test-dll", recordFactory.createRecord( createArtifact( "test-dll", "1.0.1.34", "dll" ) ) );
-
- index.indexRecords( records.values() );
- }
-
- public void testExactMatchMd5()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( MinimalIndexRecordFields.MD5, "3a0adc365f849366cd8b633cad155cb7" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( MinimalIndexRecordFields.MD5, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchFilename()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( MinimalIndexRecordFields.FILENAME, "maven" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 7, results.size() );
-
- query = createMatchQuery( MinimalIndexRecordFields.FILENAME, "plugin" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createMatchQuery( MinimalIndexRecordFields.FILENAME, "test" );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 7, results.size() );
-
- // test non-match fails
- query = createMatchQuery( MinimalIndexRecordFields.FILENAME, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchClass()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( MinimalIndexRecordFields.CLASSES, "b.c.C" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = createMatchQuery( MinimalIndexRecordFields.CLASSES, "C" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = createMatchQuery( MinimalIndexRecordFields.CLASSES, "MyMojo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = createMatchQuery( MinimalIndexRecordFields.CLASSES, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- private static Query createExactMatchQuery( String field, String value )
- {
- return new TermQuery( new Term( field, value ) );
- }
-
- private static Query createMatchQuery( String field, String value )
- throws ParseException
- {
- return new QueryParser( field, LuceneRepositoryArtifactIndex.getAnalyzer() ).parse( value );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar", null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- return createArtifact( artifactId, version, type, null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type, String classifier )
- {
- Artifact artifact = artifactFactory.createDependencyArtifact( "org.apache.maven.archiva.record", artifactId,
- VersionRange.createFromVersion( version ), type,
- classifier, Artifact.SCOPE_RUNTIME );
- artifact.isSnapshot();
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.NumberTools;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.record.MinimalIndexRecordFields;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.TimeZone;
-
-/**
- * Test the Lucene implementation of the artifact index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneMinimalArtifactIndexTest
- extends PlexusTestCase
-{
- private RepositoryArtifactIndex index;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private File indexLocation;
-
- private RepositoryIndexRecordFactory recordFactory;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- indexLocation = getTestFile( "target/test-index" );
-
- FileUtils.deleteDirectory( indexLocation );
-
- index = factory.createMinimalIndex( indexLocation );
- }
-
- public void testIndexExists()
- throws IOException, RepositoryIndexException
- {
- assertFalse( "check index doesn't exist", index.exists() );
-
- // create empty directory
- indexLocation.mkdirs();
- assertFalse( "check index doesn't exist even if directory does", index.exists() );
-
- // create index, with no records
- createEmptyIndex();
- assertTrue( "check index is considered to exist", index.exists() );
-
- // Test non-directory
- FileUtils.deleteDirectory( indexLocation );
- indexLocation.createNewFile();
- try
- {
- index.exists();
- fail( "Index operation should fail as the location is not valid" );
- }
- catch ( RepositoryIndexException e )
- {
- // great
- }
- finally
- {
- indexLocation.delete();
- }
- }
-
- public void testAddRecordNoIndex()
- throws IOException, RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertEquals( "Check document", repository.pathOf( artifact ),
- document.get( MinimalIndexRecordFields.FILENAME ) );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddRecordExistingEmptyIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertRecord( document, artifact, "3a0adc365f849366cd8b633cad155cb7", "A\nb.B\nb.c.C" );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddRecordInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- // Do it again
- record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertRecord( document, artifact, "3a0adc365f849366cd8b633cad155cb7", "A\nb.B\nb.c.C" );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- index.deleteRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordNotInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
-
- index.deleteRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordNoIndex()
- throws IOException, RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.deleteRecords( Collections.singleton( record ) );
-
- assertFalse( index.exists() );
- }
-
- public void testAddPomRecord()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddPlugin()
- throws IOException, RepositoryIndexException, XmlPullParserException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-plugin" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
-
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertRecord( document, artifact, "3530896791670ebb45e17708e5d52c40",
- "org.apache.maven.archiva.record.MyMojo" );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar" );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- Artifact artifact =
- artifactFactory.createBuildArtifact( "org.apache.maven.archiva.record", artifactId, version, type );
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-
- private void createEmptyIndex()
- throws IOException
- {
- createIndex( Collections.EMPTY_LIST );
- }
-
- private void createIndex( List documents )
- throws IOException
- {
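- // The third IndexWriter argument (true) tells Lucene to create a fresh index at the location, replacing any existing one.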
- IndexWriter writer = new IndexWriter( indexLocation, LuceneRepositoryArtifactIndex.getAnalyzer(), true );
- for ( Iterator i = documents.iterator(); i.hasNext(); )
- {
- Document document = (Document) i.next();
- writer.addDocument( document );
- }
- writer.optimize();
- writer.close();
- }
-
- private void assertRecord( Document document, Artifact artifact, String expectedChecksum, String expectedClasses )
- {
- assertEquals( "Check document filename", repository.pathOf( artifact ),
- document.get( MinimalIndexRecordFields.FILENAME ) );
- assertEquals( "Check document timestamp", getLastModified( artifact.getFile() ),
- document.get( MinimalIndexRecordFields.LAST_MODIFIED ) );
- assertEquals( "Check document checksum", expectedChecksum, document.get( MinimalIndexRecordFields.MD5 ) );
- assertEquals( "Check document size", artifact.getFile().length(),
- NumberTools.stringToLong( document.get( MinimalIndexRecordFields.FILE_SIZE ) ) );
- assertEquals( "Check document classes", expectedClasses, document.get( MinimalIndexRecordFields.CLASSES ) );
- }
-
- private String getLastModified( File file )
- {
- SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
- dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
- return dateFormat.format( new Date( file.lastModified() ) );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.queryParser.ParseException;
-import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the Lucene implementation of the artifact index search.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be nice to abstract some of the query away, but for now passing in a Lucene query directly is good enough
- */
-public class LuceneStandardArtifactIndexSearchTest
- extends PlexusTestCase
-{
- private RepositoryArtifactIndex index;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private File indexLocation;
-
- private RepositoryIndexRecordFactory recordFactory;
-
- private Map records = new HashMap();
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- indexLocation = getTestFile( "target/test-index" );
-
- FileUtils.deleteDirectory( indexLocation );
-
- index = factory.createStandardIndex( indexLocation );
-
- records.put( "test-jar", recordFactory.createRecord( createArtifact( "test-jar" ) ) );
- records.put( "test-jar-jdk14",
- recordFactory.createRecord( createArtifact( "test-jar", "1.0", "jar", "jdk14" ) ) );
- records.put( "test-jar-and-pom",
- recordFactory.createRecord( createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" ) ) );
- records.put( "test-jar-and-pom-jdk14", recordFactory.createRecord(
- createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" ) ) );
- records.put( "test-child-pom",
- recordFactory.createRecord( createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" ) ) );
- records.put( "test-archetype", recordFactory.createRecord( createArtifact( "test-archetype" ) ) );
- records.put( "test-plugin", recordFactory.createRecord( createArtifact( "test-plugin" ) ) );
- records.put( "test-pom", recordFactory.createRecord( createArtifact( "test-pom", "1.0", "pom" ) ) );
- records.put( "parent-pom", recordFactory.createRecord( createArtifact( "parent-pom", "1", "pom" ) ) );
- records.put( "test-dll", recordFactory.createRecord( createArtifact( "test-dll", "1.0.1.34", "dll" ) ) );
-
- index.indexRecords( records.values() );
- }
-
- public void testExactMatchVersion()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "1.0" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "1.0-SNAPSHOT" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "1.0-snapshot" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "1.0-20060728.121314-1" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.VERSION_EXACT, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchBaseVersion()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-SNAPSHOT" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-snapshot" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-20060728.121314-1" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.BASE_VERSION_EXACT, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchGroupId()
- throws RepositoryIndexSearchException
- {
- Query query =
- createExactMatchQuery( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven.archiva.record" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
- // test partial match fails
- query = createExactMatchQuery( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.GROUPID_EXACT, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchArtifactId()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.ARTIFACTID_EXACT, "test-jar" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- // test partial match fails
- query = createExactMatchQuery( StandardIndexRecordFields.ARTIFACTID_EXACT, "test" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.ARTIFACTID_EXACT, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchType()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "maven-plugin" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "jar" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "dll" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "maven-archetype" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.TYPE, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchPackaging()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "maven-plugin" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "jar" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 4, results.size() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "dll" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "maven-archetype" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.PACKAGING, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchPluginPrefix()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.PLUGIN_PREFIX, "test" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.PLUGIN_PREFIX, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchRepository()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.REPOSITORY, "test" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.REPOSITORY, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchMd5()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.MD5, "3a0adc365f849366cd8b633cad155cb7" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.MD5, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchSha1()
- throws RepositoryIndexSearchException
- {
- Query query =
- createExactMatchQuery( StandardIndexRecordFields.SHA1, "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.SHA1, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchInceptionYear()
- throws RepositoryIndexSearchException
- {
- Query query = createExactMatchQuery( StandardIndexRecordFields.INCEPTION_YEAR, "2005" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertEquals( "Check results size", 3, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.INCEPTION_YEAR, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchFilename()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( StandardIndexRecordFields.FILENAME, "maven" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.FILENAME, "plugin" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.FILENAME, "pLuGiN" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createMatchQuery( StandardIndexRecordFields.FILENAME, "test" );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertEquals( "Check results size", 9, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.FILENAME, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchGroupId()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( StandardIndexRecordFields.GROUPID, "org.apache.maven.archiva.record" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.GROUPID, "maven" );
- results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.GROUPID, "Maven" );
- results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.GROUPID, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchArtifactId()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( StandardIndexRecordFields.ARTIFACTID, "plugin" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.ARTIFACTID, "test" );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertEquals( "Check results size", 9, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.ARTIFACTID, "maven" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchVersion()
- throws RepositoryIndexSearchException, ParseException
- {
- // If partial matches are desired, need to change the analyzer for versions to split on '.'
- Query query = createMatchQuery( StandardIndexRecordFields.VERSION, "1" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 4, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.VERSION, "1.0" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 8, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.VERSION, "snapshot" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createMatchQuery( StandardIndexRecordFields.VERSION, "SNAPSHOT" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createMatchQuery( StandardIndexRecordFields.VERSION, "alpha" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.VERSION, "1.0-alpha-1" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.VERSION, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchBaseVersion()
- throws RepositoryIndexSearchException, ParseException
- {
- // If partial matches are desired, need to change the analyzer for versions to split on '.'
- Query query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "1" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 3, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "1.0" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 8, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "SNAPSHOT" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "SnApShOt" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "snapshot" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "alpha" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "1.0-alpha-1" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.BASE_VERSION, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchClassifier()
- throws RepositoryIndexSearchException, ParseException
- {
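- // Lucene returns nothing for a query made up solely of MUST_NOT clauses, so the prohibition is combined with MatchAllDocsQuery to express "every record without this classifier".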
- BooleanQuery bQuery = new BooleanQuery();
- bQuery.add( new MatchAllDocsQuery(), BooleanClause.Occur.MUST );
- bQuery.add( createMatchQuery( StandardIndexRecordFields.CLASSIFIER, "jdk14" ), BooleanClause.Occur.MUST_NOT );
- List results = index.search( new LuceneQuery( bQuery ) );
-
- assertFalse( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 8, results.size() );
-
- // TODO: can we search for "anything with no classifier" ?
-
- Query query = createMatchQuery( StandardIndexRecordFields.CLASSIFIER, "jdk14" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.CLASSIFIER, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchClass()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( StandardIndexRecordFields.CLASSES, "b.c.C" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.CLASSES, "C" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.CLASSES, "MyMojo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.CLASSES, "MYMOJO" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.CLASSES, "mymojo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.CLASSES, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchFiles()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( StandardIndexRecordFields.FILES, "MANIFEST.MF" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 7, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.FILES, "META-INF" );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 7, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.FILES, "plugin.xml" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.FILES, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchDependency()
- throws RepositoryIndexSearchException
- {
- Query query =
- createExactMatchQuery( StandardIndexRecordFields.DEPENDENCIES, "org.apache.maven:maven-plugin-api:2.0" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = createExactMatchQuery( StandardIndexRecordFields.DEPENDENCIES, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchProjectName()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( StandardIndexRecordFields.PROJECT_NAME, "mojo" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = createMatchQuery( StandardIndexRecordFields.PROJECT_NAME, "maven" );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.PROJECT_NAME, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchProjectDescription()
- throws RepositoryIndexSearchException, ParseException
- {
- Query query = createMatchQuery( StandardIndexRecordFields.PROJECT_DESCRIPTION, "description" );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertEquals( "Check results size", 3, results.size() );
-
- // test non-match fails
- query = createMatchQuery( StandardIndexRecordFields.PROJECT_DESCRIPTION, "foo" );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- private static Query createExactMatchQuery( String field, String value )
- {
- return new TermQuery( new Term( field, value ) );
- }
-
- private static Query createMatchQuery( String field, String value )
- throws ParseException
- {
- return new QueryParser( field, LuceneRepositoryArtifactIndex.getAnalyzer() ).parse( value );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar", null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- return createArtifact( artifactId, version, type, null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type, String classifier )
- {
- Artifact artifact = artifactFactory.createDependencyArtifact( "org.apache.maven.archiva.record", artifactId,
- VersionRange.createFromVersion( version ), type,
- classifier, Artifact.SCOPE_RUNTIME );
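- // isSnapshot() appears to be called only for its side effect of initialising the artifact's base version (e.g. mapping a timestamped snapshot to 1.0-SNAPSHOT), which the base-version tests rely on.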
- artifact.isSnapshot();
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.NumberTools;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecord;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.TimeZone;
-
-/**
- * Test the Lucene implementation of the artifact index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneStandardArtifactIndexTest
- extends PlexusTestCase
-{
- private RepositoryArtifactIndex index;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private File indexLocation;
-
- private RepositoryIndexRecordFactory recordFactory;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- indexLocation = getTestFile( "target/test-index" );
-
- FileUtils.deleteDirectory( indexLocation );
-
- index = factory.createStandardIndex( indexLocation );
- }
-
- public void testIndexExists()
- throws IOException, RepositoryIndexException
- {
- assertFalse( "check index doesn't exist", index.exists() );
-
- // create empty directory
- indexLocation.mkdirs();
- assertFalse( "check index doesn't exist even if directory does", index.exists() );
-
- // create index, with no records
- createEmptyIndex();
- assertTrue( "check index is considered to exist", index.exists() );
-
- // Test non-directory
- FileUtils.deleteDirectory( indexLocation );
- indexLocation.createNewFile();
- try
- {
- index.exists();
- fail( "Index operation should fail as the location is not valid" );
- }
- catch ( RepositoryIndexException e )
- {
- // great
- }
- finally
- {
- indexLocation.delete();
- }
- }
-
- public void testAddRecordNoIndex()
- throws IOException, RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertJarRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddRecordExistingEmptyIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertJarRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddRecordInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- // Do it again
- record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertJarRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddPomRecord()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertPomRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddPlugin()
- throws IOException, RepositoryIndexException, XmlPullParserException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-plugin" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
-
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertPluginRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- index.deleteRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordNotInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
-
- index.deleteRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordNoIndex()
- throws IOException, RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.deleteRecords( Collections.singleton( record ) );
-
- assertFalse( index.exists() );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar" );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- Artifact artifact =
- artifactFactory.createBuildArtifact( "org.apache.maven.archiva.record", artifactId, version, type );
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-
- private void createEmptyIndex()
- throws IOException
- {
- createIndex( Collections.EMPTY_LIST );
- }
-
- private void createIndex( List documents )
- throws IOException
- {
- IndexWriter writer = new IndexWriter( indexLocation, LuceneRepositoryArtifactIndex.getAnalyzer(), true );
- for ( Iterator i = documents.iterator(); i.hasNext(); )
- {
- Document document = (Document) i.next();
- writer.addDocument( document );
- }
- writer.optimize();
- writer.close();
- }
-
- private void assertRecord( Artifact artifact, Document document, String expectedArtifactId, String expectedType,
- String expectedMd5, String expectedSha1 )
- {
- assertEquals( "Check document filename", repository.pathOf( artifact ),
- document.get( StandardIndexRecordFields.FILENAME ) );
- assertEquals( "Check document groupId", "org.apache.maven.archiva.record",
- document.get( StandardIndexRecordFields.GROUPID ) );
- assertEquals( "Check document artifactId", expectedArtifactId,
- document.get( StandardIndexRecordFields.ARTIFACTID ) );
- assertEquals( "Check document version", "1.0", document.get( StandardIndexRecordFields.VERSION ) );
- assertEquals( "Check document type", expectedType, document.get( StandardIndexRecordFields.TYPE ) );
- assertEquals( "Check document repository", "test", document.get( StandardIndexRecordFields.REPOSITORY ) );
- assertEquals( "Check document timestamp", getLastModified( artifact.getFile() ),
- document.get( StandardIndexRecordFields.LAST_MODIFIED ) );
- assertEquals( "Check document md5", expectedMd5, document.get( StandardIndexRecordFields.MD5 ) );
- assertEquals( "Check document sha1", expectedSha1, document.get( StandardIndexRecordFields.SHA1 ) );
- assertEquals( "Check document file size", artifact.getFile().length(),
- NumberTools.stringToLong( document.get( StandardIndexRecordFields.FILE_SIZE ) ) );
- assertNull( "Check document classifier", document.get( StandardIndexRecordFields.CLASSIFIER ) );
- }
-
- private void assertPomRecord( Artifact artifact, Document document )
- {
- assertRecord( artifact, document, "test-pom", "pom", "758e1ae96dff63dab7278a62e3eb174d",
- "770fde06cd5c3dccb5f5e8c6754b8c4c77b98560" );
- assertNull( "Check document classes", document.get( StandardIndexRecordFields.CLASSES ) );
- assertNull( "Check document files", document.get( StandardIndexRecordFields.FILES ) );
- assertNull( "Check document pluginPrefix", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
- assertEquals( "Check document year", "2005", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
- assertEquals( "Check document project name", "Maven Repository Manager Test POM",
- document.get( StandardIndexRecordFields.PROJECT_NAME ) );
- assertEquals( "Check document project description", "Description",
- document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
- assertEquals( "Check document packaging", "pom", document.get( StandardIndexRecordFields.PACKAGING ) );
- }
-
- private void assertJarRecord( Artifact artifact, Document document )
- {
- assertRecord( artifact, document, "test-jar", "jar", "3a0adc365f849366cd8b633cad155cb7",
- "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- assertEquals( "Check document classes", "A\nb.B\nb.c.C", document.get( StandardIndexRecordFields.CLASSES ) );
- assertEquals( "Check document files", "META-INF/MANIFEST.MF\nA.class\nb/B.class\nb/c/C.class",
- document.get( StandardIndexRecordFields.FILES ) );
- assertNull( "Check document inceptionYear", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
- assertNull( "Check document projectName", document.get( StandardIndexRecordFields.PROJECT_NAME ) );
- assertNull( "Check document projectDesc", document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
- assertNull( "Check document pluginPrefix", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
- assertNull( "Check document packaging", document.get( StandardIndexRecordFields.PACKAGING ) );
- }
-
- private void assertPluginRecord( Artifact artifact, Document document )
- {
- assertRecord( artifact, document, "test-plugin", "maven-plugin", "3530896791670ebb45e17708e5d52c40",
- "2cd2619d59a684e82e97471d2c2e004144c8f24e" );
- assertEquals( "Check document classes", "org.apache.maven.archiva.record.MyMojo",
- document.get( StandardIndexRecordFields.CLASSES ) );
- assertEquals( "Check document files", "META-INF/MANIFEST.MF\n" +
- "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.properties\n" +
- "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.xml\n" + "META-INF/maven/plugin.xml\n" +
- "org/apache/maven/archiva/record/MyMojo.class", document.get( StandardIndexRecordFields.FILES ) );
- assertEquals( "Check document pluginPrefix", "test", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
- assertEquals( "Check document packaging", "maven-plugin", document.get( StandardIndexRecordFields.PACKAGING ) );
- assertNull( "Check document inceptionYear", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
- assertEquals( "Check document project name", "Maven Mojo Archetype",
- document.get( StandardIndexRecordFields.PROJECT_NAME ) );
- assertNull( "Check document projectDesc", document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
- }
-
- private String getLastModified( File file )
- {
- SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
- dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
- return dateFormat.format( new Date( file.lastModified() ) );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import junit.framework.TestCase;
-
-import java.util.Iterator;
-
-/**
- * @author Brett Porter
- */
-public class QueryTest
- extends TestCase
-{
- private QueryTerm term1 = new QueryTerm( "field1", "value1" );
-
- private QueryTerm term2 = new QueryTerm( "field2", "value2" );
-
- private QueryTerm term3 = new QueryTerm( "field3", "value3" );
-
- public void testQueryTerm()
- {
- QueryTerm query = new QueryTerm( "Field", "Value" );
- assertEquals( "check field setting", "Field", query.getField() );
- assertEquals( "check value setting", "Value", query.getValue() );
- }
-
- public void testSingleTermQuery()
- {
- SingleTermQuery query = new SingleTermQuery( "Field", "Value" );
- assertEquals( "check field setting", "Field", query.getField() );
- assertEquals( "check value setting", "Value", query.getValue() );
-
- query = new SingleTermQuery( term1 );
- assertEquals( "check field setting", "field1", query.getField() );
- assertEquals( "check value setting", "value1", query.getValue() );
- }
-
- public void testRangeQueryOpen()
- {
- RangeQuery rangeQuery = RangeQuery.createOpenRange();
- assertNull( "Check range has no start", rangeQuery.getBegin() );
- assertNull( "Check range has no end", rangeQuery.getEnd() );
- }
-
- public void testRangeQueryExclusive()
- {
- RangeQuery rangeQuery = RangeQuery.createExclusiveRange( term1, term2 );
- assertEquals( "Check range start", term1, rangeQuery.getBegin() );
- assertEquals( "Check range end", term2, rangeQuery.getEnd() );
- assertFalse( "Check exclusive", rangeQuery.isInclusive() );
- }
-
- public void testRangeQueryInclusive()
- {
- RangeQuery rangeQuery = RangeQuery.createInclusiveRange( term1, term2 );
- assertEquals( "Check range start", term1, rangeQuery.getBegin() );
- assertEquals( "Check range end", term2, rangeQuery.getEnd() );
- assertTrue( "Check inclusive", rangeQuery.isInclusive() );
- }
-
- public void testRangeQueryOpenEnded()
- {
- RangeQuery rangeQuery = RangeQuery.createGreaterThanOrEqualToRange( term1 );
- assertEquals( "Check range start", term1, rangeQuery.getBegin() );
- assertNull( "Check range end", rangeQuery.getEnd() );
- assertTrue( "Check inclusive", rangeQuery.isInclusive() );
-
- rangeQuery = RangeQuery.createGreaterThanRange( term1 );
- assertEquals( "Check range start", term1, rangeQuery.getBegin() );
- assertNull( "Check range end", rangeQuery.getEnd() );
- assertFalse( "Check exclusive", rangeQuery.isInclusive() );
-
- rangeQuery = RangeQuery.createLessThanOrEqualToRange( term1 );
- assertNull( "Check range start", rangeQuery.getBegin() );
- assertEquals( "Check range end", term1, rangeQuery.getEnd() );
- assertTrue( "Check inclusive", rangeQuery.isInclusive() );
-
- rangeQuery = RangeQuery.createLessThanRange( term1 );
- assertNull( "Check range start", rangeQuery.getBegin() );
- assertEquals( "Check range end", term1, rangeQuery.getEnd() );
- assertFalse( "Check exclusive", rangeQuery.isInclusive() );
- }
-
- public void testCompoundQuery()
- {
- CompoundQuery query = new CompoundQuery();
- assertTrue( "check query is empty", query.getCompoundQueryTerms().isEmpty() );
-
- query.and( term1 );
- query.or( term2 );
- query.not( term3 );
-
- Iterator i = query.getCompoundQueryTerms().iterator();
- CompoundQueryTerm term = (CompoundQueryTerm) i.next();
- assertEquals( "Check first term", "field1", getQuery( term ).getField() );
- assertEquals( "Check first term", "value1", getQuery( term ).getValue() );
- assertTrue( "Check first term", term.isRequired() );
- assertFalse( "Check first term", term.isProhibited() );
-
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check second term", "field2", getQuery( term ).getField() );
- assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
- assertFalse( "Check second term", term.isRequired() );
- assertFalse( "Check second term", term.isProhibited() );
-
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check third term", "field3", getQuery( term ).getField() );
- assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
- assertFalse( "Check third term", term.isRequired() );
- assertTrue( "Check third term", term.isProhibited() );
-
- CompoundQuery query2 = new CompoundQuery();
- query2.and( query );
- query2.or( new SingleTermQuery( term2 ) );
- query2.not( new SingleTermQuery( term3 ) );
-
- i = query2.getCompoundQueryTerms().iterator();
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check first term", query, term.getQuery() );
- assertTrue( "Check first term", term.isRequired() );
- assertFalse( "Check first term", term.isProhibited() );
-
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check second term", "field2", getQuery( term ).getField() );
- assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
- assertFalse( "Check second term", term.isRequired() );
- assertFalse( "Check second term", term.isProhibited() );
-
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check third term", "field3", getQuery( term ).getField() );
- assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
- assertFalse( "Check third term", term.isRequired() );
- assertTrue( "Check third term", term.isProhibited() );
- }
-
- private static SingleTermQuery getQuery( CompoundQueryTerm term )
- {
- return (SingleTermQuery) term.getQuery();
- }
-}
-
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Test the minimal artifact index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class MinimalArtifactIndexRecordFactoryTest
- extends PlexusTestCase
-{
- private RepositoryIndexRecordFactory factory;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private static final String TEST_GROUP_ID = "org.apache.maven.archiva.record";
-
- private static final List JAR_CLASS_LIST = Arrays.asList( new String[]{"A", "b.B", "b.c.C"} );
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- factory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
- }
-
- public void testIndexedJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarWithClassifier()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar", "1.0", "jar", "jdk14" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarAndPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarAndPomWithClassifier()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
- }
-
- public void testNonIndexedPom()
- throws RepositoryIndexException
- {
- // If we pass in only the POM that belongs to a JAR, then expect null, not the POM
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "pom" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-plugin", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-archetype", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-skin", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
- }
-
- public void testIndexedPlugin()
- throws RepositoryIndexException, IOException, XmlPullParserException
- {
- Artifact artifact = createArtifact( "test-plugin" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3530896791670ebb45e17708e5d52c40" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( Collections.singletonList( "org.apache.maven.archiva.record.MyMojo" ) );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testCorruptJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-corrupt-jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- public void testNonJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-dll", "1.0.1.34", "dll" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- public void testMissingFile()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-foo" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar" );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- return createArtifact( artifactId, version, type, null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type, String classifier )
- {
- Artifact artifact = artifactFactory.createDependencyArtifact( TEST_GROUP_ID, artifactId,
- VersionRange.createFromVersion( version ), type,
- classifier, Artifact.SCOPE_RUNTIME );
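- // isSnapshot() is presumably invoked here for its side effect of resolving the base version
- // (e.g. a timestamped snapshot such as 1.0-20060728.121314-1 becomes 1.0-SNAPSHOT)
- // before the artifact file path is derived below.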
- artifact.isSnapshot();
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.record;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Test the standard artifact index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class StandardArtifactIndexRecordFactoryTest
- extends PlexusTestCase
-{
- private RepositoryIndexRecordFactory factory;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private static final String TEST_GROUP_ID = "org.apache.maven.archiva.record";
-
- private static final List JAR_CLASS_LIST = Arrays.asList( new String[]{"A", "b.B", "b.c.C"} );
-
- private static final List JAR_FILE_LIST =
- Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "A.class", "b/B.class", "b/c/C.class"} );
-
- private static final String JUNIT_DEPENDENCY = "junit:junit:3.8.1";
-
- private static final String PLUGIN_API_DEPENDENCY = "org.apache.maven:maven-plugin-api:2.0";
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- factory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
- }
-
- public void testIndexedJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-jar" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarWithClassifier()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar", "1.0", "jar", "jdk14" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-jar" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setClassifier( "jdk14" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarAndPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-jar-and-pom" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0-alpha-1" );
- expectedRecord.setVersion( "1.0-alpha-1" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Test JAR and POM" );
- expectedRecord.setDependencies( createDependencies() );
- expectedRecord.setDevelopers( createDevelopers() );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarAndPomWithClassifier()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-jar-and-pom" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0-alpha-1" );
- expectedRecord.setVersion( "1.0-alpha-1" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Test JAR and POM" );
- expectedRecord.setClassifier( "jdk14" );
- expectedRecord.setDependencies( createDependencies() );
- expectedRecord.setDevelopers( createDevelopers() );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarWithParentPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-child-pom" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0-SNAPSHOT" );
- expectedRecord.setVersion( "1.0-20060728.121314-1" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Child Project" );
- expectedRecord.setProjectDescription( "Description" );
- expectedRecord.setInceptionYear( "2005" );
- expectedRecord.setDependencies( Collections.singletonList( JUNIT_DEPENDENCY ) );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "758e1ae96dff63dab7278a62e3eb174d" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-pom" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setSha1Checksum( "770fde06cd5c3dccb5f5e8c6754b8c4c77b98560" );
- expectedRecord.setType( "pom" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setPackaging( "pom" );
- expectedRecord.setInceptionYear( "2005" );
- expectedRecord.setProjectName( "Maven Repository Manager Test POM" );
- expectedRecord.setProjectDescription( "Description" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testNonIndexedPom()
- throws RepositoryIndexException
- {
- // If we pass in only the POM that belongs to a JAR, then expect null, not the POM
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-plugin", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-archetype", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-skin", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
- }
-
- public void testIndexedPlugin()
- throws RepositoryIndexException, IOException, XmlPullParserException
- {
- Artifact artifact = createArtifact( "test-plugin" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3530896791670ebb45e17708e5d52c40" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-plugin" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setSha1Checksum( "2cd2619d59a684e82e97471d2c2e004144c8f24e" );
- expectedRecord.setType( "maven-plugin" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setClasses( Arrays.asList( new String[]{"org.apache.maven.archiva.record.MyMojo"} ) );
- expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF",
- "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.properties",
- "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.xml", "META-INF/maven/plugin.xml",
- "org/apache/maven/archiva/record/MyMojo.class"} ) );
- expectedRecord.setPackaging( "maven-plugin" );
- expectedRecord.setProjectName( "Maven Mojo Archetype" );
- expectedRecord.setPluginPrefix( "test" );
- expectedRecord.setDependencies( Arrays.asList( new String[]{JUNIT_DEPENDENCY, PLUGIN_API_DEPENDENCY} ) );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedArchetype()
- throws RepositoryIndexException, IOException, XmlPullParserException
- {
- Artifact artifact = createArtifact( "test-archetype" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "52b7ea4b53818b8a5f4c329d88fd60d9" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-archetype" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setSha1Checksum( "05841f5e51c124f1729d86c1687438c36b9255d9" );
- expectedRecord.setType( "maven-archetype" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "META-INF/maven/archetype.xml",
- "META-INF/maven/org.apache.maven.archiva.record/test-archetype/pom.properties",
- "META-INF/maven/org.apache.maven.archiva.record/test-archetype/pom.xml", "archetype-resources/pom.xml",
- "archetype-resources/src/main/java/App.java", "archetype-resources/src/test/java/AppTest.java"} ) );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Archetype - test-archetype" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedSkin()
- throws RepositoryIndexException, IOException, XmlPullParserException
- {
- Artifact artifact = createArtifact( "test-skin" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "ba2d8a722f763db2950ad63119585f45" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-skin" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setSha1Checksum( "44855e3e56c18ce766db315a2d4c114d7a8c8ab0" );
- expectedRecord.setType( "maven-skin" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "css/maven-theme.css",
- "META-INF/maven/org.apache.maven.skins/test-skin/pom.xml",
- "META-INF/maven/org.apache.maven.skins/test-skin/pom.properties"} ) );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Skin - test-skin" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testCorruptJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-corrupt-jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- public void testDll()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-dll", "1.0.1.34", "dll" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "d41d8cd98f00b204e9800998ecf8427e" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-dll" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0.1.34" );
- expectedRecord.setVersion( "1.0.1.34" );
- expectedRecord.setSha1Checksum( "da39a3ee5e6b4b0d3255bfef95601890afd80709" );
- expectedRecord.setType( "dll" );
- expectedRecord.setRepository( "test" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testMissingFile()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-foo" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar" );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- return createArtifact( artifactId, version, type, null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type, String classifier )
- {
- Artifact artifact = artifactFactory.createDependencyArtifact( TEST_GROUP_ID, artifactId,
- VersionRange.createFromVersion( version ), type,
- classifier, Artifact.SCOPE_RUNTIME );
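- // isSnapshot() is presumably called for its side effect of resolving the base version,
- // which testIndexedJarWithParentPom relies on (1.0-20060728.121314-1 -> 1.0-SNAPSHOT).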
- artifact.isSnapshot();
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-
- private static List createDevelopers()
- {
- List developers = new ArrayList();
- developers.add( "brett:Brett Porter:brett@apache.org" );
- return developers;
- }
-
- private static List createDependencies()
- {
- List dependencies = new ArrayList();
- dependencies.add( JUNIT_DEPENDENCY );
- dependencies.add( "org.apache.maven:maven-project:2.0" );
- return dependencies;
- }
-}
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.archiva.record</groupId>
- <artifactId>parent-pom</artifactId>
- <version>1</version>
- <packaging>pom</packaging>
- <name>Test Parent POM</name>
- <description>Description</description>
- <inceptionYear>2005</inceptionYear>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
- <modules>
- <module>test-child-pom</module>
- </modules>
-</project>
-
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.archiva.record</groupId>
- <artifactId>test-archetype</artifactId>
- <version>1.0</version>
- <name>Archetype - test-archetype</name>
-</project>
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.maven.archiva.record</groupId>
- <artifactId>parent-pom</artifactId>
- <version>1</version>
- </parent>
- <artifactId>test-child-pom</artifactId>
- <version>1.0-20060731-121314-1</version>
- <name>Child Project</name>
-</project>
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.archiva.record</groupId>
- <artifactId>test-jar-and-pom</artifactId>
- <version>1.0-alpha-1</version>
- <name>Test JAR and POM</name>
- <dependencies>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-project</artifactId>
- <version>2.0</version>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>brett</id>
- <name>Brett Porter</name>
- <email>brett@apache.org</email>
- <roles>
- <role>Developer</role>
- </roles>
- </developer>
- </developers>
-</project>
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.archiva.record</groupId>
- <artifactId>test-plugin</artifactId>
- <packaging>maven-plugin</packaging>
- <version>1.0</version>
- <name>Maven Mojo Archetype</name>
- <dependencies>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-plugin-api</artifactId>
- <version>2.0</version>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.archiva.record</groupId>
- <artifactId>test-pom</artifactId>
- <version>1.0</version>
- <name>Maven Repository Manager Test POM</name>
- <inceptionYear>2005</inceptionYear>
- <description>Description</description>
- <packaging>pom</packaging>
-</project>
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.archiva.record</groupId>
- <artifactId>test-skin</artifactId>
- <version>1.0</version>
- <name>Skin - test-skin</name>
-</project>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <parent>
- <artifactId>maven</artifactId>
- <groupId>org.apache.maven</groupId>
- <version>2.0.1</version>
- </parent>
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-artifact</artifactId>
- <name>Maven Artifact</name>
- <version>2.0.1</version>
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <dependencies>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-utils</artifactId>
- <version>1.0.5</version>
- </dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-container-default</artifactId>
- <version>1.0-alpha-9</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
- <distributionManagement>
- <status>deployed</status>
- </distributionManagement>
- <reporting>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-checkstyle-plugin</artifactId>
- <version>2.0</version>
- </plugin>
- </plugins>
- </reporting>
-</project>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-artifact</artifactId>
- <version>2.0.1</version>
-</metadata>
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-artifact</artifactId>
- <version>2.0.1</version>
- <versioning>
- <release>2.0.1</release>
- <versions>
- <version>2.0.1</version>
- </versions>
- <lastUpdated>20051212044643</lastUpdated>
- </versioning>
-</metadata>
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <parent>
- <artifactId>maven</artifactId>
- <groupId>org.apache.maven</groupId>
- <version>2.0</version>
- </parent>
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-corrupt-jar</artifactId>
- <name>Maven Model</name>
- <version>2.0</version>
- <description>Maven Model</description>
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.modello</groupId>
- <artifactId>modello-maven-plugin</artifactId>
- <version>2.0</version>
- <executions>
- <execution>
- <goals>
- <goal>xpp3-writer</goal>
- <goal>java</goal>
- <goal>xpp3-reader</goal>
- <goal>xsd</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <version>4.0.0</version>
- <model>maven.mdo</model>
- </configuration>
- </plugin>
- </plugins>
- </build>
- <profiles>
- <profile>
- <id>all-models</id>
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.modello</groupId>
- <artifactId>modello-maven-plugin</artifactId>
- <executions>
- <execution>
- <id>v3</id>
- <goals>
- <goal>xpp3-writer</goal>
- <goal>java</goal>
- <goal>xpp3-reader</goal>
- <goal>xsd</goal>
- </goals>
- <configuration>
- <version>3.0.0</version>
- <packageWithVersion>true</packageWithVersion>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-jar-plugin</artifactId>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>jar</goal>
- </goals>
- <configuration>
- <classifier>all</classifier>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- </profiles>
- <dependencies>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-utils</artifactId>
- <version>1.0.5</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <status>deployed</status>
- </distributionManagement>
-</project>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven</groupId>
- <plugins>
- <plugin>
- <prefix>org.apache.maven</prefix>
- <artifactId>org.apache.maven-maven-plugin</artifactId>
- </plugin>
- </plugins>
-</metadata>
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <parent>
- <artifactId>maven</artifactId>
- <groupId>org.apache.maven</groupId>
- <version>2.0</version>
- </parent>
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-model</artifactId>
- <name>Maven Model</name>
- <version>2.0</version>
- <description>Maven Model</description>
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.modello</groupId>
- <artifactId>modello-maven-plugin</artifactId>
- <version>2.0</version>
- <executions>
- <execution>
- <goals>
- <goal>xpp3-writer</goal>
- <goal>java</goal>
- <goal>xpp3-reader</goal>
- <goal>xsd</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <version>4.0.0</version>
- <model>maven.mdo</model>
- </configuration>
- </plugin>
- </plugins>
- </build>
- <profiles>
- <profile>
- <id>all-models</id>
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.modello</groupId>
- <artifactId>modello-maven-plugin</artifactId>
- <executions>
- <execution>
- <id>v3</id>
- <goals>
- <goal>xpp3-writer</goal>
- <goal>java</goal>
- <goal>xpp3-reader</goal>
- <goal>xsd</goal>
- </goals>
- <configuration>
- <version>3.0.0</version>
- <packageWithVersion>true</packageWithVersion>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-jar-plugin</artifactId>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>jar</goal>
- </goals>
- <configuration>
- <classifier>all</classifier>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- </profiles>
- <dependencies>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-utils</artifactId>
- <version>1.0.5</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <status>deployed</status>
- </distributionManagement>
-</project>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>test.inherited</groupId>
- <version>1.0.15</version>
- <artifactId>test-inherited-parent</artifactId>
- </parent>
- <!-- groupId and version are inherited from the parent -->
- <artifactId>test-inherited</artifactId>
- <packaging>pom</packaging>
-</project>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>test</groupId>
- <plugins>
- <plugin>
- <prefix></prefix>
- <artifactId>test-test-plugin</artifactId>
- </plugin>
- </plugins>
-</metadata>
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <modelVersion>4.0.0</modelVersion>
- <groupId>test</groupId>
- <artifactId>test-artifactId</artifactId>
- <version>1.0</version>
-</project>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <parent>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva</artifactId>
- <version>1.0-SNAPSHOT</version>
- </parent>
- <modelVersion>4.0.0</modelVersion>
- <artifactId>archiva-model</artifactId>
- <name>Archiva Model</name>
- <dependencies>
- <dependency>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-utils</artifactId>
- </dependency>
- <dependency>
- <groupId>javax.jdo</groupId>
- <artifactId>jdo2-api</artifactId>
- <version>2.0</version>
- </dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-container-default</artifactId>
- </dependency>
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- </dependency>
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-maven-plugin</artifactId>
- </plugin>
- <plugin>
- <groupId>org.codehaus.modello</groupId>
- <artifactId>modello-maven-plugin</artifactId>
- <version>1.0-alpha-15-SNAPSHOT</version>
- <configuration>
- <version>1.0.0</version>
- <packageWithVersion>false</packageWithVersion>
- <model>src/main/mdo/archiva-base.xml</model>
- </configuration>
- <executions>
- <execution>
- <id>archiva-base</id>
- <goals>
- <goal>java</goal>
- <goal>xsd</goal>
- <goal>jpox-jdo-mapping</goal>
- <goal>jpox-metadata-class</goal>
- <goal>xpp3-writer</goal>
- <goal>xpp3-reader</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>jpox-maven-plugin</artifactId>
- <version>1.1.6</version>
- <dependencies>
- <dependency>
- <groupId>org.apache.derby</groupId>
- <artifactId>derby</artifactId>
- <version>10.2.1.6</version>
- </dependency>
- </dependencies>
- <executions>
- <execution>
- <id>create-ddl</id>
- <phase>generate-test-resources</phase>
- <goals>
- <goal>schema-create</goal>
- </goals>
- <configuration>
- <outputFile>${basedir}/target/classes/org/apache/maven/archiva/model/schema.ddl</outputFile>
- <toolProperties>
- <property>
- <name>javax.jdo.option.ConnectionDriverName</name>
- <value>org.apache.derby.jdbc.EmbeddedDriver</value>
- </property>
- <property>
- <name>javax.jdo.option.ConnectionURL</name>
- <value>jdbc:derby:target/jdo-schema-create;create=true</value>
- </property>
- <property>
- <name>javax.jdo.option.ConnectionUserName</name>
- <value>sa</value>
- </property>
- <property>
- <name>javax.jdo.option.ConnectionPassword</name>
- <value></value>
- </property>
- <property>
- <name>log4j.configuration</name>
- <value>${basedir}/src/test/resources/log4j.xml</value>
- </property>
- <property>
- <name>org.jpox.autoCreateTables</name>
- <value>true</value>
- </property>
- </toolProperties>
- </configuration>
- </execution>
- <execution>
- <id>enhance</id>
- <goals>
- <goal>enhance</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>cobertura-maven-plugin</artifactId>
- <configuration>
- <instrumentation>
- <!-- exclude generated -->
- <excludes>
- <exclude>org/apache/maven/archiva/reporting/model/**</exclude>
- </excludes>
- </instrumentation>
- </configuration>
- </plugin>
- </plugins>
- </build>
-</project>
+++ /dev/null
-package org.apache.maven.archiva.model;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-
-import java.io.Serializable;
-
-/**
- * RepositoryContentKey - the jpox application key support class for all content within the repository.
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class RepositoryContentKey implements Serializable
-{
- /**
- * The Repository ID. (JPOX Requires this remain public)
- */
- public String repositoryId = "";
-
- /**
- * The Group ID. (JPOX Requires this remain public)
- */
- public String groupId = "";
-
- /**
- * The Artifact ID. (JPOX Requires this remain public)
- */
- public String artifactId = "";
-
- /**
- * The Version. (JPOX Requires this remain public)
- */
- public String version = "";
-
- /**
- * Default Constructor. Required by JPOX.
- */
- public RepositoryContentKey()
- {
-
- }
-
- /**
- * Key Based Constructor. Required by JPOX.
- *
- * @param key the String representing this object's values.
- */
- public RepositoryContentKey( String key )
- {
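- // The key is expected in the form "repositoryId:groupId:artifactId:version".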
- String parts[] = StringUtils.splitPreserveAllTokens( key, ':' );
- repositoryId = parts[0];
- groupId = parts[1];
- artifactId = parts[2];
- version = parts[3];
- }
-
- /**
- * Get the String representation of this object. - Required by JPOX.
- */
- public String toString()
- {
- return StringUtils.join( new String[] { repositoryId, groupId, artifactId, version }, ':' );
- }
-
- /**
- * Get the hashcode for this object's values - Required by JPOX.
- */
- public int hashCode()
- {
- final int PRIME = 31;
- int result = 1;
- result = PRIME * result + ( ( repositoryId == null ) ? 0 : repositoryId.hashCode() );
- result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() );
- result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() );
- result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
- return result;
- }
-
- /**
- * Compare this object's values for equality - Required by JPOX.
- */
- public boolean equals( Object obj )
- {
- if ( this == obj )
- {
- return true;
- }
-
- if ( obj == null )
- {
- return false;
- }
-
- if ( getClass() != obj.getClass() )
- {
- return false;
- }
-
- final RepositoryContentKey other = (RepositoryContentKey) obj;
-
- if ( repositoryId == null )
- {
- if ( other.repositoryId != null )
- {
- return false;
- }
- }
- else if ( !repositoryId.equals( other.repositoryId ) )
- {
- return false;
- }
-
- if ( groupId == null )
- {
- if ( other.groupId != null )
- {
- return false;
- }
- }
- else if ( !groupId.equals( other.groupId ) )
- {
- return false;
- }
-
- if ( artifactId == null )
- {
- if ( other.artifactId != null )
- {
- return false;
- }
- }
- else if ( !artifactId.equals( other.artifactId ) )
- {
- return false;
- }
-
- if ( version == null )
- {
- if ( other.version != null )
- {
- return false;
- }
- }
- else if ( !version.equals( other.version ) )
- {
- return false;
- }
-
- return true;
- }
-}
+++ /dev/null
-<?xml version="1.0" ?>
-<model jpox.mapping-in-package="true"
- jpox.table-prefix="ARCHIVA_"
- xsd.namespace="http://maven.apache.org/archiva/MODEL-1.0.0"
- xsd.target-namespace="http://maven.apache.org/archiva/MODEL-1.0.0">
- <id>archiva-base-model</id>
- <name>ArchivaBaseModel</name>
- <version>1.0.0</version>
- <description>Archiva Model</description>
- <defaults>
- <default>
- <key>package</key>
- <value>org.apache.maven.archiva.model</value>
- </default>
- </defaults>
-
- <classes>
- <class stash.storable="false"
- rootElement="true"
- jpox.enabled="false">
- <name>ArchivaAll</name>
- <version>1.0.0+</version>
- <description>
- This object is only used for the XML backup / restore features of Archiva.
- This object is not serialized to the Database.
- </description>
- <fields>
- <field>
- <name>repositories</name>
- <version>1.0.0+</version>
- <association>
- <type>ArchivaRepositoryModel</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>artifacts</name>
- <version>1.0.0+</version>
- <association>
- <type>ArchivaArtifactModel</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>repositoryMetadata</name>
- <version>1.0.0+</version>
- <association>
- <type>ArchivaRepositoryMetadata</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>projects</name>
- <version>1.0.0+</version>
- <association>
- <type>ArchivaProjectModel</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>artifactHealth</name>
- <version>1.0.0+</version>
- <association>
- <type>ArchivaArtifactHealth</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>repositoryMetadataHealth</name>
- <version>1.0.0+</version>
- <association>
- <type>ArchivaRepositoryMetadataHealth</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>repositoryContentStatistics</name>
- <version>1.0.0+</version>
- <association>
- <type>RepositoryContentStatistics</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- </fields>
- </class>
-
- <!-- _______________________________________________________________
- ____ _ _
- | _ \ ___ _ __ ___ ___(_) |_ ___ _ __ _ _
- | |_) / _ \ '_ \ / _ \/ __| | __/ _ \| '__| | | |
- | _ < __/ |_) | (_) \__ \ | || (_) | | | |_| |
- |_| \_\___| .__/ \___/|___/_|\__\___/|_| \__, |
- |_| |___/
- -->
-
- <class stash.storable="true"
- jpox.table="REPOSITORIES"
- jpox.not-persisted-fields="modelEncoding">
- <name>ArchivaRepositoryModel</name>
- <version>1.0.0+</version>
- <fields>
- <field jpox.primary-key="true"
- jpox.value-strategy="off">
- <name>id</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>true</required>
- <description>
- The unique ID for the repository.
- </description>
- </field>
- <field>
- <name>name</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>true</required>
- <description>
- The Name of the repository.
- </description>
- </field>
- <field>
- <name>url</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>true</required>
- <description>
- The URL of the repository.
- </description>
- </field>
- <field>
- <name>creationSource</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>true</required>
- <description>
- The Source of this repository.
- (Example: Configuration, POM)
- </description>
- </field>
- <field>
- <name>layoutName</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>true</required>
- <description>
- The layout of the repository.
- (Either 'default', or 'legacy')
- </description>
- <defaultValue>default</defaultValue>
- </field>
- <field>
- <name>releasePolicy</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>boolean</type>
- <required>true</required>
- <description>
- The releases policy of the repository.
- </description>
- <defaultValue>true</defaultValue>
- </field>
- <field>
- <name>snapshotPolicy</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>boolean</type>
- <required>true</required>
- <description>
- The snapshot policy of the repository.
- </description>
- <defaultValue>false</defaultValue>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="REPOSITORY_CONTENT_KEYS"
- jpox.use-identifiers-as-primary-key="true"
- jpox.identity-type="application"
- jpox.identity-class="RepositoryContentKey">
- <name>RepositoryContent</name>
- <version>1.0.0+</version>
- <description>
- Repository Content Identifier.
- </description>
- <fields>
- <field jpox.primary-key="true"
- jpox.value-strategy="off"
- jpox.persistence-modifier="persistent">
- <name>groupId</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>true</required>
- <description>
- The Group ID of the repository content.
- </description>
- </field>
- <field jpox.primary-key="true"
- jpox.value-strategy="off"
- jpox.persistence-modifier="persistent">
- <name>artifactId</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>true</required>
- <description>
- The Artifact ID of the repository content.
- </description>
- </field>
- <field jpox.primary-key="true"
- jpox.value-strategy="off"
- jpox.persistence-modifier="persistent">
- <name>version</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>false</required>
- <description>
- The version of the repository content.
- </description>
- </field>
- <field jpox.primary-key="true"
- jpox.value-strategy="off"
- jpox.persistence-modifier="persistent">
- <name>repositoryId</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <type>String</type>
- <description>
- The repository associated with this content.
- </description>
- </field>
- </fields>
- <codeSegments>
- <codeSegment>
- <version>1.0.0+</version>
- <code>
- <![CDATA[
- /**
- * Default Constructor.
- * NOTE: The RepositoryContent object is invalid, and cannot be persisted to
- * the database until you set the {@link #setGroupId(String)},
- * {@link #setArtifactId(String)}, and {@link #setVersion(String)} values
- * first.
- */
- public RepositoryContent()
- {
- /* Do nothing */
- }
-
- public RepositoryContent( String groupId, String artifactId, String version )
- {
- this.setGroupId( groupId );
- this.setArtifactId( artifactId );
- this.setVersion( version );
- }
-
- public RepositoryContent( String repositoryId, String groupId, String artifactId, String version )
- {
- this.setRepositoryId( repositoryId );
- this.setGroupId( groupId );
- this.setArtifactId( artifactId );
- this.setVersion( version );
- }
-
- public RepositoryContent( ArchivaRepositoryModel repository, String groupId, String artifactId, String version )
- {
- this.setRepositoryId( repository.getId() );
- this.setGroupId( groupId );
- this.setArtifactId( artifactId );
- this.setVersion( version );
- }
- ]]></code>
- </codeSegment>
- </codeSegments>
- </class>
-
- <!-- _______________________________________________________________
- _ _ _ __ _
- / \ _ __| |_(_)/ _| __ _ ___| |_
- / _ \ | '__| __| | |_ / _` |/ __| __|
- / ___ \| | | |_| | _| (_| | (__| |_
- /_/ \_\_| \__|_|_| \__,_|\___|\__|
-
- -->
-
- <class stash.storable="true"
- jpox.table="ARTIFACT">
- <name>ArchivaArtifactModel</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>contentKey</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <association>
- <type>RepositoryContent</type>
- <multiplicity>1</multiplicity>
- </association>
- <description>
- The content key for this artifact.
- </description>
- </field>
- <field>
- <name>classifier</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>false</required>
- <description>
- The classifier for this artifact.
- </description>
- </field>
- <field jpox.column="FILE_TYPE">
- <name>type</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>true</required>
- <description>
- The type of artifact.
- </description>
- </field>
- <field>
- <name>checksumMD5</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>false</required>
- <description>
- The MD5 checksum for the artifact file.
- </description>
- </field>
- <field>
- <name>checksumSHA1</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>false</required>
- <description>
- The SHA1 checksum for the artifact file.
- </description>
- </field>
- <field>
- <name>checksumBytecode</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>false</required>
- <description>
- The SHA1 checksum for the bytecode in the artifact file. (Can be empty if
- the artifact contains no bytecode)
- </description>
- </field>
- <field>
- <name>whenIndexed</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>Date</type>
- <required>false</required>
- <description>
- The timestamp when this artifact was indexed.
- </description>
- </field>
- <field>
- <name>origin</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>false</required>
- <description>
- The origin of this artifact. (Filesystem, Proxy, Deploy)
- </description>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="REPOSITORY_METADATA">
- <name>ArchivaRepositoryMetadata</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>contentKey</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <association>
- <type>RepositoryContent</type>
- <multiplicity>1</multiplicity>
- </association>
- <description>
- The content key for this repository metadata.
- </description>
- </field>
- <field>
- <name>releasedVersion</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>false</required>
- <type>String</type>
- <description>
- The released version id.
- </description>
- </field>
- <field>
- <name>availableVersions</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>false</required>
- <association>
- <type>String</type>
- <multiplicity>*</multiplicity>
- </association>
- <description>
- The list of available version ids.
- </description>
- </field>
- <field>
- <name>whenIndexed</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>Date</type>
- <required>false</required>
- <description>
- The timestamp when this repository metadata was indexed.
- </description>
- </field>
- <field>
- <name>lastUpdated</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>Date</type>
- <required>false</required>
- <description>
- The timestamp when this repository metadata was last updated.
- </description>
- </field>
- <field>
- <name>origin</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <type>String</type>
- <required>false</required>
- <description>
- The origin of this artifact. (Filesystem, Proxy, Deploy)
- </description>
- </field>
- </fields>
- </class>
-
- <!-- _______________________________________________________________
- __ __ ____ _ _
- | \/ | __ ___ _____ _ __ | _ \ _ __ ___ (_) ___ ___| |_
- | |\/| |/ _` \ \ / / _ \ '_ \ | |_) | '__/ _ \| |/ _ \/ __| __|
- | | | | (_| |\ V / __/ | | | | __/| | | (_) | | __/ (__| |_
- |_| |_|\__,_| \_/ \___|_| |_| |_| |_| \___// |\___|\___|\__|
- |__/
- -->
-
- <class stash.storable="true"
- jpox.table="PROJECT">
- <name>ArchivaProjectModel</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>contentKey</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <association>
- <type>RepositoryContent</type>
- <multiplicity>1</multiplicity>
- </association>
- <description>
- The content key for this artifact.
- </description>
- </field>
- <field>
- <name>parentContentKey</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>false</required>
- <association>
- <type>RepositoryContent</type>
- <multiplicity>1</multiplicity>
- </association>
- <description>
- The content key for a parent reference.
- </description>
- </field>
- <field>
- <name>packaging</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <type>String</type>
- <description>
- The declared packaging for this project model.
- </description>
- </field>
- <field>
- <name>origin</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <type>String</type>
- <description>
- The Origin of this Model. (Filesystem, Proxy, or Deploy)
- </description>
- </field>
- <field>
- <name>whenIndexed</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <type>Date</type>
- <description>
- The timestamp when this model was indexed.
- </description>
- </field>
- <field>
- <name>url</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>false</required>
- <type>String</type>
- <description>
- The URL for the project's homepage.
- </description>
- </field>
- <field>
- <name>organization</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>false</required>
- <association>
- <type>Organization</type>
- </association>
- </field>
- <field>
- <name>licenses</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>false</required>
- <association>
- <type>License</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>issueManagement</name>
- <version>1.0.0+</version>
- <description><![CDATA[The project's issue management system information.]]></description>
- <association>
- <type>IssueManagement</type>
- </association>
- </field>
- <field>
- <name>ciManagement</name>
- <version>1.0.0+</version>
- <description><![CDATA[The project's continuous integration information.]]></description>
- <association>
- <type>CiManagement</type>
- </association>
- </field>
- <field>
- <name>scm</name>
- <version>1.0.0+</version>
- <description>
- <![CDATA[Specification for the SCM used by the project, such as CVS, Subversion, etc.]]></description>
- <association>
- <type>Scm</type>
- </association>
- </field>
- <field>
- <name>individuals</name>
- <version>1.0.0+</version>
- <description>
- The list of individuals around this project.
- </description>
- <association>
- <type>Individual</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>dependencies</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- This element describes all of the dependencies associated with a
- project.
- ]]></description>
- <association>
- <type>Dependency</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>repositories</name>
- <version>1.0.0+</version>
- <description>
- The list of project repositories used by this project.
- </description>
- <association>
- <type>ProjectRepository</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>plugins</name>
- <version>1.0.0+</version>
- <description>
- The list of plugins that this project uses.
- </description>
- <association>
- <type>RepositoryContent</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>reports</name>
- <version>1.0.0+</version>
- <description>
- The list of reports that this project uses.
- </description>
- <association>
- <type>RepositoryContent</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="ORGANIZATION">
- <name>Organization</name>
- <description>Specifies the organization that produces this project.</description>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>name</name>
- <version>1.0.0+</version>
- <description><![CDATA[The full name of the organization.]]></description>
- <type>String</type>
- </field>
- <field>
- <name>url</name>
- <version>1.0.0+</version>
- <description><![CDATA[The URL to the organization's home page.]]></description>
- <type>String</type>
- </field>
- <field jpox.column="FAVICON_URL">
- <name>favicon</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- The URL to the organization's logo image. This can be a URL relative
- to the base directory of the generated web site,
- (e.g., <code>/images/org-logo.png</code>) or an absolute URL
- (e.g., <code>http://my.corp/logo.png</code>). This value is used
- when generating the project documentation.
- ]]></description>
- <type>String</type>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="LICENSES">
- <name>License</name>
- <description><![CDATA[
- Describes the licenses for this project. This is used to generate
- the license page of the project's web site, as well as being taken into consideration in other reporting and
- validation. The licenses listed for the project are that of the project itself, and not of dependencies.
- ]]></description>
- <version>1.0.0+</version>
- <fields>
- <field jpox.column="LICENSE_ID">
- <name>id</name>
- <version>1.0.0+</version>
- <type>int</type>
- <description>
- The type of license.
- </description>
- </field>
- <field>
- <name>name</name>
- <version>1.0.0+</version>
- <description><![CDATA[The full legal name of the license.]]></description>
- <type>String</type>
- </field>
- <field>
- <name>url</name>
- <version>1.0.0+</version>
- <description><![CDATA[The official url for the license text.]]></description>
- <type>String</type>
- </field>
- <field>
- <name>comments</name>
- <description>
- Addendum information pertaining to this license.
- </description>
- <version>1.0.0+</version>
- <type>String</type>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="ISSUE_MANAGEMENT">
- <name>IssueManagement</name>
- <description>
- Information about the issue tracking (or bug tracking) system used to manage this project.
- </description>
- <version>1.0.0+</version>
- <fields>
- <field jpox.column="SYSTEM_NAME">
- <name>system</name>
- <version>1.0.0+</version>
- <description><![CDATA[The name of the issue management system, e.g. Bugzilla]]></description>
- <type>String</type>
- </field>
- <field>
- <name>url</name>
- <version>1.0.0+</version>
- <description><![CDATA[URL for the issue management system used by the project.]]></description>
- <type>String</type>
- </field>
- </fields>
- </class>
- <class>
- <name>CiManagement</name>
- <version>1.0.0+</version>
- <fields>
- <field jpox.column="SYSTEM_NAME">
- <name>system</name>
- <version>1.0.0+</version>
- <description>
- <![CDATA[The name of the continuous integration system, e.g. <code>continuum</code>.]]></description>
- <type>String</type>
- </field>
- <field>
- <name>url</name>
- <version>1.0.0+</version>
- <description>
- <![CDATA[URL for the continuous integration system used by the project if it has a web interface.]]></description>
- <type>String</type>
- </field>
- </fields>
- </class>
- <class>
- <name>Individual</name>
- <description>
- Description of a person who has contributed to the project.
- This includes contributors and committers.
- </description>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>name</name>
- <version>1.0.0+</version>
- <description><![CDATA[The full name of the individual.]]></description>
- <type>String</type>
- </field>
- <field>
- <name>email</name>
- <version>1.0.0+</version>
- <description><![CDATA[The email address of the individual.]]></description>
- <type>String</type>
- </field>
- <field>
- <name>principal</name>
- <version>1.0.0+</version>
- <description>
- The RedBack (plexus security) principal associated with this Individual.
- </description>
- <type>String</type>
- </field>
- <field>
- <name>commitor</name>
- <version>1.0.0+</version>
- <description>
- Flag indicating whether this individual is a developer and/or committer.
- </description>
- <type>boolean</type>
- </field>
- <field>
- <name>url</name>
- <version>1.0.0+</version>
- <description><![CDATA[The URL for the homepage of the individual.]]></description>
- <type>String</type>
- </field>
- <!-- TODO: should this just be a single Organization element -->
- <field>
- <name>organization</name>
- <alias>organisation</alias>
- <version>1.0.0+</version>
- <description><![CDATA[The organization to which the individual belongs.]]></description>
- <type>String</type>
- </field>
- <field>
- <name>organizationUrl</name>
- <alias>organisationUrl</alias>
- <version>1.0.0+</version>
- <description><![CDATA[The URL of the organization.]]></description>
- <type>String</type>
- </field>
- <field jpox.column="CONTRIBUTOR_ROLES">
- <name>roles</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- The roles the individual plays in the project. Each role is
- described by a <code>role</code> element, the body of which is a
- role name. This can also be used to describe the contribution.
- ]]></description>
- <association>
- <type>String</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>timezone</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- The timezone the individual is in. This is a number in the range -11 to 12.
- ]]></description>
- <type>String</type>
- </field>
- <field>
- <name>properties</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- Properties about the individual, such as an instant messenger handle.
- ]]></description>
- <type>Properties</type>
- <association xml.mapStyle="inline">
- <type>String</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="DEPENDENCY">
- <name>Dependency</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>contentKey</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <association>
- <type>RepositoryContent</type>
- <multiplicity>1</multiplicity>
- </association>
- <description>
- The content key for this dependency.
- </description>
- </field>
- <field>
- <name>url</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- This url will be provided to the user if the jar file cannot be downloaded
- from the central repository.
- ]]></description>
- <type>String</type>
- </field>
- <field jpox.column="DEPENDENCY_TYPE">
- <name>type</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- The type of dependency. This defaults to <code>jar</code>. While it usually represents the extension on
- the filename of the dependency, that is not always the case. A type can be mapped to a different
- extension and a classifier.
- The type often corresponds to the packaging used, though this is also not always the case.
- Some examples are <code>jar</code>, <code>war</code>, <code>ejb-client</code> and <code>test-jar</code>.
- New types can be defined by plugins that set
- <code>extensions</code> to <code>true</code>, so this is not a complete list.
- ]]></description>
- <type>String</type>
- <defaultValue>jar</defaultValue>
- </field>
- <field>
- <name>classifier</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- The classifier of the dependency. This allows distinguishing two artifacts that belong to the same POM but
- were built differently, and is appended to the filename after the version. For example,
- <code>jdk14</code> and <code>jdk15</code>.
- ]]></description>
- <type>String</type>
- <required>false</required>
- </field>
- <field jpox.column="DEPENDENCY_SCOPE">
- <name>scope</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- The scope of the dependency - <code>compile</code>, <code>runtime</code>, <code>test</code>,
- <code>system</code>, and <code>provided</code>. Used to
- calculate the various classpaths used for compilation, testing, and so on. It also assists in determining
- which artifacts to include in a distribution of this project. For more information, see
- <a href="http://maven.apache.org/guides/introduction/introduction-to-dependency-mechanism.html">the
- dependency mechanism</a>.]]>
- </description>
- <type>String</type>
- </field>
- <field>
- <name>systemPath</name>
- <version>1.0.0+</version>
- <description><![CDATA[
- FOR SYSTEM SCOPE ONLY. Note that use of this property is <b>discouraged</b> and may be replaced in later
- versions. This specifies the path on the filesystem for this dependency.
- Requires an absolute path for the value, not relative.
- Use a property that gives the machine specific absolute path,
- e.g. <code>${java.home}</code>.
- ]]></description>
- <type>String</type>
- </field>
- <field>
- <name>exclusions</name>
- <version>1.0.0+</version>
- <description>
- Lists a set of artifacts that should be excluded from this dependency's artifact list when it comes to
- calculating transitive dependencies.
- </description>
- <association>
- <type>Exclusion</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>optional</name>
- <version>1.0.0+</version>
- <description>
- Indicates the dependency is optional for use of this library. While the version of the dependency will be
- taken into account for dependency calculation if the library is used elsewhere, it will not be passed on
- transitively.
- </description>
- <type>boolean</type>
- <defaultValue>false</defaultValue>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="EXCLUSIONS">
- <name>Exclusion</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>groupId</name>
- <version>4.0.0</version>
- <description><![CDATA[The group ID of the project to exclude.]]></description>
- <type>String</type>
- <required>true</required>
- </field>
- <field>
- <name>artifactId</name>
- <version>4.0.0</version>
- <description><![CDATA[The artifact ID of the project to exclude.]]></description>
- <type>String</type>
- <required>true</required>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="SCM">
- <name>Scm</name>
- <version>1.0.0+</version>
- <fields>
- <field jpox.column="SCM_URL">
- <name>connection</name>
- <version>1.0.0</version>
- <description><![CDATA[
- The source control management system URL
- that describes the repository and how to connect to the
- repository. For more information, see the
- <a href="http://maven.apache.org/scm/scm-url-format.html">URL format</a>
- and <a href="http://maven.apache.org/scm/scms-overview.html">list of supported SCMs</a>.
- This connection is read-only.
- ]]></description>
- <type>String</type>
- </field>
- <field>
- <name>developerConnection</name>
- <version>1.0.0</version>
- <description><![CDATA[
- Just like <code>connection</code>, but for developers, i.e. this scm connection
- will not be read only.
- ]]></description>
- <type>String</type>
- </field>
- <field>
- <name>url</name>
- <version>1.0.0</version>
- <description>
- <![CDATA[The URL to the project's browsable SCM repository, such as ViewVC or Fisheye.]]></description>
- <type>String</type>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="PROJECT_REPOSITORIES">
- <name>ProjectRepository</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>id</name>
- <version>4.0.0</version>
- <description><![CDATA[
- A unique identifier for a repository. This is used to match the repository to configuration in
- the <code>settings.xml</code> file, for example.
- ]]></description>
- <type>String</type>
- </field>
- <field>
- <name>name</name>
- <version>4.0.0</version>
- <description><![CDATA[
- Human readable name of the repository.
- ]]></description>
- <type>String</type>
- </field>
- <field>
- <name>url</name>
- <version>4.0.0</version>
- <description><![CDATA[
- The url of the repository, in the form <code>protocol://hostname/path</code>.
- ]]></description>
- <type>String</type>
- </field>
- <field>
- <name>layout</name>
- <version>4.0.0</version>
- <description><![CDATA[
- The type of layout this repository uses for locating and storing artifacts - can be <code>legacy</code> or
- <code>default</code>.
- ]]></description>
- <type>String</type>
- <defaultValue>default</defaultValue>
- </field>
- <field>
- <name>plugins</name>
- <version>1.0.0+</version>
- <description>
- Flag indicating if this repository is for plugin resolution.
- </description>
- <type>boolean</type>
- </field>
- <field>
- <name>releases</name>
- <version>1.0.0+</version>
- <description>
- Flag indicating if this repository has release versioned artifacts.
- </description>
- <type>boolean</type>
- </field>
- <field>
- <name>snapshots</name>
- <version>1.0.0+</version>
- <description>
- Flag indicating if this repository has snapshot versioned artifacts.
- </description>
- <type>boolean</type>
- </field>
- </fields>
- </class>
-
- <!-- _______________________________________________________________
- _ _ _ _ _
- | | | | ___ __ _| | |_| |__
- | |_| |/ _ \/ _` | | __| '_ \
- | _ | __/ (_| | | |_| | | |
- |_| |_|\___|\__,_|_|\__|_| |_|
- -->
-
- <class stash.storable="true"
- jpox.table="HEALTH_PROBLEMS">
- <name>HealthProblem</name>
- <version>1.0.0+</version>
- <fields>
- <field jpox.column="PROBLEM_TYPE">
- <name>type</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <type>String</type>
- <description>
- The type of health problem.
- </description>
- </field>
- <field jpox.column="PROBLEM_ORIGIN">
- <name>origin</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <type>String</type>
- <description>
- The origin of the health problem.
- </description>
- </field>
- <field>
- <name>message</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <type>String</type>
- <description>
- The message describing the health problem.
- </description>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="HEALTH_ARTIFACTS">
- <name>ArchivaArtifactHealth</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>artifact</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <association>
- <type>ArchivaArtifactModel</type>
- <multiplicity>1</multiplicity>
- </association>
- <description>
- The Artifact to report on.
- </description>
- </field>
- <field>
- <name>problems</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>false</required>
- <association>
- <type>HealthProblem</type>
- <multiplicity>*</multiplicity>
- </association>
- <description>
- The list of problems associated with this artifact.
- </description>
- </field>
- </fields>
- </class>
- <class stash.storable="true"
- jpox.table="HEALTH_REPOSITORY_METADATAS">
- <name>ArchivaRepositoryMetadataHealth</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>repositoryMetadata</name>
- <identity>true</identity>
- <version>1.0.0+</version>
- <required>true</required>
- <association>
- <type>ArchivaRepositoryMetadata</type>
- <multiplicity>1</multiplicity>
- </association>
- <description>
- The ArchivaRepositoryMetadata to report on.
- </description>
- </field>
- <field>
- <name>problems</name>
- <identity>false</identity>
- <version>1.0.0+</version>
- <required>false</required>
- <association>
- <type>HealthProblem</type>
- <multiplicity>*</multiplicity>
- </association>
- <description>
- The list of problems associated with this repository metadata.
- </description>
- </field>
- </fields>
- </class>
-
- <!-- _______________________________________________________________
- ____ _ _ _ _ _
- / ___|| |_ __ _| |_(_)___| |_(_) ___ ___
- \___ \| __/ _` | __| / __| __| |/ __/ __|
- ___) | || (_| | |_| \__ \ |_| | (__\__ \
- |____/ \__\__,_|\__|_|___/\__|_|\___|___/
- -->
-
- <class stash.storable="true"
- jpox.table="REPOSITORY_STATS">
- <name>RepositoryContentStatistics</name>
- <version>1.0.0+</version>
- <fields>
- <field>
- <name>repositoryId</name>
- <version>1.0.0+</version>
- <identity>false</identity>
- <required>true</required>
- <type>String</type>
- <description>
- The repository id the statistics belong to.
- </description>
- </field>
- <field>
- <name>whenGathered</name>
- <version>1.0.0+</version>
- <identity>false</identity>
- <required>true</required>
- <type>Date</type>
- <description>
- The timestamp when this set of statistics was gathered.
- </description>
- </field>
- <field>
- <name>duration</name>
- <version>1.0.0+</version>
- <identity>false</identity>
- <required>true</required>
- <type>long</type>
- <description>
- The duration (in milliseconds) for the gathering of the statistics.
- </description>
- </field>
- <field>
- <name>totalFileCount</name>
- <version>1.0.0+</version>
- <identity>false</identity>
- <required>true</required>
- <type>long</type>
- <description>
- The total number of files in the repository.
- </description>
- </field>
- <field>
- <name>newFileCount</name>
- <version>1.0.0+</version>
- <identity>false</identity>
- <required>true</required>
- <type>long</type>
- <description>
- The number of new files discovered.
- </description>
- </field>
- </fields>
- <codeSegments>
- <codeSegment>
- <version>1.0.0+</version>
- <code>
- private transient long startTimestamp;
-
- public void triggerStart() {
- startTimestamp = System.currentTimeMillis();
- }
-
- public void triggerFinished() {
- long finished = System.currentTimeMillis();
- setDuration( finished - startTimestamp );
- setWhenGathered( new java.util.Date( finished ) );
- }
-
- public void increaseFileCount() {
- this.totalFileCount++;
- }
-
- public void increaseNewFileCount() {
- this.newFileCount++;
- }
- </code>
- </codeSegment>
- </codeSegments>
- </class>
- </classes>
-</model>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8" ?>
-<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
-
-<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
-
- <appender name="console" class="org.apache.log4j.ConsoleAppender">
- <param name="Target" value="System.out"/>
- <layout class="org.apache.log4j.PatternLayout">
- <param name="ConversionPattern" value="%d [%t] %-5p %-30c{1} - %m%n"/>
- </layout>
- </appender>
-
- <!-- Help identify bugs during testing -->
- <logger name="org.apache.maven">
- <level value="info"/>
- </logger>
-
- <logger name="org.codehaus.plexus.security">
- <level value="info"/>
- </logger>
-
- <!-- squelch noisy objects (for now) -->
- <logger name="org.codehaus.plexus.mailsender.MailSender">
- <level value="info"/>
- </logger>
-
- <logger name="org.quartz">
- <level value="info"/>
- </logger>
-
- <logger name="org.apache.jasper">
- <level value="info"/>
- </logger>
-
- <logger name="com.opensymphony.xwork">
- <level value="info"/>
- </logger>
-
- <logger name="com.opensymphony.webwork">
- <level value="info"/>
- </logger>
-
- <logger name="org.codehaus.plexus.PlexusContainer">
- <level value="info"/>
- </logger>
-
- <logger name="JPOX">
- <level value="warn"/>
- </logger>
-
- <logger name="JPOX.MetaData">
- <level value="error"/>
- </logger>
-
- <logger name="JPOX.RDBMS.SQL">
- <level value="error"/>
- </logger>
-
- <logger name="SQL">
- <level value="error"/>
- </logger>
-
- <logger name="freemarker">
- <level value="warn"/>
- </logger>
-
- <logger name="org.codehaus.plexus.component.manager.ClassicSingletonComponentManager">
- <level value="error"/>
- </logger>
-
- <root>
- <priority value="info" />
- <appender-ref ref="console" />
- </root>
-
-</log4j:configuration>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <parent>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva</artifactId>
- <version>1.0-SNAPSHOT</version>
- </parent>
- <modelVersion>4.0.0</modelVersion>
- <artifactId>archiva-proxy</artifactId>
- <name>Archiva Proxy</name>
- <dependencies>
- <dependency>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-file</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-provider-api</artifactId>
- </dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-digest</artifactId>
- </dependency>
- <dependency>
- <groupId>easymock</groupId>
- <artifactId>easymock</artifactId>
- <version>1.2_Java1.3</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-package org.apache.maven.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.common.artifact.builder.BuilderException;
-import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
-import org.apache.maven.model.DistributionManagement;
-import org.apache.maven.model.Model;
-import org.apache.maven.model.Relocation;
-import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.apache.maven.wagon.ConnectionException;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authentication.AuthenticationException;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.apache.maven.wagon.observers.ChecksumObserver;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.apache.maven.wagon.repository.Repository;
-import org.codehaus.plexus.digest.DigestUtils;
-import org.codehaus.plexus.digest.DigesterException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.Reader;
-import java.security.NoSuchAlgorithmException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.TimeZone;
-
-/**
- * An implementation of the proxy handler. This class is not thread safe (the class itself is, but the wagons it uses
- * are not) - it is declared <code>per-lookup</code> for that reason.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component instantiation-strategy="per-lookup"
- * @todo use wagonManager for cache use file:// as URL
- * @todo this currently duplicates a lot of the wagon manager, and doesn't do things like snapshot resolution, etc.
- * The checksum handling is inconsistent with that of the wagon manager.
- * Should we have a more artifact based one? This will merge metadata so should behave correctly, and it is able to
- * correct some limitations of the wagon manager (eg, it can retrieve newer SNAPSHOT files without metadata)
- */
-public class DefaultProxyRequestHandler
- extends AbstractLogEnabled
- implements ProxyRequestHandler
-{
- /**
- * @plexus.requirement
- */
- private ArtifactFactory factory;
-
- /**
- * @plexus.requirement role-hint="default"
- * @todo use a map, and have priorities in them.
- */
- private LayoutArtifactBuilder defaultArtifactBuilder;
-
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private LayoutArtifactBuilder legacyArtifactBuilder;
-
- /**
- * @plexus.requirement role="org.apache.maven.wagon.Wagon"
- */
- private Map/*<String,Wagon>*/ wagons;
-
- private static final TimeZone UTC_TIMEZONE = TimeZone.getTimeZone( "UTC" );
-
- public File get( String path, List proxiedRepositories, ArtifactRepository managedRepository )
- throws ProxyException, ResourceDoesNotExistException
- {
- return get( path, proxiedRepositories, managedRepository, null );
- }
-
- public File get( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
- throws ProxyException, ResourceDoesNotExistException
- {
- return get( managedRepository, path, proxiedRepositories, wagonProxy, false );
- }
-
- public File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository )
- throws ProxyException, ResourceDoesNotExistException
- {
- return getAlways( path, proxiedRepositories, managedRepository, null );
- }
-
- public File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository,
- ProxyInfo wagonProxy )
- throws ResourceDoesNotExistException, ProxyException
- {
- return get( managedRepository, path, proxiedRepositories, wagonProxy, true );
- }
-
- private File get( ArtifactRepository managedRepository, String path, List proxiedRepositories, ProxyInfo wagonProxy,
- boolean force )
- throws ProxyException, ResourceDoesNotExistException
- {
- File target = new File( managedRepository.getBasedir(), path );
-
- if ( path.endsWith( "maven-metadata.xml" ) )
- {
- // Request for managed repository metadata
- getMetadata( path, target, proxiedRepositories, managedRepository, wagonProxy, force );
- }
- else
- {
- boolean checksum = false;
- String checksumExtension = null;
- String artifactPath = path;
- if ( path.endsWith( ".md5" ) || path.endsWith( ".sha1" ) )
- {
- int index = path.lastIndexOf( '.' );
- checksumExtension = path.substring( index + 1 );
- checksum = true;
- artifactPath = path.substring( 0, index );
- }
-
- String msg = "";
-
- // Request for artifact: parse the requested path to build an Artifact.
- Artifact artifact = null;
- try
- {
- artifact = defaultArtifactBuilder.build( artifactPath );
- getLogger().debug( "Artifact requested is: " + artifact );
- }
- catch ( BuilderException e )
- {
- msg = "Failed to build artifact from path:\n\tfrom default: " + e.getMessage();
- }
-
- if ( artifact == null )
- {
- try
- {
- artifact = legacyArtifactBuilder.build( artifactPath );
- getLogger().debug( "Artifact requested is: " + artifact );
- }
- catch ( BuilderException e )
- {
- getLogger().debug( msg + "\n\tfrom legacy: " + e.getMessage() );
- }
- }
-
- if ( artifact != null )
- {
- applyRelocation( managedRepository, artifact, proxiedRepositories, wagonProxy, force );
-
- if ( !checksum )
- {
- // Build the target file name
- target = new File( managedRepository.getBasedir(), managedRepository.pathOf( artifact ) );
-
- // Get the requested artifact from proxiedRepositories
- getArtifactFromRepository( managedRepository, target, artifact, proxiedRepositories, wagonProxy,
- force );
- }
- else
- {
- // Just adjust the filename for relocation, don't actually get it
- target = new File( managedRepository.getBasedir(),
- managedRepository.pathOf( artifact ) + "." + checksumExtension );
- }
- }
- else if ( !checksum )
- {
- // Some other unknown file in the repository, proxy as is, unless it was a checksum
- if ( force || !target.exists() )
- {
- getFileFromRepository( managedRepository, target, path, proxiedRepositories, wagonProxy, force );
- }
- }
- }
-
- if ( !target.exists() )
- {
- throw new ResourceDoesNotExistException( "Could not find " + path + " in any of the repositories." );
- }
-
- return target;
- }
-
- private void getFileFromRepository( ArtifactRepository managedRepository, File target, String path,
- List proxiedRepositories, ProxyInfo wagonProxy, boolean force )
- throws ProxyException, ResourceDoesNotExistException
- {
- for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
- {
- ProxiedArtifactRepository repository = (ProxiedArtifactRepository) i.next();
-
- if ( !force && repository.isCachedFailure( path ) )
- {
- processCachedRepositoryFailure( repository, "Cached failure found for: " + path );
- }
- else
- {
- ArtifactRepositoryPolicy policy = repository.getRepository().getReleases();
- getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, target, policy,
- force );
- }
- }
- }
-
- private void getArtifactFromRepository( ArtifactRepository managedRepository, File target, Artifact artifact,
- List proxiedRepositories, ProxyInfo wagonProxy, boolean force )
- throws ProxyException, ResourceDoesNotExistException
- {
- for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
- {
- ProxiedArtifactRepository repository = (ProxiedArtifactRepository) i.next();
- String path = repository.getRepository().getLayout().pathOf( artifact );
-
- if ( !force && repository.isCachedFailure( path ) )
- {
- processCachedRepositoryFailure( repository, "Cached failure found for: " + path );
- }
- else
- {
- get( artifact, target, repository, managedRepository, wagonProxy, force );
- }
- }
- }
-
- private void applyRelocation( ArtifactRepository managedRepository, Artifact artifact, List proxiedRepositories,
- ProxyInfo wagonProxy, boolean force )
- {
- Artifact pomArtifact =
- factory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
-
- File pomFile = new File( managedRepository.getBasedir(), managedRepository.pathOf( pomArtifact ) );
- try
- {
- getArtifactFromRepository( managedRepository, pomFile, pomArtifact, proxiedRepositories, wagonProxy,
- force );
- }
- catch ( ProxyException e )
- {
- getLogger().warn( "Error getting POM for artifact - not relocating: " + e.getMessage() );
- getLogger().debug( "Cause", e );
- }
- catch ( ResourceDoesNotExistException e )
- {
- getLogger().debug( "Remote POM not found for artifact - not relocating" );
- }
-
- if ( pomFile.exists() )
- {
- Model model = null;
- try
- {
- // Parse the pom and look at relocation metadata
- Reader reader = new FileReader( pomFile );
- model = new MavenXpp3Reader().read( reader );
- }
- catch ( IOException e )
- {
- getLogger().warn( "Error reading POM for artifact - not relocating: " + e.getMessage() );
- getLogger().debug( "Cause", e );
- }
- catch ( XmlPullParserException e )
- {
- getLogger().warn( "Error parsing POM for artifact - not relocating: " + e.getMessage() );
- getLogger().debug( "Cause", e );
- }
-
- if ( model != null )
- {
- DistributionManagement dist;
- dist = model.getDistributionManagement();
-
- if ( dist != null )
- {
- Relocation relocation = dist.getRelocation();
- if ( relocation != null )
- {
- String requestedId =
- artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion();
-
- // artifact is relocated : update the artifact
- if ( relocation.getGroupId() != null )
- {
- artifact.setGroupId( relocation.getGroupId() );
- }
- if ( relocation.getArtifactId() != null )
- {
- artifact.setArtifactId( relocation.getArtifactId() );
- }
- if ( relocation.getVersion() != null )
- {
- artifact.setVersion( relocation.getVersion() );
- }
-
- String relocatedId =
- artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion();
-
- getLogger().debug( "Artifact " + requestedId + " has been relocated to " + relocatedId +
- ( relocation.getMessage() != null ? ": " + relocation.getMessage() : "" ) );
-
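- // Relocations can be chained, so apply them recursively until no further relocation is found.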
- applyRelocation( managedRepository, artifact, proxiedRepositories, wagonProxy, force );
- }
- }
- }
- }
- }
-
- private void getMetadata( String path, File target, List proxiedRepositories, ArtifactRepository managedRepository,
- ProxyInfo wagonProxy, boolean force )
- throws ProxyException
- {
- for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
- {
- ProxiedArtifactRepository repository = (ProxiedArtifactRepository) i.next();
- File metadataFile = new File( target.getParentFile(), ".metadata-" + repository.getRepository().getId() );
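- // Each proxied repository keeps its own cached copy of the metadata (".metadata-<id>") next to the merged file.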
-
- ArtifactRepositoryPolicy policy = repository.getRepository().getReleases();
-
- // if it is snapshot metadata, use a different policy
- if ( path.endsWith( "-SNAPSHOT/maven-metadata.xml" ) )
- {
- policy = repository.getRepository().getSnapshots();
- }
-
- if ( force || !metadataFile.exists() || isOutOfDate( policy, metadataFile ) )
- {
- getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, metadataFile,
- policy, force );
-
- mergeMetadataFiles( target, metadataFile );
- }
- }
- }
-
- private void get( Artifact artifact, File target, ProxiedArtifactRepository repository,
- ArtifactRepository managedRepository, ProxyInfo wagonProxy, boolean force )
- throws ProxyException
- {
- ArtifactRepository artifactRepository = repository.getRepository();
-
- // we use the release policy for tracking failures, but only check for updates on snapshots
- // also, we don't look for updates on timestamp snapshot files, only non-unique-version ones
- ArtifactRepositoryPolicy policy =
- artifact.isSnapshot() ? artifactRepository.getSnapshots() : artifactRepository.getReleases();
-
- boolean needsUpdate = false;
- if ( artifact.getVersion().endsWith( "-SNAPSHOT" ) && isOutOfDate( policy, target ) )
- {
- needsUpdate = true;
- }
-
- if ( needsUpdate || force || !target.exists() )
- {
- getFileFromRepository( artifactRepository.pathOf( artifact ), repository, managedRepository.getBasedir(),
- wagonProxy, target, policy, force );
- }
- }
-
- private void mergeMetadataFiles( File target, File metadataFile )
- throws ProxyException
- {
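- // Merge the freshly proxied per-repository metadata into the repository's main maven-metadata.xml,
- // writing it back only if the merge changed anything.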
- MetadataXpp3Reader reader = new MetadataXpp3Reader();
- if ( metadataFile.exists() )
- {
- Metadata metadata = null;
- if ( target.exists() )
- {
- FileReader fileReader = null;
- try
- {
- fileReader = new FileReader( target );
- metadata = reader.read( fileReader );
- }
- catch ( XmlPullParserException e )
- {
- throw new ProxyException( "Unable to parse existing metadata: " + e.getMessage(), e );
- }
- catch ( IOException e )
- {
- throw new ProxyException( "Unable to read existing metadata: " + e.getMessage(), e );
- }
- finally
- {
- IOUtils.closeQuietly( fileReader );
- }
- }
-
- FileReader fileReader = null;
- boolean changed = false;
- try
- {
- fileReader = new FileReader( metadataFile );
- Metadata newMetadata = reader.read( fileReader );
-
- if ( metadata != null )
- {
- setLastUpdatedIfEmpty( newMetadata, metadataFile );
- setLastUpdatedIfEmpty( metadata, target );
-
- changed = metadata.merge( newMetadata );
- }
- else
- {
- metadata = newMetadata;
- changed = true;
- }
- }
- catch ( IOException e )
- {
- // ignore the merged file
- getLogger().warn( "Unable to read new metadata: " + e.getMessage() );
- }
- catch ( XmlPullParserException e )
- {
- // ignore the merged file
- getLogger().warn( "Unable to parse new metadata: " + e.getMessage() );
- }
- finally
- {
- IOUtils.closeQuietly( fileReader );
- }
-
- if ( changed )
- {
- FileWriter fileWriter = null;
- try
- {
- fileWriter = new FileWriter( target );
- new MetadataXpp3Writer().write( fileWriter, metadata );
- }
- catch ( IOException e )
- {
- getLogger().warn( "Unable to store new metadata: " + e.getMessage() );
- }
- finally
- {
- IOUtils.closeQuietly( fileWriter );
- }
- }
- }
- }
-
- private void setLastUpdatedIfEmpty( Metadata metadata, File metadataFile )
- {
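- // when the metadata carries no lastUpdated value, default it to the file's modification time in UTC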
- if ( metadata.getVersioning() == null )
- {
- metadata.setVersioning( new Versioning() );
- }
- if ( metadata.getVersioning().getLastUpdated() == null )
- {
- DateFormat fmt = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
- fmt.setTimeZone( UTC_TIMEZONE );
- metadata.getVersioning().setLastUpdated( fmt.format( new Date( metadataFile.lastModified() ) ) );
- }
- }
-
- private void getFileFromRepository( String path, ProxiedArtifactRepository repository, String repositoryCachePath,
- ProxyInfo httpProxy, File target, ArtifactRepositoryPolicy policy,
- boolean force )
- throws ProxyException
- {
- if ( !policy.isEnabled() )
- {
- getLogger().debug( "Skipping disabled repository " + repository.getName() );
- return;
- }
-
- Map checksums = null;
- Wagon wagon = null;
-
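- // download into a temporary file first so that a failed or partial transfer never corrupts the existing target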
- File temp = new File( target.getAbsolutePath() + ".tmp" );
- temp.deleteOnExit();
-
- boolean connected = false;
- try
- {
- String protocol = repository.getRepository().getProtocol();
- wagon = (Wagon) wagons.get( protocol );
- if ( wagon == null )
- {
- throw new ProxyException( "Unsupported remote protocol: " + protocol );
- }
-
- //@todo configure wagon (ssh settings, etc)
-
- checksums = prepareChecksumListeners( wagon );
-
- connected = connectToRepository( wagon, repository, httpProxy );
- if ( connected )
- {
- int tries = 0;
- boolean success;
-
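- // retry the download when checksum verification fails; after a second failed attempt, record the failure and give up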
- do
- {
- tries++;
-
- boolean downloaded = true;
- if ( force || !target.exists() )
- {
- getLogger().debug( "Retrieving " + path + " from " + repository.getName() );
- wagon.get( path, temp );
- }
- else
- {
- getLogger().debug( "Retrieving " + path + " from " + repository.getName() + " if updated" );
- downloaded = wagon.getIfNewer( path, temp, target.lastModified() );
- }
-
- if ( downloaded )
- {
- success = checkChecksum( checksums, path, wagon, repositoryCachePath );
-
- if ( tries > 1 && !success )
- {
- processRepositoryFailure( repository,
- "Checksum failures occurred while downloading " + path, path,
- policy );
- return;
- }
- }
- else
- {
- // getIfNewer determined we were up to date
- success = true;
- }
- }
- while ( !success );
-
- // temp won't exist if getIfNewer determined the local copy was already current, but that is still a successful result
- if ( temp.exists() )
- {
- moveTempToTarget( temp, target );
- }
-
- getLogger().debug( "Successfully downloaded" );
- }
- //try next repository
- }
- catch ( TransferFailedException e )
- {
- processRepositoryFailure( repository, e, path, policy );
- }
- catch ( AuthorizationException e )
- {
- processRepositoryFailure( repository, e, path, policy );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // hard failure setting doesn't affect "not found".
- getLogger().debug( "Artifact not found in repository: " + repository.getName() + ": " + e.getMessage() );
- }
- finally
- {
- temp.delete();
-
- if ( wagon != null && checksums != null )
- {
- releaseChecksumListeners( wagon, checksums );
- }
-
- if ( connected )
- {
- disconnectWagon( wagon );
- }
- }
- }
-
- private static boolean isOutOfDate( ArtifactRepositoryPolicy policy, File target )
- {
- return policy != null && policy.checkOutOfDate( new Date( target.lastModified() ) );
- }
-
- /**
- * Adds checksum observers as transfer listeners to the given wagon.
- *
- * @param wagon the wagon to attach the checksum observers to
- * @return map of the ChecksumObservers added as transfer listeners, keyed by checksum file extension
- */
- private Map prepareChecksumListeners( Wagon wagon )
- {
- Map checksums = new LinkedHashMap();
- try
- {
- ChecksumObserver checksum = new ChecksumObserver( "SHA-1" );
- wagon.addTransferListener( checksum );
- checksums.put( "sha1", checksum );
-
- checksum = new ChecksumObserver( "MD5" );
- wagon.addTransferListener( checksum );
- checksums.put( "md5", checksum );
- }
- catch ( NoSuchAlgorithmException e )
- {
- getLogger().error( "An error occurred while preparing checksum observers: " + e.getMessage() );
- }
- return checksums;
- }
-
- private void releaseChecksumListeners( Wagon wagon, Map checksumMap )
- {
- for ( Iterator checksums = checksumMap.values().iterator(); checksums.hasNext(); )
- {
- ChecksumObserver listener = (ChecksumObserver) checksums.next();
- wagon.removeTransferListener( listener );
- }
- }
-
- private boolean connectToRepository( Wagon wagon, ProxiedArtifactRepository repository, ProxyInfo httpProxy )
- {
- boolean connected = false;
- try
- {
- ArtifactRepository artifactRepository = repository.getRepository();
- Repository wagonRepository = new Repository( artifactRepository.getId(), artifactRepository.getUrl() );
- if ( repository.isUseNetworkProxy() && httpProxy != null )
- {
- wagon.connect( wagonRepository, httpProxy );
- }
- else
- {
- wagon.connect( wagonRepository );
- }
- connected = true;
- }
- catch ( ConnectionException e )
- {
- getLogger().info( "Could not connect to " + repository.getName() + ": " + e.getMessage() );
- }
- catch ( AuthenticationException e )
- {
- getLogger().info( "Could not connect to " + repository.getName() + ": " + e.getMessage() );
- }
-
- return connected;
- }
-
- private boolean checkChecksum( Map checksumMap, String path, Wagon wagon, String repositoryCachePath )
- throws ProxyException
- {
- releaseChecksumListeners( wagon, checksumMap );
-
- boolean correctChecksum = false;
-
- boolean allNotFound = true;
-
- for ( Iterator i = checksumMap.keySet().iterator(); i.hasNext() && !correctChecksum; )
- {
- String checksumExt = (String) i.next();
- ChecksumObserver checksum = (ChecksumObserver) checksumMap.get( checksumExt );
- String checksumPath = path + "." + checksumExt;
- File checksumFile = new File( repositoryCachePath, checksumPath );
-
- File tempChecksumFile = new File( checksumFile.getAbsolutePath() + ".tmp" );
- tempChecksumFile.deleteOnExit();
-
- try
- {
- wagon.get( checksumPath, tempChecksumFile );
-
- allNotFound = false;
-
- String remoteChecksum = DigestUtils.cleanChecksum( FileUtils.readFileToString( tempChecksumFile, null ),
- checksumExt.toUpperCase(),
- path.substring( path.lastIndexOf( '/' ) + 1 ) );
-
- String actualChecksum = checksum.getActualChecksum();
-
- remoteChecksum = remoteChecksum.toUpperCase();
-
- if ( actualChecksum != null && remoteChecksum.equals( actualChecksum.toUpperCase() ) )
- {
- moveTempToTarget( tempChecksumFile, checksumFile );
-
- correctChecksum = true;
- }
- else
- {
- getLogger().warn(
- "The checksum '" + actualChecksum + "' did not match the remote value: " + remoteChecksum );
- }
- }
- catch ( TransferFailedException e )
- {
- getLogger().warn( "An error occurred during the download of " + checksumPath + ": " + e.getMessage() );
- // do nothing try the next checksum
-
- allNotFound = false;
- }
- catch ( ResourceDoesNotExistException e )
- {
- getLogger().debug( "The checksum did not exist: " + checksumPath + "; " + e.getMessage() );
- // do nothing try the next checksum
- // remove it if it is present locally in case there is an old incorrect one
- if ( checksumFile.exists() )
- {
- checksumFile.delete();
- }
- }
- catch ( AuthorizationException e )
- {
- getLogger().warn( "An error occurred during the download of " + checksumPath + ": " + e.getMessage() );
- // do nothing try the next checksum
-
- allNotFound = false;
- }
- catch ( IOException e )
- {
- getLogger().warn( "An error occurred while reading the temporary checksum file: " + e.getMessage() );
- // do nothing try the next checksum
-
- allNotFound = false;
- }
- catch ( DigesterException e )
- {
- getLogger().warn( "The checksum was invalid: " + checksumPath + ": " + e.getMessage() );
- // do nothing try the next checksum
-
- allNotFound = false;
- }
- finally
- {
- tempChecksumFile.delete();
- }
- }
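- // the download is accepted when a checksum verified, or when the remote repository publishes no checksum files at all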
- return correctChecksum || allNotFound;
- }
-
- /**
- * Moves the temporary file to its real destination. This is patterned after the way WagonManager handles
- * its downloaded files.
- *
- * @param temp The completed download file
- * @param target The final location of the downloaded file
- * @throws ProxyException when the temp file cannot replace the target file
- */
- private void moveTempToTarget( File temp, File target )
- throws ProxyException
- {
- if ( target.exists() && !target.delete() )
- {
- throw new ProxyException( "Unable to overwrite existing target file: " + target.getAbsolutePath() );
- }
-
- if ( !temp.renameTo( target ) )
- {
- getLogger().warn( "Unable to rename tmp file to its final name... resorting to copy command." );
-
- try
- {
- FileUtils.copyFile( temp, target );
- }
- catch ( IOException e )
- {
- throw new ProxyException( "Cannot copy tmp file to its final location", e );
- }
- finally
- {
- temp.delete();
- }
- }
- }
-
- /**
- * Disconnects the wagon from its repository.
- *
- * @param wagon the connected wagon
- */
- private void disconnectWagon( Wagon wagon )
- {
- try
- {
- wagon.disconnect();
- }
- catch ( ConnectionException e )
- {
- getLogger().error( "Problem disconnecting from wagonManager - ignoring: " + e.getMessage() );
- }
- }
-
- private void processRepositoryFailure( ProxiedArtifactRepository repository, Throwable t, String path,
- ArtifactRepositoryPolicy policy )
- throws ProxyException
- {
- repository.addFailure( path, policy );
-
- String message = t.getMessage();
- if ( repository.isHardFail() )
- {
- throw new ProxyException(
- "An error occurred in hardfailing repository " + repository.getName() + "...\n " + message, t );
- }
-
- getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
- getLogger().debug( "Cause", t );
- }
-
- private void processRepositoryFailure( ProxiedArtifactRepository repository, String message, String path,
- ArtifactRepositoryPolicy policy )
- throws ProxyException
- {
- repository.addFailure( path, policy );
-
- processCachedRepositoryFailure( repository, message );
- }
-
- private void processCachedRepositoryFailure( ProxiedArtifactRepository repository, String message )
- throws ProxyException
- {
- if ( repository.isHardFail() )
- {
- throw new ProxyException(
- "An error occurred in hardfailing repository " + repository.getName() + "...\n " + message );
- }
-
- getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
-
-import java.util.Calendar;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * A proxied artifact repository - contains the artifact repository and additional information about
- * the proxied repository.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ProxiedArtifactRepository
-{
- /**
- * Whether to cache failures or not.
- */
- private boolean cacheFailures;
-
- /**
- * Whether failures on this repository cause the whole group to fail.
- */
- private boolean hardFail;
-
- /**
- * Whether to use the network proxy for any requests.
- */
- private boolean useNetworkProxy;
-
- /**
- * The artifact repository on the other end of the proxy.
- */
- private final ArtifactRepository repository;
-
- /**
- * Cache of failures that have already occurred, containing paths from the repository root. The value given
- * specifies when the failure should expire.
- */
- private Map/*<String,Long>*/ failureCache = new HashMap/*<String,Long>*/();
-
- /**
- * A user-friendly name for the repository.
- */
- private String name;
-
- public ProxiedArtifactRepository( ArtifactRepository repository )
- {
- this.repository = repository;
- }
-
- public boolean isHardFail()
- {
- return hardFail;
- }
-
- public boolean isUseNetworkProxy()
- {
- return useNetworkProxy;
- }
-
- public boolean isCacheFailures()
- {
- return cacheFailures;
- }
-
- public ArtifactRepository getRepository()
- {
- return repository;
- }
-
- /**
- * Check if there is a previously cached failure for requesting the given path.
- *
- * @param path the path
- * @return whether there is a failure
- */
- public boolean isCachedFailure( String path )
- {
- boolean failed = false;
- if ( cacheFailures )
- {
- Long time = (Long) failureCache.get( path );
- if ( time != null )
- {
- if ( System.currentTimeMillis() < time.longValue() )
- {
- failed = true;
- }
- else
- {
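- // the cached failure has expired; clear it so the path can be tried again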
- clearFailure( path );
- }
- }
- }
- return failed;
- }
-
- /**
- * Add a failure to the cache.
- *
- * @param path the path that failed
- * @param policy the policy for when the failure should expire
- */
- public void addFailure( String path, ArtifactRepositoryPolicy policy )
- {
- failureCache.put( path, new Long( calculateExpiryTime( policy ) ) );
- }
-
- private long calculateExpiryTime( ArtifactRepositoryPolicy policy )
- {
- String updatePolicy = policy.getUpdatePolicy();
- long time;
- if ( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS.equals( updatePolicy ) )
- {
- time = 0;
- }
- else if ( ArtifactRepositoryPolicy.UPDATE_POLICY_DAILY.equals( updatePolicy ) )
- {
- // Get midnight boundary
- Calendar cal = Calendar.getInstance();
- cal.set( Calendar.HOUR_OF_DAY, 0 );
- cal.set( Calendar.MINUTE, 0 );
- cal.set( Calendar.SECOND, 0 );
- cal.set( Calendar.MILLISECOND, 0 );
- cal.add( Calendar.DAY_OF_MONTH, 1 );
- time = cal.getTime().getTime();
- }
- else if ( updatePolicy.startsWith( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL ) )
- {
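- // "interval:N" policy: the cached failure expires N minutes from now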
- String s = updatePolicy.substring( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL.length() + 1 );
- int minutes = Integer.valueOf( s ).intValue();
- Calendar cal = Calendar.getInstance();
- cal.add( Calendar.MINUTE, minutes );
- time = cal.getTime().getTime();
- }
- else
- {
- // else assume "never"
- time = Long.MAX_VALUE;
- }
- return time;
- }
-
- /**
- * Remove a failure.
- *
- * @param path the path that had previously failed
- */
- public void clearFailure( String path )
- {
- failureCache.remove( path );
- }
-
- public String getName()
- {
- return name;
- }
-
- public void setCacheFailures( boolean cacheFailures )
- {
- this.cacheFailures = cacheFailures;
- }
-
- public void setHardFail( boolean hardFail )
- {
- this.hardFail = hardFail;
- }
-
- public void setUseNetworkProxy( boolean useNetworkProxy )
- {
- this.useNetworkProxy = useNetworkProxy;
- }
-
- public void setName( String name )
- {
- this.name = name;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class ProxyException
- extends Exception
-{
- public ProxyException( String message )
- {
- super( message );
- }
-
- public ProxyException( String message, Throwable t )
- {
- super( message, t );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * An individual request handler for the proxy.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface ProxyRequestHandler
-{
- /**
- * The Plexus role of the component.
- */
- String ROLE = ProxyRequestHandler.class.getName();
-
- /**
- * Used to retrieve an artifact at a particular path, giving the cached version if it exists.
- *
- * @param path the expected repository path
- * @param proxiedRepositories the repositories being proxied to
- * @param managedRepository the locally managed repository to cache artifacts in
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException
- * when the requested object can't be found in any of the
- * configured repositories
- */
- File get( String path, List proxiedRepositories, ArtifactRepository managedRepository )
- throws ProxyException, ResourceDoesNotExistException;
-
- /**
- * Used to retrieve an artifact at a particular path, giving the cached version if it exists.
- *
- * @param path the expected repository path
- * @param proxiedRepositories the repositories being proxied to
- * @param managedRepository the locally managed repository to cache artifacts in
- * @param wagonProxy a network proxy to use when transferring files if needed
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException
- * when the requested object can't be found in any of the
- * configured repositories
- */
- File get( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
- throws ProxyException, ResourceDoesNotExistException;
-
- /**
- * Used to force a remote download of the requested path from any of the configured repositories. This method
- * only bypasses the cache when searching; the retrieved path is still stored in the cache.
- *
- * @param path the expected repository path
- * @param proxiedRepositories the repositories being proxied to
- * @param managedRepository the locally managed repository to cache artifacts in
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException
- * when the requested object can't be found in any of the
- * configured repositories
- */
- File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository )
- throws ProxyException, ResourceDoesNotExistException;
-
- /**
- * Used to force a remote download of the requested path from any of the configured repositories. This method
- * only bypasses the cache when searching; the retrieved path is still stored in the cache.
- *
- * @param path the expected repository path
- * @param proxiedRepositories the repositories being proxied to
- * @param managedRepository the locally managed repository to cache artifacts in
- * @param wagonProxy a network proxy to use when transferring files if needed
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException
- * when the requested object can't be found in any of the
- * configured repositories
- */
- File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
- throws ProxyException, ResourceDoesNotExistException;
-}
+++ /dev/null
-~~ Copyright 2006 The Apache Software Foundation.
-~~
-~~ Licensed under the Apache License, Version 2.0 (the "License");
-~~ you may not use this file except in compliance with the License.
-~~ You may obtain a copy of the License at
-~~
-~~ http://www.apache.org/licenses/LICENSE-2.0
-~~
-~~ Unless required by applicable law or agreed to in writing, software
-~~ distributed under the License is distributed on an "AS IS" BASIS,
-~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-~~ See the License for the specific language governing permissions and
-~~ limitations under the License.
-
-~~ NOTE: For help with the syntax of this file, see:
-~~ http://maven.apache.org/guides/mini/guide-apt-format.html
-
-ProxyManager
-
- The ProxyManager is designed to be used as a simple object or bean by a
- command-line application or a web application.
-
-Configuration
-
- An instance of a ProxyManager requires a configuration object, called
- ProxyConfiguration, that defines its behavior. The ProxyConfiguration is a
- Plexus component and can be looked up to obtain an instance. Below is a
- sample Plexus lookup statement:
-
-----------
- ProxyConfiguration config = (ProxyConfiguration) container.lookup( ProxyConfiguration.ROLE );
-----------
-
- Currently, a ProxyConfiguration lookup returns an empty instance, which means
- it does not yet contain any definitions of how the ProxyManager should
- behave. The next step is therefore to define its behavior explicitly.
-
-----------
- ProxyConfiguration config = (ProxyConfiguration) container.lookup( ProxyConfiguration.ROLE );
-
- config.setRepositoryCachePath( "/user/proxy-cache" );
-
- ArtifactRepositoryLayout defLayout = new DefaultRepositoryLayout();
-
- File repo1File = new File( "src/test/remote-repo1" );
-
- ProxyRepository repo1 = new ProxyRepository( "central", "http://www.ibiblio.org/maven2", defLayout );
-
- config.addRepository( repo1 );
-----------
-
- The statements above set up the ProxyConfiguration to use the directory
- <<</user/proxy-cache>>> as the location of the proxy's repository cache.
- They then create a ProxyRepository instance with an id of <<<central>>> that
- looks for remote files on ibiblio.org.
-
-Instantiation
-
- To create or retrieve an instance of a ProxyManager, one will need to use the
- ProxyManagerFactory.
-
-----------
- ProxyManagerFactory factory = (ProxyManagerFactory) container.lookup( ProxyManagerFactory.ROLE );
- proxy = factory.getProxyManager( "default", config );
-----------
-
- The factory requires two parameters. The first is the proxy_type to use, and
- the second is the ProxyConfiguration created above. The proxy_type defines
- the client that the ProxyManager is expected to service. Currently, only the
- <<<default>>> ProxyManager type is available, and it is defined to serve
- Maven 2.x clients.
-
-Usage
-
-* The get() method
-
- The ProxyManager get( target ) method is used to retrieve the file at a given
- repository path. This method first checks whether the target already exists
- in the cache. If it does not, the ProxyManager searches all the
- ProxyRepositories present in its ProxyConfiguration. When the target path is
- found, the ProxyManager stores a copy of it in its cache and returns a File
- instance of the cached copy, as in the sketch below.
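-
- The following is a minimal sketch only; it assumes the <<<proxy>>> instance
- obtained in the Instantiation section above, and the artifact path shown is
- purely illustrative:
-
-----------
- // returns a File pointing at the cached copy, fetching it remotely if needed
- File artifact = proxy.get( "org/apache/maven/test/sample/1.0/sample-1.0.jar" );
-----------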
-
-* The getRemoteFile() method
-
- The ProxyManager getRemoteFile( path ) method forces the ProxyManager to
- ignore the contents of its cache, search all the ProxyRepository objects for
- the specified path, and retrieve it when available. When successful, the
- ProxyManager stores a copy of the remote file in its cache and returns a
- File instance of the cached copy.
\ No newline at end of file
+++ /dev/null
-package org.apache.maven.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.codehaus.plexus.PlexusTestCase;
-import org.easymock.MockControl;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.StringWriter;
-import java.net.MalformedURLException;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.TimeZone;
-
-/**
- * Test the proxy handler.
- *
- * @author Brett Porter
- */
-public class ProxyRequestHandlerTest
- extends PlexusTestCase
-{
- private ProxyRequestHandler requestHandler;
-
- private List proxiedRepositories;
-
- private List legacyProxiedRepositories;
-
- private ArtifactRepository defaultManagedRepository;
-
- private ArtifactRepository legacyManagedRepository;
-
- private ArtifactRepository proxiedRepository1;
-
- private ArtifactRepository proxiedRepository2;
-
- private ArtifactRepository legacyProxiedRepository;
-
- private ArtifactRepositoryLayout defaultLayout;
-
- private ArtifactRepositoryFactory factory;
-
- private MockControl wagonMockControl;
-
- private Wagon wagonMock;
-
- private static final ArtifactRepositoryPolicy DEFAULT_POLICY =
- new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER, null );
-
- private static final ArtifactRepositoryPolicy ALWAYS_UPDATE_POLICY =
- new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS, null );
-
- private static final TimeZone UTC_TIMEZONE = TimeZone.getTimeZone( "UTC" );
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- requestHandler = (ProxyRequestHandler) lookup( ProxyRequestHandler.ROLE );
-
- factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- File repoLocation = getTestFile( "target/test-repository/managed" );
- // it is faster to delete only this directory before copying; the others are handled case by case
- FileUtils.deleteDirectory( new File( repoLocation, "org/apache/maven/test/get-merged-metadata" ) );
- copyDirectoryStructure( getTestFile( "src/test/repositories/managed" ), repoLocation );
-
- defaultLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- defaultManagedRepository = createRepository( "managed-repository", repoLocation );
-
- repoLocation = getTestFile( "target/test-repository/legacy-managed" );
- FileUtils.deleteDirectory( repoLocation );
- copyDirectoryStructure( getTestFile( "src/test/repositories/legacy-managed" ), repoLocation );
-
- ArtifactRepositoryLayout legacyLayout =
- (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
-
- legacyManagedRepository = createRepository( "managed-repository", repoLocation, legacyLayout );
-
- File location = getTestFile( "src/test/repositories/proxied1" );
- proxiedRepository1 = createRepository( "proxied1", location );
-
- location = getTestFile( "src/test/repositories/proxied2" );
- proxiedRepository2 = createRepository( "proxied2", location );
-
- proxiedRepositories = new ArrayList( 2 );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- location = getTestFile( "src/test/repositories/legacy-proxied" );
- legacyProxiedRepository = createRepository( "legacy-proxied", location, legacyLayout );
-
- legacyProxiedRepositories = Collections.singletonList( createProxiedRepository( legacyProxiedRepository ) );
-
- wagonMockControl = MockControl.createNiceControl( Wagon.class );
- wagonMock = (Wagon) wagonMockControl.getMock();
- WagonDelegate delegate = (WagonDelegate) lookup( Wagon.ROLE, "test" );
- delegate.setDelegate( wagonMock );
- }
-
- public void testGetDefaultLayoutNotPresent()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- // TODO: timestamp preservation requires support for that in wagon
-// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
- }
-
- public void testGetDefaultLayoutAlreadyPresent()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
- long originalModificationTime = expectedFile.lastModified();
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
- assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
- file.lastModified() );
- }
-
- public void testGetDefaultLayoutRemoteUpdate()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- expectedFile.setLastModified( getPastDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetWhenInBothProxiedRepos()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetInSecondProxiedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- }
-
- public void testNotFoundInAnyProxies()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/does-not-exist/1.0/does-not-exist-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "File returned was: " + file + "; should have got a not found exception" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected, but check file was not created
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testGetInSecondProxiedRepoFirstFails()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMockControl.replay();
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- wagonMockControl.verify();
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
-
- public void testGetButAllRepositoriesFail()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepository2 = createRepository( "proxied2", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository1 );
- ProxiedArtifactRepository proxiedArtifactRepository2 = createProxiedRepository( proxiedRepository2 );
- proxiedRepositories.add( proxiedArtifactRepository2 );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // as expected
- wagonMockControl.verify();
- assertTrue( "Check failure", proxiedArtifactRepository1.isCachedFailure( path ) );
- assertTrue( "Check failure", proxiedArtifactRepository2.isCachedFailure( path ) );
-
- // TODO: do not want failures to present as a not found!
- // TODO: How much information on each failure should we pass back to the user vs. logging in the proxy?
- }
- }
-
- public void testGetInSecondProxiedRepoFirstHardFails()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createHardFailProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- TransferFailedException failedException = new TransferFailedException( "transfer failed" );
- wagonMockControl.setThrowable( failedException );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ProxyException e )
- {
- // expect a failure
- wagonMockControl.verify();
-
- assertEquals( "Check cause", failedException, e.getCause() );
- assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
- }
-
- public void testGetInSecondProxiedRepoFirstFailsFromCache()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- // fail from the cache, even though it is in the first repo now
-
- String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetInSecondProxiedRepoFirstHardFailsFromCache()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- // fail from the cache, even though it is in the first repo now
-
- String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createHardFailProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ProxyException e )
- {
- // expect a failure
- assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
- }
-
- public void testGetInSecondProxiedRepoFirstFailsDisabledCacheFailure()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
-
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
- proxiedArtifactRepository.setCacheFailures( false );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMockControl.replay();
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- wagonMockControl.verify();
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
-
- public void testGetWhenInBothProxiedReposFirstHasExpiredCacheFailure()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
-
- assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
-
- public void testGetAlwaysAlreadyPresent()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetAlwaysAlreadyPresentRemovedFromProxies()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-removed-from-proxies/1.0/get-removed-from-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- }
-
- public void testGetAlwaysWithCachedFailure()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetRemovesTemporaryFileOnSuccess()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File tempFile = new File( file.getParentFile(), file.getName() + ".tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
- }
-
- public void testGetRemovesTemporaryFileOnError()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository1 );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // as expected
- wagonMockControl.verify();
-
- File tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
- }
- }
-
- public void testGetRemovesTemporaryChecksumFileOnSuccess()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File tempFile = new File( file.getParentFile(), file.getName() + ".sha1.tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
- }
-
- public void testGetRemovesTemporaryChecksumFileOnError()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository1 );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
-
- mockFailedChecksums( path, expectedFile );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // as expected
- wagonMockControl.verify();
-
- File tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
-
- tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
- }
- }
-
- public void testGetChecksumBothCorrect()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-both-right/1.0/get-checksum-both-right-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "sha1" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar",
- FileUtils.readFileToString( checksumFile, null ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
- }
-
- public void testGetCorrectSha1NoMd5()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "sha1" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
- FileUtils.readFileToString( checksumFile, null ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
- }
-
- public void testGetCorrectSha1BadMd5()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-bad-md5/1.0/get-checksum-sha1-bad-md5-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "sha1" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
- FileUtils.readFileToString( checksumFile, null ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
- }
-
- public void testGetCorrectMd5NoSha1()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-md5-only/1.0/get-checksum-md5-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "md5" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar",
- FileUtils.readFileToString( checksumFile, null ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
- }
-
- public void testGetCorrectMd5BadSha1()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-md5-bad-sha1/1.0/get-checksum-md5-bad-sha1-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "md5" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "8a02aa67549d27b2a03cd4547439c6d3 *get-checksum-md5-bad-sha1-1.0.jar",
- FileUtils.readFileToString( checksumFile, null ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
- }
-
- public void testGetWithNoChecksums()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
- assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
- }
-
- public void testGetBadMd5BadSha1()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-both-bad/1.0/get-checksum-both-bad-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expect a failure
- assertFalse( "Check file not created", expectedFile.exists() );
-
- assertFalse( "Check file not created", getChecksumFile( expectedFile, "md5" ).exists() );
- assertFalse( "Check file not created", getChecksumFile( expectedFile, "sha1" ).exists() );
- }
- }
-
- public void testGetChecksumTransferFailed()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository1 );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
-
- mockFailedChecksums( path, expectedFile );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // as expected
- wagonMockControl.verify();
-
- assertFalse( "Check file not created", expectedFile.exists() );
-
- assertFalse( "Check file not created", getChecksumFile( expectedFile, "md5" ).exists() );
- assertFalse( "Check file not created", getChecksumFile( expectedFile, "sha1" ).exists() );
- }
- }
-
- public void testGetAlwaysBadChecksumPresentLocallyAbsentRemote()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-bad-local-checksum/1.0/get-bad-local-checksum-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
-
- assertFalse( "Check checksum removed", new File( file.getParentFile(), file.getName() + ".sha1" ).exists() );
- assertFalse( "Check checksum removed", new File( file.getParentFile(), file.getName() + ".md5" ).exists() );
- }
-
- public void testGetChecksumPresentInManagedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-checksum-from-managed-repo/1.0/get-checksum-from-managed-repo-1.0.jar.sha1";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetAlwaysChecksumPresentInManagedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-checksum-from-managed-repo/1.0/get-checksum-from-managed-repo-1.0.jar.sha1";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetChecksumNotPresentInManagedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar.sha1";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected
-
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testGetAlwaysChecksumNotPresentInManagedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar.sha1";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected
-
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testGetMetadataNotPresent()
- throws ProxyException, IOException
- {
- String path = "org/apache/maven/test/dummy-artifact/1.0/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected
-
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testGetMetadataProxied()
- throws ProxyException, ResourceDoesNotExistException, IOException
- {
- String path = "org/apache/maven/test/get-default-metadata/1.0/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- String expectedContents = getExpectedMetadata( "get-default-metadata", "1.0" );
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
- }
-
-/* TODO: test keeps failing in the reactor - needs to be made more robust before re-enabling
- public void testGetMetadataMergeRepos()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-merged-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents = getExpectedMetadata( "get-merged-metadata", getVersioning(
- Arrays.asList( new String[]{"0.9", "1.0", "2.0", "3.0", "5.0", "4.0"} ), file ) );
-
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
- }
-*/
-
- public void testGetMetadataRemovedFromProxies()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-removed-metadata/1.0/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents =
- FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
- }
-
- public void testGetReleaseMetadataNotExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents =
- FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
-
- String unexpectedContents =
- FileUtils.readFileToString( new File( proxiedRepository1.getBasedir(), path ), null );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetSnapshotMetadataNotExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents =
- FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
-
- String unexpectedContents =
- FileUtils.readFileToString( new File( proxiedRepository1.getBasedir(), path ), null );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetReleaseMetadataExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents =
- FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents = getExpectedMetadata( "get-updated-metadata", getVersioning(
- Arrays.asList( new String[]{"1.0", "2.0"} ), file ) );
-
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
-/* TODO: test keeps failing in the reactor - needs to be made more robust before re-enabling
- public void testGetSnapshotMetadataExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents =
- FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents =
- getExpectedMetadata( "get-updated-metadata", "1.0-SNAPSHOT", getVersioning( "20050831.111213", 2, file ) );
-
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetMetadataNotUpdated()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents =
- FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
-
- assertTrue( expectedFile.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( proxiedFile.lastModified() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
-
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetMetadataUpdated()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents =
- FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents = getExpectedMetadata( "get-updated-metadata", getVersioning(
- Arrays.asList( new String[]{"1.0", "2.0"} ), file ) );
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check content doesn't match old version",
- unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testGetAlwaysMetadata()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents =
- FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents = getExpectedMetadata( "get-updated-metadata", getVersioning(
- Arrays.asList( new String[]{"1.0", "2.0"} ), file ) );
-
- assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
- assertFalse( "Check content doesn't match old version",
- unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-*/
-
- public void testSnapshotNonExistant()
- throws ProxyException, IOException
- {
- String path = "org/apache/maven/test/does-not-exist/1.0-SNAPSHOT/does-not-exist-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "File returned was: " + file + "; should have got a not found exception" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected, but check file was not created
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testTimestampDrivenSnapshotNotPresentAlready()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- }
-
- public void testNewerTimestampDrivenSnapshotOnFirstRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- String path =
- "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertTrue( expectedFile.exists() );
-
- expectedFile.setLastModified( getPastDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- }
-
- public void testOlderTimestampDrivenSnapshotOnFirstRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- expectedFile.setLastModified( getFutureDate().getTime() );
-
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
-/* TODO: won't pass until Wagon preserves timestamp on download
- public void testNewerTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- String path =
- "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- File repoLocation = getTestFile( "target/test-repository/proxied1" );
- FileUtils.deleteDirectory( repoLocation );
- copyDirectoryStructure( getTestFile( "src/test/repositories/proxied1" ), repoLocation );
- proxiedRepository1 = createRepository( "proxied1", repoLocation );
-
- new File( proxiedRepository1.getBasedir(), path ).setLastModified( getPastDate().getTime() );
-
- proxiedRepositories.clear();
- proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-*/
-
- public void testOlderTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
- throws ParseException, ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File repoLocation = getTestFile( "target/test-repository/proxied2" );
- FileUtils.deleteDirectory( repoLocation );
- copyDirectoryStructure( getTestFile( "src/test/repositories/proxied2" ), repoLocation );
- proxiedRepository2 = createRepository( "proxied2", repoLocation );
-
- new File( proxiedRepository2.getBasedir(), path ).setLastModified( getPastDate().getTime() );
-
- proxiedRepositories.clear();
- proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testTimestampDrivenSnapshotNotExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path =
- "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertTrue( expectedFile.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- proxiedFile.setLastModified( getFutureDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testTimestampDrivenSnapshotNotUpdated()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path =
- "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- expectedFile.setLastModified( proxiedFile.lastModified() );
-
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testTimestampDrivenSnapshotNotPresentAlreadyExpiredCacheFailure()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
-
- assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
-
- public void testMetadataDrivenSnapshotNotPresentAlready()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-metadata-snapshot/1.0-SNAPSHOT/get-metadata-snapshot-1.0-20050831.101112-1.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- }
-
- public void testGetMetadataDrivenSnapshotRemoteUpdate()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- // Metadata-driven snapshots (using a full timestamp) are treated like a release. It is the timing of the
- // updates to the metadata files that determines which artifact will be downloaded
-
- String path =
- "org/apache/maven/test/get-present-metadata-snapshot/1.0-SNAPSHOT/get-present-metadata-snapshot-1.0-20050831.101112-1.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
-
- assertTrue( expectedFile.exists() );
-
- expectedFile.setLastModified( getPastDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- }
-
- public void testLegacyManagedRepoGetNotPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(),
- "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar" );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- // TODO: timestamp preservation requires support for that in wagon
-// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
- }
-
- public void testLegacyManagedRepoGetAlreadyPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
- long originalModificationTime = expectedFile.lastModified();
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- File proxiedFile = new File( proxiedRepository1.getBasedir(),
- "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar" );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
- assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
- file.lastModified() );
- }
-
- public void testLegacyProxyRepoGetNotPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile =
- new File( legacyProxiedRepository.getBasedir(), "org.apache.maven.test/jars/get-default-layout-1.0.jar" );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- // TODO: timestamp preservation requires support for that in wagon
-// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
- }
-
- public void testLegacyProxyRepoGetAlreadyPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
- long originalModificationTime = expectedFile.lastModified();
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- File proxiedFile = new File( legacyProxiedRepository.getBasedir(),
- "org.apache.maven.test/jars/get-default-layout-present-1.0.jar" );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
- assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
- file.lastModified() );
- }
-
- public void testLegacyManagedAndProxyRepoGetNotPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- // TODO: timestamp preservation requires support for that in wagon
-// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
- }
-
- public void testLegacyManagedAndProxyRepoGetAlreadyPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.readFileToString( expectedFile, null );
- long originalModificationTime = expectedFile.lastModified();
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
- File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
- assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
- assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
- file.lastModified() );
- }
-
- public void testLegacyRequestConvertedToDefaultPathInManagedRepo()
- throws Exception
- {
- // Check that a Maven 1 legacy request is translated to a Maven 2 path in
- // the managed repository.
-
- String legacyPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( legacyPath, legacyProxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- }
-
- public void testDefaultRequestConvertedToLegacyPathInManagedRepo()
- throws Exception
- {
- // Check that a Maven 2 default request is translated to a legacy path in
- // the managed repository.
-
- String legacyPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), legacyPath );
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- }
-
- public void testRelocateMaven1Request()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar";
- String relocatedPath =
- "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- }
-
- public void testDoublyRelocateMaven1Request()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-doubly-relocated-artefact-1.0.jar";
- String relocatedPath =
- "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- }
-
- public void testRelocateMaven1PomRequest()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/poms/get-relocated-artefact-with-pom-1.0.pom";
- String relocatedPath =
- "org/apache/maven/test/get-default-layout-present-with-pom/1.0/get-default-layout-present-with-pom-1.0.pom";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
-
- assertTrue( expectedFile.exists() );
- }
-
- public void testRelocateMaven1PomRequestMissingTarget()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/poms/get-relocated-artefact-1.0.pom";
- String relocatedPath =
- "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.pom";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
-
- assertFalse( expectedFile.exists() );
-
- try
- {
- requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Should have failed to find target POM" );
- }
- catch ( ResourceDoesNotExistException e )
- {
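- // expected: the relocation target POM does not exist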
- assertTrue( true );
- }
- }
-
- public void testRelocateMaven1ChecksumRequest()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar.md5";
- String relocatedPath =
- "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar.md5";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
-
- assertTrue( expectedFile.exists() );
-
- path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar.sha1";
- relocatedPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar.sha1";
- expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
-
- assertFalse( expectedFile.exists() );
-
- try
- {
- requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Checksum was not present, should not be found" );
- }
- catch ( ResourceDoesNotExistException e )
- {
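- // expected: no sha1 checksum exists at the relocated path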
- assertTrue( true );
- }
- }
-
- public void testRelocateMaven2Request()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-relocated-artefact/1.0/get-relocated-artefact-1.0.jar";
- String relocatedPath =
- "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- }
-
- public void testRelocateMaven2RequestInLegacyManagedRepo()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-relocated-artefact/1.0/get-relocated-artefact-1.0.jar";
- String relocatedPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), relocatedPath );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- }
-
- private static Versioning getVersioning( List versions, File file )
- {
- Versioning versioning = new Versioning();
- for ( Iterator i = versions.iterator(); i.hasNext(); )
- {
- String v = (String) i.next();
- versioning.addVersion( v );
- }
- setLastUpdatedTimestamp( versioning, file );
- return versioning;
- }
-
- private static String getExpectedMetadata( String artifactId, Versioning versioning )
- throws IOException
- {
- return getExpectedMetadata( artifactId, null, versioning );
- }
-
- private static String getExpectedMetadata( String artifactId, String version, Versioning versioning )
- throws IOException
- {
- StringWriter expectedContents = new StringWriter();
-
- Metadata m = new Metadata();
- m.setGroupId( "org.apache.maven.test" );
- m.setArtifactId( artifactId );
- m.setVersion( version );
- m.setVersioning( versioning );
- m.setModelEncoding( null );
-
- new MetadataXpp3Writer().write( expectedContents, m );
- return expectedContents.toString();
- }
-
- private static String getExpectedMetadata( String artifactId, String version )
- throws IOException
- {
- return getExpectedMetadata( artifactId, version, null );
- }
-
- private static Versioning getVersioning( String timestamp, int buildNumber, File file )
- {
- Versioning versioning = new Versioning();
- versioning.setSnapshot( new Snapshot() );
- versioning.getSnapshot().setTimestamp( timestamp );
- versioning.getSnapshot().setBuildNumber( buildNumber );
- setLastUpdatedTimestamp( versioning, file );
- return versioning;
- }
-
- private static void setLastUpdatedTimestamp( Versioning versioning, File file )
- {
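- // lastUpdated reflects the file's modification time, formatted as a UTC timestamp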
- DateFormat fmt = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
- fmt.setTimeZone( UTC_TIMEZONE );
- versioning.setLastUpdated( fmt.format( new Date( file.lastModified() ) ) );
- }
-
- private static Date getPastDate()
- throws ParseException
- {
- return new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" );
- }
-
- private static Date getFutureDate()
- {
- Calendar cal = Calendar.getInstance();
- cal.add( Calendar.YEAR, 1 );
- return cal.getTime();
- }
-
- private void mockFailedChecksums( String path, File expectedFile )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- // each checksum must be expected twice, since the handler re-attempts the transfer after a failure
- wagonMock.get( path + ".sha1", new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMock.get( path + ".md5", new File( expectedFile.getParentFile(), expectedFile.getName() + ".md5.tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMock.get( path + ".sha1", new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMock.get( path + ".md5", new File( expectedFile.getParentFile(), expectedFile.getName() + ".md5.tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
- }
-
- private File getChecksumFile( File file, String algorithm )
- {
- return new File( file.getParentFile(), file.getName() + "." + algorithm );
- }
-
- /**
- * A faster recursive copy that omits .svn directories.
- *
- * @param sourceDirectory the source directory to copy
- * @param destDirectory the target location
- * @throws java.io.IOException if there is a copying problem
- * @todo get back into plexus-utils, share with converter module
- */
- private static void copyDirectoryStructure( File sourceDirectory, File destDirectory )
- throws IOException
- {
- if ( !sourceDirectory.exists() )
- {
- throw new IOException( "Source directory doesn't exists (" + sourceDirectory.getAbsolutePath() + ")." );
- }
-
- File[] files = sourceDirectory.listFiles();
-
- String sourcePath = sourceDirectory.getAbsolutePath();
-
- for ( int i = 0; i < files.length; i++ )
- {
- File file = files[i];
-
- String dest = file.getAbsolutePath();
-
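- // path of this entry relative to the source directory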
- dest = dest.substring( sourcePath.length() + 1 );
-
- File destination = new File( destDirectory, dest );
-
- if ( file.isFile() )
- {
- destination = destination.getParentFile();
-
- FileUtils.copyFile( file, new File( destination, file.getName() ), false );
- // TODO: Change when there is a FileUtils.copyFileToDirectory(file, destination, boolean) option
- //FileUtils.copyFileToDirectory( file, destination );
- }
- else if ( file.isDirectory() )
- {
- if ( !".svn".equals( file.getName() ) )
- {
- if ( !destination.exists() && !destination.mkdirs() )
- {
- throw new IOException(
- "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
- }
-
- copyDirectoryStructure( file, destination );
- }
- }
- else
- {
- throw new IOException( "Unknown file type: " + file.getAbsolutePath() );
- }
- }
- }
-
- private static ProxiedArtifactRepository createProxiedRepository( ArtifactRepository repository )
- {
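- // wrap the repository, naming the proxy after its id and caching failures by default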
- ProxiedArtifactRepository proxiedArtifactRepository = new ProxiedArtifactRepository( repository );
- proxiedArtifactRepository.setName( repository.getId() );
- proxiedArtifactRepository.setCacheFailures( true );
- return proxiedArtifactRepository;
- }
-
- private static ProxiedArtifactRepository createHardFailProxiedRepository( ArtifactRepository repository )
- {
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( repository );
- proxiedArtifactRepository.setHardFail( true );
- return proxiedArtifactRepository;
- }
-
- private ArtifactRepository createRepository( String id, File repoLocation )
- throws MalformedURLException
- {
- return createRepository( id, repoLocation.toURI().toURL().toExternalForm() );
- }
-
- private ArtifactRepository createRepository( String id, File location, ArtifactRepositoryLayout layout )
- throws MalformedURLException
- {
- return createRepository( id, location.toURI().toURL().toExternalForm(), layout );
- }
-
- private ArtifactRepository createRepository( String id, String url )
- {
- return createRepository( id, url, defaultLayout );
- }
-
- private ArtifactRepository createRepository( String id, String url, ArtifactRepositoryLayout repositoryLayout )
- {
- return factory.createArtifactRepository( id, url, repositoryLayout, null, null );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.proxy;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.maven.wagon.ConnectionException;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authentication.AuthenticationException;
-import org.apache.maven.wagon.authentication.AuthenticationInfo;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.apache.maven.wagon.events.SessionListener;
-import org.apache.maven.wagon.events.TransferListener;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.apache.maven.wagon.repository.Repository;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-/**
- * A dummy wagon implementation
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class WagonDelegate
- implements Wagon
-{
- private Wagon delegate;
-
- private String contentToGet;
-
- public void get( String resourceName, File destination )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- delegate.get( resourceName, destination );
- create( destination );
- }
-
- public boolean getIfNewer( String resourceName, File destination, long timestamp )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- boolean result = delegate.getIfNewer( resourceName, destination, timestamp );
- createIfMissing( destination );
- return result;
- }
-
- public void put( File source, String destination )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- delegate.put( source, destination );
- }
-
- public void putDirectory( File sourceDirectory, String destinationDirectory )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- delegate.putDirectory( sourceDirectory, destinationDirectory );
- }
-
- public boolean resourceExists( String resourceName )
- throws TransferFailedException, AuthorizationException
- {
- return delegate.resourceExists( resourceName );
- }
-
- public List getFileList( String destinationDirectory )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- return delegate.getFileList( destinationDirectory );
- }
-
- public boolean supportsDirectoryCopy()
- {
- return delegate.supportsDirectoryCopy();
- }
-
- public Repository getRepository()
- {
- return delegate.getRepository();
- }
-
- public void connect( Repository source )
- throws ConnectionException, AuthenticationException
- {
- delegate.connect( source );
- }
-
- public void connect( Repository source, ProxyInfo proxyInfo )
- throws ConnectionException, AuthenticationException
- {
- delegate.connect( source, proxyInfo );
- }
-
- public void connect( Repository source, AuthenticationInfo authenticationInfo )
- throws ConnectionException, AuthenticationException
- {
- delegate.connect( source, authenticationInfo );
- }
-
- public void connect( Repository source, AuthenticationInfo authenticationInfo, ProxyInfo proxyInfo )
- throws ConnectionException, AuthenticationException
- {
- delegate.connect( source, authenticationInfo, proxyInfo );
- }
-
- public void openConnection()
- throws ConnectionException, AuthenticationException
- {
- delegate.openConnection();
- }
-
- public void disconnect()
- throws ConnectionException
- {
- delegate.disconnect();
- }
-
- public void addSessionListener( SessionListener listener )
- {
- delegate.addSessionListener( listener );
- }
-
- public void removeSessionListener( SessionListener listener )
- {
- delegate.removeSessionListener( listener );
- }
-
- public boolean hasSessionListener( SessionListener listener )
- {
- return delegate.hasSessionListener( listener );
- }
-
- public void addTransferListener( TransferListener listener )
- {
- delegate.addTransferListener( listener );
- }
-
- public void removeTransferListener( TransferListener listener )
- {
- delegate.removeTransferListener( listener );
- }
-
- public boolean hasTransferListener( TransferListener listener )
- {
- return delegate.hasTransferListener( listener );
- }
-
- public boolean isInteractive()
- {
- return delegate.isInteractive();
- }
-
- public void setInteractive( boolean interactive )
- {
- delegate.setInteractive( interactive );
- }
-
- public void setDelegate( Wagon delegate )
- {
- this.delegate = delegate;
- }
-
- void setContentToGet( String content )
- {
- contentToGet = content;
- }
-
- private void createIfMissing( File destination )
- {
- // since the mock won't actually copy a file, create an empty one to simulate file existence
- if ( !destination.exists() )
- {
- create( destination );
- }
- }
-
- private void create( File destination )
- {
- try
- {
- destination.getParentFile().mkdirs();
- if ( contentToGet == null )
- {
- destination.createNewFile();
- }
- else
- {
- FileUtils.writeStringToFile( new File( destination.getAbsolutePath() ), contentToGet, null );
- }
- }
- catch ( IOException e )
- {
- throw new RuntimeException( e.getMessage(), e );
- }
- }
-}
+++ /dev/null
-get-default-layout-present-1.0.jar\r
-(managed)\r
+++ /dev/null
-7dfb7ade9a8fa90bfbfac52d3090b8c2 *get-default-layout-present-1.0.jar
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-relocated-artefact</artifactId>
- <version>1.0</version>
-
- <distributionManagement>
- <relocation>
- <artifactId>get-default-layout-present</artifactId>
- </relocation>
- </distributionManagement>
-
-</project>
\ No newline at end of file
+++ /dev/null
-get-default-layout-1.0.jar
+++ /dev/null
-get-default-layout-present-1.0.jar\r
-(proxied)\r
+++ /dev/null
-get-bad-local-checksum-1.0.jar\r
-(managed)\r
-\r
+++ /dev/null
-invalid checksum file
\ No newline at end of file
+++ /dev/null
-invalid checksum file
\ No newline at end of file
+++ /dev/null
-066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-from-managed-repo-1.0.jar\r
+++ /dev/null
-get-default-layout-present-1.0.jar
-(managed)
-
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-default-layout-present-with-pom</artifactId>
- <version>1.0</version>
-</project>
+++ /dev/null
-get-default-layout-present-1.0.jar
-(managed)
-
+++ /dev/null
-7dfb7ade9a8fa90bfbfac52d3090b8c2 *get-default-layout-present-1.0.jar
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-doubly-relocated-artefact</artifactId>
- <version>1.0</version>
-
- <distributionManagement>
- <relocation>
- <artifactId>get-relocated-artefact</artifactId>
- </relocation>
- </distributionManagement>
-
-</project>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-merged-metadata</artifactId>
- <versioning>
- <versions>
- <version>0.9</version>
- <!-- unique -->
- <version>1.0</version>
- <!-- merged with proxied2 -->
- <version>2.0</version>
- <!-- merged with proxied1 -->
- </versions>
- </versioning>
-</metadata>
\ No newline at end of file
+++ /dev/null
-get-present-metadata-snapshot-1.0-20050831.101112-1.jar\r
-(managed)
\ No newline at end of file
+++ /dev/null
-get-present-timestamped-snapshot-1.0-SNAPSHOT.jar\r
-(managed)
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-relocated-artefact-with-pom</artifactId>
- <version>1.0</version>
-
- <distributionManagement>
- <relocation>
- <artifactId>get-default-layout-present-with-pom</artifactId>
- </relocation>
- </distributionManagement>
-
-</project>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project>
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-relocated-artefact</artifactId>
- <version>1.0</version>
-
- <distributionManagement>
- <relocation>
- <artifactId>get-default-layout-present</artifactId>
- </relocation>
- </distributionManagement>
-
-</project>
\ No newline at end of file
+++ /dev/null
-get-removed-from-proxies-1.0.jar
-(managed)
-
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
-
-</metadata>
\ No newline at end of file
+++ /dev/null
-<!--\r
- ~ Copyright 2005-2006 The Apache Software Foundation.\r
- ~\r
- ~ Licensed under the Apache License, Version 2.0 (the "License");\r
- ~ you may not use this file except in compliance with the License.\r
- ~ You may obtain a copy of the License at\r
- ~\r
- ~ http://www.apache.org/licenses/LICENSE-2.0\r
- ~\r
- ~ Unless required by applicable law or agreed to in writing, software\r
- ~ distributed under the License is distributed on an "AS IS" BASIS,\r
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- ~ See the License for the specific language governing permissions and\r
- ~ limitations under the License.\r
- -->\r
-\r
-<metadata>\r
- <groupId>org.apache.maven.test</groupId>\r
- <artifactId>get-updated-metadata</artifactId>\r
- <versioning>\r
- <versions>\r
- <version>1.0</version>\r
- </versions>\r
- </versioning>\r
-</metadata>\r
+++ /dev/null
-<!--\r
- ~ Copyright 2005-2006 The Apache Software Foundation.\r
- ~\r
- ~ Licensed under the Apache License, Version 2.0 (the "License");\r
- ~ you may not use this file except in compliance with the License.\r
- ~ You may obtain a copy of the License at\r
- ~\r
- ~ http://www.apache.org/licenses/LICENSE-2.0\r
- ~\r
- ~ Unless required by applicable law or agreed to in writing, software\r
- ~ distributed under the License is distributed on an "AS IS" BASIS,\r
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- ~ See the License for the specific language governing permissions and\r
- ~ limitations under the License.\r
- -->\r
-\r
-<metadata>\r
- <groupId>org.apache.maven.test</groupId>\r
- <artifactId>get-updated-metadata</artifactId>\r
- <version>1.0-SNAPSHOT</version>\r
- <versioning>\r
- <snapshot>\r
- <timestamp>20050831.101112</timestamp>\r
- <buildNumber>1</buildNumber>\r
- </snapshot>\r
- </versioning>\r
-</metadata>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-updated-metadata</artifactId>
- <version>1.0-SNAPSHOT</version>
- <versioning>
- <snapshot>
- <timestamp>20050831.101112</timestamp>
- <buildNumber>1</buildNumber>
- </snapshot>
- </versioning>
-</metadata>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-updated-metadata</artifactId>
- <versioning>
- <versions>
- <version>1.0</version>
- </versions>
- </versioning>
-</metadata>
\ No newline at end of file
+++ /dev/null
-get-bad-local-checksum-1.0.jar\r
-(proxied 1)\r
-\r
+++ /dev/null
-get-checksum-both-bad-1.0.jar\r
-\r
+++ /dev/null
-invalid checksum file
\ No newline at end of file
+++ /dev/null
-invalid checksum file\r
+++ /dev/null
-get-checksum-both-right-1.0.jar\r
+++ /dev/null
-e58f30c6a150a2e843552438d18e15cb *get-checksum-both-right-1.0.jar
\ No newline at end of file
+++ /dev/null
-066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar\r
+++ /dev/null
-066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-from-managed-repo-1.0.jar\r
-(proxied 1)\r
+++ /dev/null
-get-checksum-md5-bad-sha1-1.0.jar\r
-\r
+++ /dev/null
-8a02aa67549d27b2a03cd4547439c6d3 *get-checksum-md5-bad-sha1-1.0.jar
\ No newline at end of file
+++ /dev/null
-invalid checksum file\r
+++ /dev/null
-get-checksum-md5-only-1.0.jar
-
+++ /dev/null
-f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar\r
+++ /dev/null
-get-checksum-sha1-bad-md5-1.0.jar
-
+++ /dev/null
-invalid checksum file\r
+++ /dev/null
-3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar\r
+++ /dev/null
-get-checksum-sha1-only-1.0.jar
-
+++ /dev/null
-748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar
+++ /dev/null
-get-default-layout-present-1.0.jar
-(proxied 1)
-
+++ /dev/null
-get-default-layout-1.0.jar
-
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-default-metadata</artifactId>
- <version>1.0</version>
-</metadata>
\ No newline at end of file
+++ /dev/null
-get-in-both-proxies-1.0.jar
-(proxied 1)
-
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-merged-metadata</artifactId>
- <versioning>
- <versions>
- <version>2.0</version>
- <!-- merge with managed -->
- <version>3.0</version>
- <!-- merge with proxied2 -->
- <version>5.0</version>
- <!-- unique -->
- </versions>
- </versioning>
-</metadata>
\ No newline at end of file
+++ /dev/null
-get-metadata-snapshot-1.0-SNAPSHOT.jar
\ No newline at end of file
+++ /dev/null
-get-present-metadata-snapshot-1.0-20050831.101112-1.jar\r
-(proxied 1)
\ No newline at end of file
+++ /dev/null
-get-present-timestamped-snapshot-1.0-SNAPSHOT.jar\r
-(proxied 1)
\ No newline at end of file
+++ /dev/null
-get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar\r
-(proxied 1)
\ No newline at end of file
+++ /dev/null
-get-timestamped-snapshot-1.0-SNAPSHOT.jar
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-updated-metadata</artifactId>
- <version>1.0-SNAPSHOT</version>
- <versioning>
- <snapshot>
- <timestamp>20050831.111213</timestamp>
- <buildNumber>2</buildNumber>
- </snapshot>
- </versioning>
-</metadata>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-updated-metadata</artifactId>
- <versioning>
- <versions>
- <version>1.0</version>
- <version>2.0</version>
- </versions>
- </versioning>
-</metadata>
\ No newline at end of file
+++ /dev/null
-get-in-both-proxies-1.0.jar
-(proxied 2)
-
+++ /dev/null
-get-in-second-proxy-1.0.jar
-
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<metadata>
- <groupId>org.apache.maven.test</groupId>
- <artifactId>get-merged-metadata</artifactId>
- <versioning>
- <versions>
- <version>1.0</version>
- <!-- merged with managed -->
- <version>3.0</version>
- <!-- merged with proxied1 -->
- <version>4.0</version>
- <!-- unique -->
- </versions>
- </versioning>
-</metadata>
\ No newline at end of file
+++ /dev/null
-get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar\r
-(proxied 2)
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<component-set>
- <components>
- <component>
- <role>org.apache.maven.wagon.Wagon</role>
- <role-hint>test</role-hint>
- <implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
- </component>
- <component>
- <role>org.codehaus.plexus.logging.LoggerManager</role>
- <implementation>org.codehaus.plexus.logging.console.ConsoleLoggerManager</implementation>
- <lifecycle-handler>basic</lifecycle-handler>
- <configuration>
- <threshold>ERROR</threshold>
- </configuration>
- </component>
- </components>
-</component-set>