package org.apache.maven.archiva.indexer.search;
+import java.util.List;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* @param limits the limits to apply to the search results.
* @return the results.
*/
- public SearchResults searchForTerm( String term, SearchResultLimits limits );
+ public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term, SearchResultLimits limits );
/**
* Search for the specific bytecode across all repositories.
* @param limits the limits to apply to the search results.
* @return the results.
*/
- public SearchResults searchForBytecode( String term, SearchResultLimits limits );
+ public SearchResults searchForBytecode( String principal, List<String> selectedRepos, String term, SearchResultLimits limits );
/**
* Search for the specific checksum string across all repositories.
* @param limits the limits to apply to the search results.
* @return the results.
*/
- public SearchResults searchForChecksum( String checksum, SearchResultLimits limits );
+ public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum, SearchResultLimits limits );
}
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
+import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;
import org.apache.maven.archiva.indexer.functors.UserAllowedToSearchRepositoryPredicate;
implements CrossRepositorySearch, RegistryListener, Initializable
{
/**
- * @plexus.requirement role-hint="bytecode"
+ * @plexus.requirement role-hint="lucene"
*/
- private Transformer bytecodeIndexTransformer;
-
- /**
- * @plexus.requirement role-hint="filecontent"
- */
- private Transformer filecontentIndexTransformer;
-
- /**
- * @plexus.requirement role-hint="hashcodes"
- */
- private Transformer hashcodesIndexTransformer;
-
- /**
- * @plexus.requirement role-hint="searchable"
- */
- private Transformer searchableTransformer;
-
- /**
- * @plexus.requirement role-hint="index-exists"
- */
- private Predicate indexExistsPredicate;
-
+ private RepositoryContentIndexFactory indexFactory;
+
/**
* @plexus.requirement
*/
private ArchivaConfiguration configuration;
- private List localIndexedRepositories = new ArrayList();
+ private List<ManagedRepositoryConfiguration> localIndexedRepositories = new ArrayList<ManagedRepositoryConfiguration>();
- public SearchResults searchForChecksum( String checksum, SearchResultLimits limits )
+ public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum, SearchResultLimits limits )
{
- List indexes = getHashcodeIndexes();
+ List<RepositoryContentIndex> indexes = getHashcodeIndexes( principal, selectedRepos );
try
{
return new SearchResults();
}
- public SearchResults searchForBytecode( String term, SearchResultLimits limits )
+ public SearchResults searchForBytecode( String principal, List<String> selectedRepos, String term, SearchResultLimits limits )
{
- List indexes = getHashcodeIndexes();
+ List<RepositoryContentIndex> indexes = getHashcodeIndexes( principal, selectedRepos );
try
{
return new SearchResults();
}
- public SearchResults searchForTerm( String term, SearchResultLimits limits )
+ public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term, SearchResultLimits limits )
{
- List indexes = getFileContentIndexes();
+ List<RepositoryContentIndex> indexes = getFileContentIndexes( principal, selectedRepos );
try
{
return new SearchResults();
}
- private SearchResults searchAll( LuceneQuery luceneQuery, SearchResultLimits limits, List indexes )
+ private SearchResults searchAll( LuceneQuery luceneQuery, SearchResultLimits limits, List<RepositoryContentIndex> indexes )
{
org.apache.lucene.search.Query specificQuery = luceneQuery.getLuceneQuery();
// Setup the converter
LuceneEntryConverter converter = null;
- RepositoryContentIndex index = (RepositoryContentIndex) indexes.get( 0 );
+ RepositoryContentIndex index = indexes.get( 0 );
converter = index.getEntryConverter();
// Process indexes into an array of Searchables.
- List searchableList = new ArrayList( indexes );
- CollectionUtils.transform( searchableList, searchableTransformer );
+ List<Searchable> searchableList = toSearchables( indexes );
Searchable searchables[] = new Searchable[searchableList.size()];
searchableList.toArray( searchables );
return results;
}
- private Predicate getAllowedToSearchReposPredicate()
+ private List<Searchable> toSearchables( List<RepositoryContentIndex> indexes )
{
- return new UserAllowedToSearchRepositoryPredicate();
+ List<Searchable> searchableList = new ArrayList<Searchable>();
+ for ( RepositoryContentIndex contentIndex : indexes )
+ {
+ try
+ {
+ searchableList.add( contentIndex.getSearchable() );
+ }
+ catch ( RepositoryIndexSearchException e )
+ {
+ getLogger().warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :"
+ + e.getMessage(), e );
+ }
+ }
+ return searchableList;
}
- public List getBytecodeIndexes()
+ public List<RepositoryContentIndex> getBytecodeIndexes( String principal, List<String> selectedRepos )
{
- List ret = new ArrayList();
+ List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();
- synchronized ( this.localIndexedRepositories )
+ for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
- ret.addAll( CollectionUtils.select( this.localIndexedRepositories, getAllowedToSearchReposPredicate() ) );
- CollectionUtils.transform( ret, bytecodeIndexTransformer );
- CollectionUtils.filter( ret, indexExistsPredicate );
+            // Only include repositories the caller selected.
+ if ( selectedRepos.contains( repoConfig.getId() ) )
+ {
+ RepositoryContentIndex index = indexFactory.createBytecodeIndex( repoConfig );
+                // Only include indexes that actually exist on disk.
+ if ( indexExists( index ) )
+ {
+ ret.add( index );
+ }
+ }
}
return ret;
}
- public List getFileContentIndexes()
+ public List<RepositoryContentIndex> getFileContentIndexes( String principal, List<String> selectedRepos )
{
- List ret = new ArrayList();
+ List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();
- synchronized ( this.localIndexedRepositories )
+ for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
- ret.addAll( CollectionUtils.select( this.localIndexedRepositories, getAllowedToSearchReposPredicate() ) );
- CollectionUtils.transform( ret, filecontentIndexTransformer );
- CollectionUtils.filter( ret, indexExistsPredicate );
+            // Only include repositories the caller selected.
+ if ( selectedRepos.contains( repoConfig.getId() ) )
+ {
+ RepositoryContentIndex index = indexFactory.createFileContentIndex( repoConfig );
+                // Only include indexes that actually exist on disk.
+ if ( indexExists( index ) )
+ {
+ ret.add( index );
+ }
+ }
}
return ret;
}
- public List getHashcodeIndexes()
+ public List<RepositoryContentIndex> getHashcodeIndexes( String principal, List<String> selectedRepos )
{
- List ret = new ArrayList();
+ List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();
- synchronized ( this.localIndexedRepositories )
+ for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
- ret.addAll( CollectionUtils.select( this.localIndexedRepositories, getAllowedToSearchReposPredicate() ) );
- CollectionUtils.transform( ret, hashcodesIndexTransformer );
- CollectionUtils.filter( ret, indexExistsPredicate );
+            // Only include repositories the caller selected.
+ if ( selectedRepos.contains( repoConfig.getId() ) )
+ {
+ RepositoryContentIndex index = indexFactory.createHashcodeIndex( repoConfig );
+                // Only include indexes that actually exist on disk.
+ if ( indexExists( index ) )
+ {
+ ret.add( index );
+ }
+ }
}
return ret;
}
+
+ private boolean indexExists( RepositoryContentIndex index )
+ {
+ try
+ {
+ return index.exists();
+ }
+ catch ( RepositoryIndexException e )
+ {
+ getLogger().info(
+ "Repository Content Index [" + index.getId() + "] for repository ["
+ + index.getRepository().getId() + "] does not exist yet in ["
+ + index.getIndexDirectory().getAbsolutePath() + "]." );
+ return false;
+ }
+ }
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
+++ /dev/null
-package org.apache.maven.archiva.indexer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
-/**
- * AllTests - conveinence test suite for IDE users.
- *
- * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class AllTests
-{
-
- public static Test suite()
- {
- TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.indexer" );
- //$JUnit-BEGIN$
- suite.addTest( org.apache.maven.archiva.indexer.bytecode.AllTests.suite() );
- suite.addTest( org.apache.maven.archiva.indexer.hashcodes.AllTests.suite() );
- suite.addTest( org.apache.maven.archiva.indexer.query.AllTests.suite() );
- suite.addTest( org.apache.maven.archiva.indexer.search.AllTests.suite() );
- //$JUnit-END$
- return suite;
- }
-
-}
+++ /dev/null
-package org.apache.maven.archiva.indexer.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
-/**
- * AllTests - conveinence test suite for IDE users.
- *
- * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class AllTests
-{
- public static Test suite()
- {
- TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.indexer.search" );
- //$JUnit-BEGIN$
- suite.addTestSuite( DefaultCrossRepositorySearchTest.class );
- //$JUnit-END$
- return suite;
- }
-}
* under the License.
*/
-import junit.framework.AssertionFailedError;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecordLoader;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.io.File;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.Map;
+import java.util.Map.Entry;
+
+import junit.framework.AssertionFailedError;
/**
* BytecodeIndexPopulator
implements IndexPopulator
{
- public Map getObjectMap()
+ public Map<String,ArchivaArtifact> getObjectMap()
{
- Map dumps = new HashMap();
+ Map<String,ArchivaArtifact> dumps = new HashMap<String,ArchivaArtifact>();
// archiva-common-1.0.jar.txt
dumps.put( "archiva-common",
return artifact;
}
- public Map populate( File basedir )
+ public Map<String, BytecodeRecord> populate( File basedir )
{
- Map records = new HashMap();
+ Map<String, BytecodeRecord> records = new HashMap<String, BytecodeRecord>();
- Map artifactDumps = getObjectMap();
- for ( Iterator iter = artifactDumps.entrySet().iterator(); iter.hasNext(); )
+ for ( Entry<String, ArchivaArtifact> entry : getObjectMap().entrySet() )
{
- Map.Entry entry = (Map.Entry) iter.next();
- ArchivaArtifact artifact = (ArchivaArtifact) entry.getValue();
+ ArchivaArtifact artifact = entry.getValue();
File dumpFile = getDumpFile( basedir, artifact );
BytecodeRecord record = BytecodeRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );
import org.apache.maven.archiva.indexer.MockConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
+import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
+import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
+import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.util.FileUtils;
import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
import java.util.Map;
/**
private static final String TEST_DEFAULT_REPO_ID = "testDefaultRepo";
+ @Override
protected void setUp()
throws Exception
{
RepositoryContentIndex indexContents = indexFactory.createFileContentIndex( repository );
// Now populate them.
- Map hashcodesMap = ( new HashcodesIndexPopulator() ).populate( new File( getBasedir() ) );
+ Map<String, HashcodesRecord> hashcodesMap = new HashcodesIndexPopulator().populate( new File( getBasedir() ) );
indexHashcode.indexRecords( hashcodesMap.values() );
assertEquals( "Hashcode Key Count", hashcodesMap.size(), indexHashcode.getAllRecordKeys().size() );
assertRecordCount( indexHashcode, hashcodesMap.size() );
- Map bytecodeMap = ( new BytecodeIndexPopulator() ).populate( new File( getBasedir() ) );
+ Map<String, BytecodeRecord> bytecodeMap = new BytecodeIndexPopulator().populate( new File( getBasedir() ) );
indexBytecode.indexRecords( bytecodeMap.values() );
assertEquals( "Bytecode Key Count", bytecodeMap.size(), indexBytecode.getAllRecordKeys().size() );
assertRecordCount( indexBytecode, bytecodeMap.size() );
- Map contentMap = ( new FileContentIndexPopulator() ).populate( new File( getBasedir() ) );
+ Map<String, FileContentRecord> contentMap = new FileContentIndexPopulator().populate( new File( getBasedir() ) );
indexContents.indexRecords( contentMap.values() );
assertEquals( "File Content Key Count", contentMap.size(), indexContents.getAllRecordKeys().size() );
assertRecordCount( indexContents, contentMap.size() );
{
CrossRepositorySearch search = lookupCrossRepositorySearch();
- SearchResultLimits limits = new SearchResultLimits( 0 );
- limits.setPageSize( 20 );
-
- SearchResults results = search.searchForTerm( "org", limits );
- assertResults( 1, 7, results );
+ String expectedRepos[] = new String[] {
+ TEST_DEFAULT_REPO_ID
+ };
+
+ String expectedResults[] = new String[] {
+ "org","org2","org3","org4","org5","org6","org7"
+ };
+
+ assertSearchResults( expectedRepos, expectedResults, search, "org" );
}
public void testSearchTerm_Junit()
throws Exception
{
CrossRepositorySearch search = lookupCrossRepositorySearch();
-
- SearchResultLimits limits = new SearchResultLimits( 0 );
- limits.setPageSize( 20 );
-
- SearchResults results = search.searchForTerm( "junit", limits );
- assertResults( 1, 3, results );
+
+ String expectedRepos[] = new String[] {
+ TEST_DEFAULT_REPO_ID
+ };
+
+ String expectedResults[] = new String[] {
+ "junit","junit2","junit3"
+ };
+
+ assertSearchResults( expectedRepos, expectedResults, search, "junit" );
}
public void testSearchInvalidTerm()
{
CrossRepositorySearch search = lookupCrossRepositorySearch();
- SearchResultLimits limits = new SearchResultLimits( 0 );
- limits.setPageSize( 20 );
-
- SearchResults results = search.searchForTerm( "monosodium", limits );
- assertResults( 1, 0, results );
+ String expectedRepos[] = new String[] {
+ TEST_DEFAULT_REPO_ID
+ };
+
+ String expectedResults[] = new String[] {
+ // Nothing.
+ };
+
+ assertSearchResults( expectedRepos, expectedResults, search, "monosodium" );
}
-
- private void assertResults( int repoCount, int hitCount, SearchResults results )
+
+ private void assertSearchResults( String expectedRepos[], String expectedResults[], CrossRepositorySearch search, String term )
+ throws Exception
{
+ SearchResultLimits limits = new SearchResultLimits( 0 );
+ limits.setPageSize( 20 );
+
+ List<String> selectedRepos = new ArrayList<String>();
+ selectedRepos.addAll( Arrays.asList( expectedRepos ) );
+
+ SearchResults results = search.searchForTerm( "guest", selectedRepos, term, limits );
+
assertNotNull( "Search Results should not be null.", results );
- assertEquals( "Repository Hits", repoCount, results.getRepositories().size() );
+ assertEquals( "Repository Hits", expectedRepos.length, results.getRepositories().size() );
+ // TODO: test the repository ids returned.
- assertEquals( "Search Result Hits", hitCount, results.getHits().size() );
+ assertEquals( "Search Result Hits", expectedResults.length, results.getHits().size() );
+ // TODO: test the order of hits.
+ // TODO: test the value of the hits.
}
-
+
protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
+import org.apache.maven.archiva.model.ArchivaArtifact;
import java.io.File;
import java.io.IOException;
public class FileContentIndexPopulator
implements IndexPopulator
{
- public Map getObjectMap()
+ public Map<String, ArchivaArtifact> getObjectMap()
{
return null;
}
- public Map populate( File basedir )
+ public Map<String, FileContentRecord> populate( File basedir )
{
- Map map = new HashMap();
+ Map<String, FileContentRecord> map = new HashMap<String, FileContentRecord>();
File repoDir = new File( basedir, "src/test/managed-repository" );
package org.apache.maven.archiva.indexer.search;
-import junit.framework.AssertionFailedError;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecordLoader;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.io.File;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.Map;
+import java.util.Map.Entry;
+
+import junit.framework.AssertionFailedError;
+/**
+ * HashcodesIndexPopulator
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
public class HashcodesIndexPopulator
implements IndexPopulator
{
- public Map getObjectMap()
+ public Map<String, ArchivaArtifact> getObjectMap()
{
- Map dumps = new HashMap();
+ Map<String, ArchivaArtifact> dumps = new HashMap<String, ArchivaArtifact>();
// archiva-common-1.0.jar.txt
dumps.put( "archiva-common",
return dumps;
}
- public Map populate( File basedir )
+ public Map<String, HashcodesRecord> populate( File basedir )
{
- Map records = new HashMap();
+ Map<String, HashcodesRecord> records = new HashMap<String, HashcodesRecord>();
- Map artifactDumps = getObjectMap();
- for ( Iterator iter = artifactDumps.entrySet().iterator(); iter.hasNext(); )
+ for ( Entry<String, ArchivaArtifact> entry : getObjectMap().entrySet() )
{
- Map.Entry entry = (Map.Entry) iter.next();
- ArchivaArtifact artifact = (ArchivaArtifact) entry.getValue();
+ ArchivaArtifact artifact = entry.getValue();
File dumpFile = getDumpFile( basedir, artifact );
HashcodesRecord record = HashcodesRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );
* under the License.
*/
+import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+
import java.io.File;
import java.util.Map;
*/
public interface IndexPopulator
{
- public Map getObjectMap();
+ public Map<String, ArchivaArtifact> getObjectMap();
- public Map populate( File basedir );
+ public Map<String, ? extends LuceneRepositoryContentRecord> populate( File basedir );
}
<description>DefaultCrossRepositorySearch</description>
<requirements>
<requirement>
- <role>org.apache.commons.collections.Transformer</role>
- <role-hint>bytecode</role-hint>
- <field-name>bytecodeIndexTransformer</field-name>
- </requirement>
- <requirement>
- <role>org.apache.commons.collections.Transformer</role>
- <role-hint>filecontent</role-hint>
- <field-name>filecontentIndexTransformer</field-name>
- </requirement>
- <requirement>
- <role>org.apache.commons.collections.Transformer</role>
- <role-hint>hashcodes</role-hint>
- <field-name>hashcodesIndexTransformer</field-name>
- </requirement>
- <requirement>
- <role>org.apache.commons.collections.Transformer</role>
- <role-hint>searchable</role-hint>
- <field-name>searchableTransformer</field-name>
- </requirement>
- <requirement>
- <role>org.apache.commons.collections.Predicate</role>
- <role-hint>index-exists</role-hint>
- <field-name>indexExistsPredicate</field-name>
+ <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
+ <role-hint>lucene</role-hint>
+ <field-name>indexFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
import org.apache.maven.archiva.indexer.search.CrossRepositorySearch;
import org.apache.maven.archiva.indexer.search.SearchResultLimits;
import org.apache.maven.archiva.indexer.search.SearchResults;
+import org.apache.maven.archiva.security.AccessDeniedException;
+import org.apache.maven.archiva.security.ArchivaSecurityException;
+import org.apache.maven.archiva.security.ArchivaUser;
+import org.apache.maven.archiva.security.PrincipalNotFoundException;
+import org.apache.maven.archiva.security.UserRepositories;
import org.codehaus.plexus.xwork.action.PlexusActionSupport;
import java.net.MalformedURLException;
+import java.util.Collections;
import java.util.List;
/**
* @plexus.requirement role-hint="default"
*/
private CrossRepositorySearch crossRepoSearch;
+
+ /**
+ * @plexus.requirement
+ */
+ private UserRepositories userRepositories;
+
+ /**
+ * @plexus.requirement role-hint="xwork"
+ */
+ private ArchivaUser archivaUser;
private static final String RESULTS = "results";
SearchResultLimits limits = new SearchResultLimits( 0 );
- results = crossRepoSearch.searchForTerm( q, limits );
+ results = crossRepoSearch.searchForTerm( getPrincipal(), getObservableRepos(), q, limits );
if ( results.isEmpty() )
{
// 1 hit? return it's information directly!
return ARTIFACT;
}
- else
- {
- return RESULTS;
- }
+
+ return RESULTS;
}
+ @Override
public String doInput()
{
return INPUT;
}
+
+ private String getPrincipal()
+ {
+ return archivaUser.getActivePrincipal();
+ }
+
+ private List<String> getObservableRepos()
+ {
+ try
+ {
+ return userRepositories.getObservableRepositoryIds( getPrincipal() );
+ }
+ catch ( PrincipalNotFoundException e )
+ {
+ getLogger().warn( e.getMessage(), e );
+ }
+ catch ( AccessDeniedException e )
+ {
+ getLogger().warn( e.getMessage(), e );
+ // TODO: pass this onto the screen.
+ }
+ catch ( ArchivaSecurityException e )
+ {
+ getLogger().warn( e.getMessage(), e );
+ }
+ return Collections.emptyList();
+ }
public String getQ()
{