
MRM-1081 - Archiva model should be updated so that the ARTIFACT table includes repositoryId as a primary key

* Updated the model so that repositoryId is now part of the primary key for an artifact (a sketch of the resulting constructor usage appears below, before the changed-file list)
* Removed the old search API classes; they would have had to be ported to the new API, and porting them would have been wasted effort



git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@742859 13f79535-47bb-0310-9956-ffa450edef68
tags/archiva-1.2
James William Dumay, 15 years ago
commit 6267a21d29
100 changed files with 261 additions and 7149 deletions
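To make the key change concrete: every call site touched by this commit now passes the repository id as the final constructor argument, so an artifact is identified by groupId, artifactId, version, classifier, type and repositoryId. The following is a minimal sketch of the new usage, based only on the six-argument ArchivaArtifact constructor visible in the diffs below; the repository id value and the surrounding example class are illustrative, not part of this commit.

    import org.apache.maven.archiva.model.ArchivaArtifact;

    public class ArtifactKeyExample
    {
        public static void main( String[] args )
        {
            // Illustrative repository id; any managed repository id would do.
            String repositoryId = "internal";

            // The repository id is now part of the artifact key,
            // alongside groupId, artifactId, version, classifier and type.
            ArchivaArtifact artifact =
                new ArchivaArtifact( "org.apache.archiva", "example-artifact", "1.0",
                                     null, "jar", repositoryId );

            // The same groupId:artifactId:version stored in two different repositories
            // now maps to two distinct ARTIFACT rows, keyed in part by the repository id.
            System.out.println( artifact.getGroupId() + ":" + artifact.getArtifactId()
                + ":" + artifact.getVersion() + " @ " + repositoryId );
        }
    }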
  1. +1 -1  archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurge.java
  2. +1 -1  archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/CleanupReleasedSnapshotsRepositoryPurge.java
  3. +1 -1  archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurgeTest.java
  4. +1 -4  archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/CleanupReleasedSnapshotsRepositoryPurgeTest.java
  5. +2 -1  archiva-modules/archiva-base/archiva-consumers/archiva-database-consumers/src/main/java/org/apache/maven/archiva/consumers/database/ArtifactUpdateDatabaseConsumer.java
  6. +0 -1  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/main/java/org/apache/archiva/consumers/lucene/NexusIndexerConsumer.java
  7. +24 -35  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/main/java/org/apache/maven/archiva/consumers/lucene/LuceneCleanupRemoveIndexedConsumer.java
  8. +76 -76  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/maven/archiva/consumers/lucene/LuceneCleanupRemoveIndexedConsumerTest.java
  9. +0 -53  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/maven/archiva/consumers/lucene/stubs/LuceneRepositoryContentIndexFactoryStub.java
  10. +0 -144  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/maven/archiva/consumers/lucene/stubs/LuceneRepositoryContentIndexStub.java
  11. +0 -18  archiva-modules/archiva-base/archiva-indexer/pom.xml
  12. +0 -44  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/ArtifactKeys.java
  13. +0 -151  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryContentIndex.java
  14. +0 -70  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryContentIndexEventListener.java
  15. +0 -53  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryContentIndexFactory.java
  16. +0 -72  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeAnalyzer.java
  17. +0 -113  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeEntryConverter.java
  18. +0 -76  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeHandlers.java
  19. +0 -41  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeKeys.java
  20. +0 -177  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeRecord.java
  21. +0 -65  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentAnalyzer.java
  22. +0 -96  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentConverter.java
  23. +0 -78  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentHandlers.java
  24. +0 -35  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentKeys.java
  25. +0 -119  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentRecord.java
  26. +0 -62  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesAnalyzer.java
  27. +0 -90  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesEntryConverter.java
  28. +0 -75  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesHandlers.java
  29. +0 -36  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesKeys.java
  30. +0 -138  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesRecord.java
  31. +0 -154  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneDocumentMaker.java
  32. +0 -49  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneEntryConverter.java
  33. +0 -59  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneIndexHandlers.java
  34. +0 -42  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneQuery.java
  35. +0 -447  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryContentIndex.java
  36. +0 -102  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryContentIndexFactory.java
  37. +0 -41  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryContentRecord.java
  38. +0 -45  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/ArtifactIdTokenizer.java
  39. +0 -57  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/ClassnameTokenizer.java
  40. +0 -50  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/FilenamesTokenizer.java
  41. +0 -50  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/GroupIdTokenizer.java
  42. +0 -49  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/VersionTokenizer.java
  43. +0 -107  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/CompoundQuery.java
  44. +0 -102  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/CompoundQueryTerm.java
  45. +0 -28  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/Query.java
  46. +0 -63  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/QueryTerm.java
  47. +0 -151  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/RangeQuery.java
  48. +0 -64  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/SingleTermQuery.java
  49. +0 -88  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/CrossRepositorySearch.java
  50. +0 -491  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/DefaultCrossRepositorySearch.java
  51. +0 -96  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/SearchResults.java
  52. +0 -190  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/AbstractIndexCreationTestCase.java
  53. +0 -235  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/AbstractIndexerTestCase.java
  54. +0 -194  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/AbstractSearchTestCase.java
  55. +0 -167  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/RepositoryContentIndexEventListenerTest.java
  56. +0 -43  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/bytecode/AllTests.java
  57. +0 -65  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/bytecode/BytecodeIndexTest.java
  58. +0 -136  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/bytecode/BytecodeRecordLoader.java
  59. +0 -328  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/bytecode/BytecodeSearchTest.java
  60. +0 -43  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/hashcodes/AllTests.java
  61. +0 -65  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesIndexTest.java
  62. +0 -105  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesRecordLoader.java
  63. +0 -288  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesSearchTest.java
  64. +0 -42  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/query/AllTests.java
  65. +0 -158  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/query/QueryTest.java
  66. +0 -143  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/BytecodeIndexPopulator.java
  67. +0 -327  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/DefaultCrossRepositorySearchTest.java
  68. +0 -102  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/FileContentIndexPopulator.java
  69. +0 -138  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/HashcodesIndexPopulator.java
  70. +0 -38  archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/IndexPopulator.java
  71. +32 -1  archiva-modules/archiva-base/archiva-model/src/main/java/org/apache/maven/archiva/model/AbstractArtifactKey.java
  72. +15 -3  archiva-modules/archiva-base/archiva-model/src/main/java/org/apache/maven/archiva/model/ArchivaArtifact.java
  73. +19 -3  archiva-modules/archiva-base/archiva-model/src/main/mdo/archiva-base.xml
  74. +1 -1  archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/project/resolvers/ManagedRepositoryProjectResolver.java
  75. +2 -2  archiva-modules/archiva-database/src/main/java/org/apache/maven/archiva/database/ArtifactDAO.java
  76. +1 -3  archiva-modules/archiva-database/src/main/java/org/apache/maven/archiva/database/RepositoryDatabaseEventListener.java
  77. +30 -29  archiva-modules/archiva-database/src/main/java/org/apache/maven/archiva/database/browsing/DefaultRepositoryBrowsing.java
  78. +5 -4  archiva-modules/archiva-database/src/main/java/org/apache/maven/archiva/database/jdo/JdoArtifactDAO.java
  79. +3 -2  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/AbstractArchivaDatabaseTestCase.java
  80. +9 -4  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/RepositoryDatabaseEventListenerTest.java
  81. +2 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/browsing/RepositoryBrowsingTest.java
  82. +2 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactVersionsConstraintTest.java
  83. +2 -2  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactsByChecksumConstraintTest.java
  84. +1 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactsByRepositoryConstraintTest.java
  85. +2 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactsProcessedConstraintTest.java
  86. +1 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactsRelatedConstraintTest.java
  87. +1 -0  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/MostRecentRepositoryScanStatisticsTest.java
  88. +1 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/OlderArtifactsByAgeConstraintTest.java
  89. +1 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/OlderSnapshotArtifactsByAgeConstraintTest.java
  90. +7 -6  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ProjectsByArtifactUsageConstraintTest.java
  91. +2 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/RecentArtifactsByAgeConstraintTest.java
  92. +1 -0  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/RepositoryContentStatisticsByRepositoryConstraintTest.java
  93. +1 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/UniqueArtifactIdConstraintTest.java
  94. +1 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/UniqueFieldConstraintTest.java
  95. +1 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/UniqueGroupIdConstraintTest.java
  96. +1 -1  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/UniqueVersionConstraintTest.java
  97. +5 -4  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/jdo/JdoArtifactDAOTest.java
  98. +4 -3  archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/updater/DatabaseUpdaterTest.java
  99. +2 -2  archiva-modules/archiva-reporting/archiva-artifact-reports/src/test/java/org/apache/maven/archiva/reporting/artifact/DuplicateArtifactReportTest.java
  100. +0 -0  archiva-modules/archiva-reporting/archiva-report-manager/src/test/java/org/apache/maven/archiva/reporting/SimpleRepositoryStatisticsReportGeneratorTest.java

+1 -1  archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurge.java

@@ -89,7 +89,7 @@ public abstract class AbstractRepositoryPurge
ArchivaArtifact artifact =
new ArchivaArtifact( reference.getGroupId(), reference.getArtifactId(), reference.getVersion(),
- reference.getClassifier(), reference.getType() );
+ reference.getClassifier(), reference.getType(), repository.getId() );
for ( RepositoryListener listener : listeners )
{

+1 -1  archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/maven/archiva/consumers/core/repository/CleanupReleasedSnapshotsRepositoryPurge.java

@@ -165,7 +165,7 @@ public class CleanupReleasedSnapshotsRepositoryPurge
ArchivaArtifact artifact =
new ArchivaArtifact( artifactRef.getGroupId(), artifactRef.getArtifactId(), artifactRef.getVersion(),
- artifactRef.getClassifier(), artifactRef.getType() );
+ artifactRef.getClassifier(), artifactRef.getType(), repository.getId() );
for ( String version : snapshotVersions )
{

+1 -1  archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/AbstractRepositoryPurgeTest.java

@@ -145,6 +145,6 @@ public abstract class AbstractRepositoryPurgeTest

protected ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
{
- return new ArchivaArtifact( groupId, artifactId, version, null, type );
+ return new ArchivaArtifact( groupId, artifactId, version, null, type, TEST_REPO_ID );
}
}

+1 -4  archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/test/java/org/apache/maven/archiva/consumers/core/repository/CleanupReleasedSnapshotsRepositoryPurgeTest.java

@@ -39,17 +39,14 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
{
private ArchivaConfiguration archivaConfiguration;

private MockControl listenerControl;
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO =
"org/apache/archiva/released-artifact-in-diff-repo/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.jar";
public static final String PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO = "org/apache/maven/plugins/maven-source-plugin/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar";

public static final String PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO = "org/apache/maven/plugins/maven-plugin-plugin/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar";

private RepositoryListener listener;
@Override
protected void setUp()
throws Exception
{

+2 -1  archiva-modules/archiva-base/archiva-consumers/archiva-database-consumers/src/main/java/org/apache/maven/archiva/consumers/database/ArtifactUpdateDatabaseConsumer.java

@@ -227,7 +227,8 @@ public class ArtifactUpdateDatabaseConsumer
artifact.getArtifactId(),
artifact.getVersion(),
artifact.getClassifier(),
- artifact.getType() );
+ artifact.getType(),
+ repository.getId());

return liveArtifact;
}

+0 -1  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/main/java/org/apache/archiva/consumers/lucene/NexusIndexerConsumer.java

@@ -105,7 +105,6 @@ public class NexusIndexerConsumer
File indexDirectory = null;
if( indexDir != null && !"".equals( indexDir ) )
{
- //indexDirectory = new File( managedRepository, repository.getIndexDir() );
indexDirectory = new File( repository.getIndexDir() );
}
else

+24 -35  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/main/java/org/apache/maven/archiva/consumers/lucene/LuceneCleanupRemoveIndexedConsumer.java

@@ -22,12 +22,6 @@ package org.apache.maven.archiva.consumers.lucene;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
- import org.apache.maven.archiva.indexer.RepositoryContentIndex;
- import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
- import org.apache.maven.archiva.indexer.RepositoryIndexException;
- import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
- import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
- import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
@@ -56,11 +50,11 @@ public class LuceneCleanupRemoveIndexedConsumer
* @plexus.configuration default-value="Remove indexed content if not present on filesystem."
*/
private String description;
- /**
- * @plexus.requirement role-hint="lucene"
- */
- private RepositoryContentIndexFactory repoIndexFactory;
+ //
+ // /**
+ // * @plexus.requirement role-hint="lucene"
+ // */
+ // private RepositoryContentIndexFactory repoIndexFactory;

/**
* @plexus.requirement
@@ -97,32 +91,32 @@ public class LuceneCleanupRemoveIndexedConsumer
if( !file.exists() )
{
- RepositoryContentIndex bytecodeIndex = repoIndexFactory.createBytecodeIndex( repoContent.getRepository() );
- RepositoryContentIndex hashcodesIndex = repoIndexFactory.createHashcodeIndex( repoContent.getRepository() );
- RepositoryContentIndex fileContentIndex =
- repoIndexFactory.createFileContentIndex( repoContent.getRepository() );
- FileContentRecord fileContentRecord = new FileContentRecord();
- fileContentRecord.setFilename( repoContent.toPath( artifact ) );
- fileContentIndex.deleteRecord( fileContentRecord );
- HashcodesRecord hashcodesRecord = new HashcodesRecord();
- hashcodesRecord.setArtifact( artifact );
- hashcodesIndex.deleteRecord( hashcodesRecord );
+ // RepositoryContentIndex bytecodeIndex = repoIndexFactory.createBytecodeIndex( repoContent.getRepository() );
+ // RepositoryContentIndex hashcodesIndex = repoIndexFactory.createHashcodeIndex( repoContent.getRepository() );
+ // RepositoryContentIndex fileContentIndex =
+ // repoIndexFactory.createFileContentIndex( repoContent.getRepository() );
- BytecodeRecord bytecodeRecord = new BytecodeRecord();
- bytecodeRecord.setArtifact( artifact );
- bytecodeIndex.deleteRecord( bytecodeRecord );
+ // FileContentRecord fileContentRecord = new FileContentRecord();
+ // fileContentRecord.setFilename( repoContent.toPath( artifact ) );
+ // fileContentIndex.deleteRecord( fileContentRecord );
+ //
+ // HashcodesRecord hashcodesRecord = new HashcodesRecord();
+ // hashcodesRecord.setArtifact( artifact );
+ // hashcodesIndex.deleteRecord( hashcodesRecord );
+ //
+ // BytecodeRecord bytecodeRecord = new BytecodeRecord();
+ // bytecodeRecord.setArtifact( artifact );
+ // bytecodeIndex.deleteRecord( bytecodeRecord );
}
}
catch ( RepositoryException e )
{
throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
}
- catch ( RepositoryIndexException e )
- {
- throw new ConsumerException( e.getMessage() );
- }
+ // catch ( RepositoryIndexException e )
+ // {
+ // throw new ConsumerException( e.getMessage() );
+ // }
}

public String getDescription()
@@ -140,11 +134,6 @@ public class LuceneCleanupRemoveIndexedConsumer
return false;
}

- public void setRepositoryIndexFactory( RepositoryContentIndexFactory repoIndexFactory )
- {
- this.repoIndexFactory = repoIndexFactory;
- }

public void setRepositoryContentFactory( RepositoryContentFactory repoFactory )
{
this.repoFactory = repoFactory;

+76 -76  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/maven/archiva/consumers/lucene/LuceneCleanupRemoveIndexedConsumerTest.java

@@ -1,76 +1,76 @@
package org.apache.maven.archiva.consumers.lucene;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaArtifactModel;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
/**
* LuceneCleanupRemoveIndexedConsumerTest
*
* @version
*/
public class LuceneCleanupRemoveIndexedConsumerTest
extends PlexusInSpringTestCase
{
private DatabaseCleanupConsumer luceneCleanupRemoveIndexConsumer;
public void setUp()
throws Exception
{
super.setUp();
luceneCleanupRemoveIndexConsumer = (DatabaseCleanupConsumer)
lookup( DatabaseCleanupConsumer.class, "lucene-cleanup" );
}
public void testIfArtifactExists()
throws Exception
{
ArchivaArtifact artifact = createArtifact(
"org.apache.maven.archiva", "archiva-lucene-cleanup", "1.0", "jar" );
luceneCleanupRemoveIndexConsumer.processArchivaArtifact( artifact );
}
public void testIfArtifactDoesNotExist()
throws Exception
{
ArchivaArtifact artifact = createArtifact(
"org.apache.maven.archiva", "deleted-artifact", "1.0", "jar" );
luceneCleanupRemoveIndexConsumer.processArchivaArtifact( artifact );
}
private ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
{
ArchivaArtifactModel model = new ArchivaArtifactModel();
model.setGroupId( groupId );
model.setArtifactId( artifactId );
model.setVersion( version );
model.setType( type );
model.setRepositoryId( "test-repo" );
return new ArchivaArtifact( model );
}
}
//package org.apache.maven.archiva.consumers.lucene;
//
///*
// * Licensed to the Apache Software Foundation (ASF) under one
// * or more contributor license agreements. See the NOTICE file
// * distributed with this work for additional information
// * regarding copyright ownership. The ASF licenses this file
// * to you under the Apache License, Version 2.0 (the
// * "License"); you may not use this file except in compliance
// * with the License. You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing,
// * software distributed under the License is distributed on an
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// * KIND, either express or implied. See the License for the
// * specific language governing permissions and limitations
// * under the License.
// */
//
//import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
//import org.apache.maven.archiva.model.ArchivaArtifact;
//import org.apache.maven.archiva.model.ArchivaArtifactModel;
//import org.codehaus.plexus.spring.PlexusInSpringTestCase;
//
///**
// * LuceneCleanupRemoveIndexedConsumerTest
// *
// * @version
// */
//public class LuceneCleanupRemoveIndexedConsumerTest
// extends PlexusInSpringTestCase
//{
// private DatabaseCleanupConsumer luceneCleanupRemoveIndexConsumer;
//
// public void setUp()
// throws Exception
// {
// super.setUp();
//
// luceneCleanupRemoveIndexConsumer = (DatabaseCleanupConsumer)
// lookup( DatabaseCleanupConsumer.class, "lucene-cleanup" );
// }
//
// public void testIfArtifactExists()
// throws Exception
// {
// ArchivaArtifact artifact = createArtifact(
// "org.apache.maven.archiva", "archiva-lucene-cleanup", "1.0", "jar" );
//
// luceneCleanupRemoveIndexConsumer.processArchivaArtifact( artifact );
// }
//
// public void testIfArtifactDoesNotExist()
// throws Exception
// {
// ArchivaArtifact artifact = createArtifact(
// "org.apache.maven.archiva", "deleted-artifact", "1.0", "jar" );
//
// luceneCleanupRemoveIndexConsumer.processArchivaArtifact( artifact );
// }
//
// private ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
// {
// ArchivaArtifactModel model = new ArchivaArtifactModel();
// model.setGroupId( groupId );
// model.setArtifactId( artifactId );
// model.setVersion( version );
// model.setType( type );
// model.setRepositoryId( "test-repo" );
//
// return new ArchivaArtifact( model );
// }
//
//}

+0 -53  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/maven/archiva/consumers/lucene/stubs/LuceneRepositoryContentIndexFactoryStub.java

@@ -1,53 +0,0 @@
package org.apache.maven.archiva.consumers.lucene.stubs;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;

/**
* LuceneRepositoryContenIndexFactoryStub
*
* @version
*/
public class LuceneRepositoryContentIndexFactoryStub
implements RepositoryContentIndexFactory
{

public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub();
}

public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub();
}

public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub();
}

}

+0 -144  archiva-modules/archiva-base/archiva-consumers/archiva-lucene-consumers/src/test/java/org/apache/maven/archiva/consumers/lucene/stubs/LuceneRepositoryContentIndexStub.java

@@ -1,144 +0,0 @@
package org.apache.maven.archiva.consumers.lucene.stubs;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.io.File;
import java.util.Collection;

import junit.framework.Assert;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Searchable;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;

/**
* @version
*/
public class LuceneRepositoryContentIndexStub
implements RepositoryContentIndex
{

public void deleteRecords( Collection records )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
Assert.assertEquals( 2, records.size() );
}

public boolean exists()
throws RepositoryIndexException
{
// TODO Auto-generated method stub
return false;
}

public Collection getAllRecordKeys()
throws RepositoryIndexException
{
// TODO Auto-generated method stub
return null;
}

public Analyzer getAnalyzer()
{
// TODO Auto-generated method stub
return null;
}

public LuceneEntryConverter getEntryConverter()
{
// TODO Auto-generated method stub
return null;
}

public String getId()
{
// TODO Auto-generated method stub
return null;
}

public File getIndexDirectory()
{
// TODO Auto-generated method stub
return null;
}

public QueryParser getQueryParser()
{
// TODO Auto-generated method stub
return null;
}

public ManagedRepositoryConfiguration getRepository()
{
// TODO Auto-generated method stub
return null;
}

public Searchable getSearchable()
throws RepositoryIndexSearchException
{
// TODO Auto-generated method stub
return null;
}

public void indexRecords( Collection records )
throws RepositoryIndexException
{
// TODO Auto-generated method stub

}

public void modifyRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
// TODO Auto-generated method stub

}

public void modifyRecords( Collection records )
throws RepositoryIndexException
{
// TODO Auto-generated method stub

}
public void deleteRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
Assert.assertNotNull( record );
// fail since the record to be deleted should only be the deleted-artifact-1.0.jar
// according to the tests
if( record.getPrimaryKey().equals(
"org/apache/maven/archiva/archiva-lucene-cleanup/1.0/archiva-lucene-cleanup-1.0.jar" ) &&
record.getPrimaryKey().equals( "org.apache.maven.archiva:archiva-lucene-cleanup:1.0:jar" ) )
{
Assert.fail();
}
}

}

+0 -18  archiva-modules/archiva-base/archiva-indexer/pom.xml

@@ -83,24 +83,6 @@
</check>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-maven-plugin</artifactId>
<executions>
<execution>
<id>merge</id>
<goals>
<goal>merge-descriptors</goal>
</goals>
<configuration>
<descriptors>
<descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
<descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
</descriptors>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

+0 -44  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/ArtifactKeys.java

@@ -1,44 +0,0 @@
package org.apache.maven.archiva.indexer;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
* ArtifactKeys
*
* @version $Id$
*/
public class ArtifactKeys
{
public static final String GROUPID = "groupId";

public static final String GROUPID_EXACT = GROUPID + "_u";

public static final String ARTIFACTID = "artifactId";

public static final String ARTIFACTID_EXACT = ARTIFACTID + "_u";

public static final String VERSION = "version";

public static final String VERSION_EXACT = VERSION + "_u";

public static final String TYPE = "type";

public static final String CLASSIFIER = "classifier";
}

+0 -151  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryContentIndex.java

@@ -1,151 +0,0 @@
package org.apache.maven.archiva.indexer;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Searchable;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;

import java.io.File;
import java.util.Collection;

/**
* Common access methods for a Repository Content index.
*
*/
public interface RepositoryContentIndex
{
/**
* Indexes the records.
*
* @param records list of {@link LuceneRepositoryContentRecord} objects.
* @throws RepositoryIndexException if there is a problem indexing the records.
*/
void indexRecords( Collection records )
throws RepositoryIndexException;

/**
* Modify (potentially) existing records in the index.
*
* @param records the collection of {@link LuceneRepositoryContentRecord} objects to modify in the index.
* @throws RepositoryIndexException if there is a problem modifying the records.
*/
public void modifyRecords( Collection records )
throws RepositoryIndexException;

/**
* Modify an existing (potential) record in the index.
*
* @param record the record to modify.
* @throws RepositoryIndexException if there is a problem modifying the record.
*/
public void modifyRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException;

/**
* Check if the index already exists.
*
* @return true if the index already exists
* @throws RepositoryIndexException if the index location is not valid
*/
boolean exists()
throws RepositoryIndexException;

/**
* Delete records from the index. Simply ignore the request any did not exist.
*
* @param records the records to delete
* @throws RepositoryIndexException if there is a problem removing the record
*/
void deleteRecords( Collection records )
throws RepositoryIndexException;
/**
* Delete a record from the index. Simply ignore the request any did not exist.
*
* @param record the record to be deleted
* @throws RepositoryIndexException if there is a problem removing the record
*/
void deleteRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException;

/**
* Retrieve all primary keys of records in the index.
*
* @return the keys
* @throws RepositoryIndexException if there was an error searching the index
*/
Collection getAllRecordKeys()
throws RepositoryIndexException;

/**
* Get the index directory.
*
* @return the index directory.
*/
File getIndexDirectory();

/**
* Get the {@link QueryParser} appropriate for searches within this index.
*
* @return the query parser;
*/
QueryParser getQueryParser();

/**
* Get the id of index.
*
* @return the id of index.
*/
String getId();

/**
* Get the repository that this index belongs to.
*
* @return the repository that this index belongs to.
*/
ManagedRepositoryConfiguration getRepository();

/**
* Get the analyzer in use for this index.
*
* @return the analyzer in use.
*/
Analyzer getAnalyzer();

/**
* Get the document to record (and back again) converter.
*
* @return the converter in use.
*/
LuceneEntryConverter getEntryConverter();

/**
* Create a Searchable for this index.
*
* @return the Searchable.
* @throws RepositoryIndexSearchException if there was a problem creating the searchable.
*/
Searchable getSearchable()
throws RepositoryIndexSearchException;
}

+0 -70  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryContentIndexEventListener.java

@@ -1,70 +0,0 @@
package org.apache.maven.archiva.indexer;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;

/**
* Process repository management events and respond appropriately.
*
* @todo creating index instances every time is inefficient, the plugin needs to have a repository context to operate in
* @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="indexer"
*/
public class RepositoryContentIndexEventListener
implements RepositoryListener
{
/**
* @plexus.requirement role-hint="lucene"
*/
private RepositoryContentIndexFactory indexFactory;

public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
{
try
{
RepositoryContentIndex index = indexFactory.createFileContentIndex( repository.getRepository() );
FileContentRecord fileContentRecord = new FileContentRecord();
fileContentRecord.setRepositoryId( repository.getRepository().getId() );
fileContentRecord.setFilename( repository.toPath( artifact ) );
index.deleteRecord( fileContentRecord );

index = indexFactory.createHashcodeIndex( repository.getRepository() );
HashcodesRecord hashcodesRecord = new HashcodesRecord();
fileContentRecord.setRepositoryId( repository.getRepository().getId() );
hashcodesRecord.setArtifact( artifact );
index.deleteRecord( hashcodesRecord );

index = indexFactory.createBytecodeIndex( repository.getRepository() );
BytecodeRecord bytecodeRecord = new BytecodeRecord();
fileContentRecord.setRepositoryId( repository.getRepository().getId() );
bytecodeRecord.setArtifact( artifact );
index.deleteRecord( bytecodeRecord );
}
catch ( RepositoryIndexException e )
{
// Ignore
}
}
}

+0 -53  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryContentIndexFactory.java

@@ -1,53 +0,0 @@
package org.apache.maven.archiva.indexer;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;

/**
* Obtain an index instance.
*
*/
public interface RepositoryContentIndexFactory
{
/**
* Method to create an instance of the bytecode index.
*
* @param repository the repository to create the content index from.
* @return the index instance
*/
RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository );
/**
* Method to create an instance of the file content index.
*
* @param repository the repository to create the file content index from.
* @return the index instance
*/
RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository );

/**
* Method to create an instance of the hashcode index.
*
* @param repository the repository to create the content index from.
* @return the index instance
*/
RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository );
}

+0 -72  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeAnalyzer.java

@@ -1,72 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.lucene.analyzers.ClassnameTokenizer;
import org.apache.maven.archiva.indexer.lucene.analyzers.FilenamesTokenizer;
import org.apache.maven.archiva.indexer.lucene.analyzers.GroupIdTokenizer;
import org.apache.maven.archiva.indexer.lucene.analyzers.VersionTokenizer;

import java.io.Reader;

/**
* BytecodeAnalyzer
*
* @version $Id$
*/
public class BytecodeAnalyzer extends Analyzer
{
private static final Analyzer STANDARD = new StandardAnalyzer();

public TokenStream tokenStream( String field, Reader reader )
{
TokenStream tokenStream = null;

if ( BytecodeKeys.CLASSES.equals( field ) )
{
tokenStream = new ClassnameTokenizer( reader );
}
else if ( BytecodeKeys.FILES.equals( field ) )
{
tokenStream = new FilenamesTokenizer( reader );
}
else if ( ArtifactKeys.GROUPID.equals( field ) )
{
tokenStream = new GroupIdTokenizer( reader );
}
else if ( ArtifactKeys.VERSION.equals( field ) )
{
tokenStream = new VersionTokenizer( reader );
}
else
{
tokenStream = STANDARD.tokenStream( field, reader );
}
return new LowerCaseFilter( new StopFilter( tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS ) );
}
}

+0 -113  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeEntryConverter.java

@@ -1,113 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.document.Document;
import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.lucene.LuceneDocumentMaker;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.platform.JavaArtifactHelper;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
* Converter for Bytecode records and documents.
*
* @version $Id$
*/
public class BytecodeEntryConverter implements LuceneEntryConverter
{

public Document convert( LuceneRepositoryContentRecord record )
{
if ( !( record instanceof BytecodeRecord ) )
{
throw new ClassCastException( "Unable to convert type " + record.getClass().getName() + " to "
+ BytecodeRecord.class.getName() + "." );
}

BytecodeRecord bytecode = (BytecodeRecord) record;

LuceneDocumentMaker doc = new LuceneDocumentMaker( bytecode );

// Artifact Reference
doc.addFieldTokenized( ArtifactKeys.GROUPID, bytecode.getArtifact().getGroupId() );
doc.addFieldExact( ArtifactKeys.GROUPID_EXACT, bytecode.getArtifact().getGroupId() );
doc.addFieldTokenized( ArtifactKeys.ARTIFACTID, bytecode.getArtifact().getArtifactId() );
doc.addFieldExact( ArtifactKeys.ARTIFACTID_EXACT, bytecode.getArtifact().getArtifactId() );
doc.addFieldTokenized( ArtifactKeys.VERSION, bytecode.getArtifact().getVersion() );
doc.addFieldExact( ArtifactKeys.VERSION_EXACT, bytecode.getArtifact().getVersion() );
doc.addFieldTokenized( ArtifactKeys.TYPE, bytecode.getArtifact().getType() );
doc.addFieldUntokenized( ArtifactKeys.CLASSIFIER, bytecode.getArtifact().getClassifier() );

// Bytecode Specifics
doc.addFieldExact( BytecodeKeys.JDK, JavaArtifactHelper.getJavaDetails( bytecode.getArtifact() ).getJdk() );
doc.addFieldTokenized( BytecodeKeys.CLASSES, bytecode.getClasses() );
doc.addFieldTokenized( BytecodeKeys.METHODS, bytecode.getMethods() );
doc.addFieldTokenized( BytecodeKeys.FILES, bytecode.getFiles() );

return doc.getDocument();
}

public LuceneRepositoryContentRecord convert( Document document ) throws ParseException
{
BytecodeRecord record = new BytecodeRecord();

record.setRepositoryId( document.get( LuceneDocumentMaker.REPOSITORY_ID ) );
// Artifact Reference
String groupId = document.get( ArtifactKeys.GROUPID );
String artifactId = document.get( ArtifactKeys.ARTIFACTID );
String version = document.get( ArtifactKeys.VERSION );
String classifier = document.get( ArtifactKeys.CLASSIFIER );
String type = document.get( ArtifactKeys.TYPE );

ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
record.setArtifact( artifact );

// Bytecode Specifics
JavaArtifactHelper.getJavaDetails( record.getArtifact() ).setJdk( document.get( BytecodeKeys.JDK ) );
record.setClasses( getList( document, BytecodeKeys.CLASSES ) );
record.setMethods( getList( document, BytecodeKeys.METHODS ) );
record.setFiles( getList( document, BytecodeKeys.FILES ) );

return record;
}

public List getList( Document document, String key )
{
String rawlist = document.get( key );

if ( rawlist == null )
{
return null;
}

List ret = new ArrayList();
ret.addAll( Arrays.asList( rawlist.split( "\n" ) ) );

return ret;
}
}

+0 -76  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeHandlers.java

@@ -1,76 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;

/**
* BytecodeHandlers
*
* @version $Id$
*/
public class BytecodeHandlers
implements LuceneIndexHandlers
{
private BytecodeAnalyzer analyzer;

private BytecodeEntryConverter converter;

private QueryParser queryParser;

public BytecodeHandlers()
{
converter = new BytecodeEntryConverter();
analyzer = new BytecodeAnalyzer();
queryParser = new MultiFieldQueryParser( new String[] {
BytecodeKeys.GROUPID,
BytecodeKeys.ARTIFACTID,
BytecodeKeys.VERSION,
BytecodeKeys.CLASSIFIER,
BytecodeKeys.TYPE,
BytecodeKeys.CLASSES,
BytecodeKeys.FILES,
BytecodeKeys.METHODS }, analyzer );
}
public String getId()
{
return BytecodeKeys.ID;
}

public Analyzer getAnalyzer()
{
return analyzer;
}

public LuceneEntryConverter getConverter()
{
return converter;
}

public QueryParser getQueryParser()
{
return queryParser;
}
}

+0 -41  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeKeys.java

@@ -1,41 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.ArtifactKeys;

/**
* BytecodeKeys
*
* @version $Id$
*/
public class BytecodeKeys
extends ArtifactKeys
{
public static final String ID = "bytecode";

public static final String CLASSES = "classes";

public static final String METHODS = "methods";

public static final String FILES = "files";

public static final String JDK = "jdk";
}

+0 -177  archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/bytecode/BytecodeRecord.java

@@ -1,177 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.util.List;

/**
* Lucene Record for Bytecode information.
*
* @version $Id$
*/
public class BytecodeRecord
implements LuceneRepositoryContentRecord
{
private String repositoryId;

private ArchivaArtifact artifact;

private String filename;

private List<String> classes;

private List<String> methods;

private List<String> files;

public ArchivaArtifact getArtifact()
{
return artifact;
}

public List<String> getClasses()
{
return classes;
}

public List<String> getFiles()
{
return files;
}

public List<String> getMethods()
{
return methods;
}

public String getRepositoryId()
{
return repositoryId;
}

public String getPrimaryKey()
{
StringBuffer id = new StringBuffer();
id.append( artifact.getGroupId() ).append( ":" );
id.append( artifact.getArtifactId() ).append( ":" );
id.append( artifact.getVersion() );

if ( artifact.getClassifier() != null )
{
id.append( ":" ).append( artifact.getClassifier() );
}

id.append( ":" ).append( artifact.getType() );

return id.toString();
}

public void setArtifact( ArchivaArtifact artifact )
{
this.artifact = artifact;
}

public void setClasses( List<String> classes )
{
this.classes = classes;
}

public void setFiles( List<String> files )
{
this.files = files;
}

public void setMethods( List<String> methods )
{
this.methods = methods;
}

public void setRepositoryId( String repositoryId )
{
this.repositoryId = repositoryId;
}

public int hashCode()
{
final int PRIME = 31;
int result = 1;
result = PRIME * result + ( ( artifact == null ) ? 0 : artifact.hashCode() );
return result;
}

public boolean equals( Object obj )
{
if ( this == obj )
{
return true;
}

if ( obj == null )
{
return false;
}

if ( getClass() != obj.getClass() )
{
return false;
}

final BytecodeRecord other = (BytecodeRecord) obj;

if ( artifact == null )
{
if ( other.artifact != null )
{
return false;
}
}
else if ( !artifact.equals( other.artifact ) )
{
return false;
}

return true;
}

public String getFilename()
{
return filename;
}

public void setFilename( String filename )
{
this.filename = filename;
}

public String toString()
{
StringBuffer sb = new StringBuffer();

sb.append( "BytecodeRecord[" );
sb.append( "artifact=" ).append( artifact );
sb.append( ",filename=" ).append( filename );
sb.append( "]" );
return sb.toString();
}

}
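
To illustrate the key format produced by getPrimaryKey() above, a minimal sketch with made-up coordinates, using the five-argument ArchivaArtifact constructor seen elsewhere in this change:

import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

public class BytecodeRecordExample
{
    public static void main( String[] args )
    {
        ArchivaArtifact artifact =
            new ArchivaArtifact( "org.example", "example-lib", "1.0", "sources", "jar" );

        BytecodeRecord record = new BytecodeRecord();
        record.setRepositoryId( "internal" );
        record.setArtifact( artifact );

        // The classifier segment is included when present:
        // org.example:example-lib:1.0:sources:jar
        System.out.println( record.getPrimaryKey() );
    }
}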

+ 0
- 65
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentAnalyzer.java

@@ -1,65 +0,0 @@
package org.apache.maven.archiva.indexer.filecontent;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.maven.archiva.indexer.lucene.analyzers.FilenamesTokenizer;
import org.apache.maven.archiva.indexer.lucene.analyzers.ArtifactIdTokenizer;
import org.apache.maven.archiva.indexer.lucene.analyzers.GroupIdTokenizer;

import java.io.Reader;
import org.apache.maven.archiva.indexer.lucene.analyzers.VersionTokenizer;

/**
* FileContentAnalyzer
*
* @version $Id$
*/
public class FileContentAnalyzer extends Analyzer
{
private static final Analyzer STANDARD = new StandardAnalyzer();

public TokenStream tokenStream( String field, Reader reader )
{
if ( FileContentKeys.FILENAME.equals( field ) )
{
return new FilenamesTokenizer( reader );
}

if ( FileContentKeys.ARTIFACTID.equals( field ))
{
return new ArtifactIdTokenizer(reader);
}

if ( FileContentKeys.GROUPID.equals( field ) )
{
return new GroupIdTokenizer(reader);
}

if ( FileContentKeys.VERSION.equals( field ))
{
return new VersionTokenizer(reader);
}

return STANDARD.tokenStream( field, reader );
}
}

+ 0
- 96
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentConverter.java

@@ -1,96 +0,0 @@
package org.apache.maven.archiva.indexer.filecontent;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.commons.lang.StringUtils;
import org.apache.lucene.document.Document;
import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.lucene.LuceneDocumentMaker;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.text.ParseException;

/**
* FileContentConverter
*
* @version $Id$
*/
public class FileContentConverter
implements LuceneEntryConverter
{
public Document convert( LuceneRepositoryContentRecord record )
{
if ( !( record instanceof FileContentRecord ) )
{
throw new ClassCastException( "Unable to convert type " + record.getClass().getName() + " to "
+ FileContentRecord.class.getName() + "." );
}

FileContentRecord filecontent = (FileContentRecord) record;

LuceneDocumentMaker doc = new LuceneDocumentMaker( filecontent );

if( filecontent.getArtifact() != null )
{
// Artifact Reference
doc.addFieldTokenized( ArtifactKeys.GROUPID, filecontent.getArtifact().getGroupId() );
doc.addFieldExact( ArtifactKeys.GROUPID_EXACT, filecontent.getArtifact().getGroupId() );
doc.addFieldTokenized( ArtifactKeys.ARTIFACTID, filecontent.getArtifact().getArtifactId()); //, 2.0f);
doc.addFieldExact( ArtifactKeys.ARTIFACTID_EXACT, filecontent.getArtifact().getArtifactId(), 2.0f);
doc.addFieldTokenized( ArtifactKeys.VERSION, filecontent.getArtifact().getVersion() );
doc.addFieldExact( ArtifactKeys.VERSION_EXACT, filecontent.getArtifact().getVersion() );
doc.addFieldTokenized( ArtifactKeys.TYPE, filecontent.getArtifact().getType() );
doc.addFieldUntokenized( ArtifactKeys.CLASSIFIER, filecontent.getArtifact().getClassifier() );
}

doc.addFieldTokenized( FileContentKeys.FILENAME, filecontent.getFilename() );

return doc.getDocument();
}

public LuceneRepositoryContentRecord convert( Document document )
throws ParseException
{
FileContentRecord record = new FileContentRecord();

record.setRepositoryId( document.get( LuceneDocumentMaker.REPOSITORY_ID ) );
// Artifact Reference
String groupId = document.get( ArtifactKeys.GROUPID );
String artifactId = document.get( ArtifactKeys.ARTIFACTID );
String version = document.get( ArtifactKeys.VERSION );
String classifier = document.get( ArtifactKeys.CLASSIFIER );
String type = document.get( ArtifactKeys.TYPE );
if( StringUtils.isNotBlank( groupId ) && StringUtils.isNotBlank( artifactId ) )
{
ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
record.setArtifact( artifact );
}

// Filecontent Specifics
record.setFilename( document.get( FileContentKeys.FILENAME ) );

return record;
}

}
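
A hypothetical round trip through the converter above; the repository id and path are illustrative only, and the optional artifact reference is omitted:

import org.apache.lucene.document.Document;
import org.apache.maven.archiva.indexer.filecontent.FileContentConverter;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;

public class FileContentConverterExample
{
    public static void main( String[] args ) throws Exception
    {
        FileContentRecord record = new FileContentRecord();
        record.setRepositoryId( "internal" );  // required: LuceneDocumentMaker rejects a blank repository id
        record.setFilename( "org/example/example-lib/1.0/example-lib-1.0.pom" );

        FileContentConverter converter = new FileContentConverter();

        // Record -> Lucene Document; the artifact reference is optional and omitted here.
        Document document = converter.convert( record );

        // Document -> record; repository id and filename survive the round trip.
        FileContentRecord copy = (FileContentRecord) converter.convert( document );
        System.out.println( copy.getPrimaryKey() );  // internal:org/example/example-lib/1.0/example-lib-1.0.pom
    }
}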

+ 0
- 78
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentHandlers.java

@@ -1,78 +0,0 @@
package org.apache.maven.archiva.indexer.filecontent;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;

/**
* FileContentHandlers
*
* @version $Id$
*/
public class FileContentHandlers
implements LuceneIndexHandlers
{
private FileContentAnalyzer analyzer;

private FileContentConverter converter;

private QueryParser queryParser;

public FileContentHandlers()
{
analyzer = new FileContentAnalyzer();
converter = new FileContentConverter();
queryParser = new MultiFieldQueryParser( new String[] {
FileContentKeys.FILENAME,
FileContentKeys.ARTIFACTID,
FileContentKeys.GROUPID,
FileContentKeys.ARTIFACTID_EXACT,
FileContentKeys.GROUPID_EXACT,
FileContentKeys.VERSION,
FileContentKeys.VERSION_EXACT},
analyzer );
//We prefer the narrowing approach to search results.
queryParser.setDefaultOperator(MultiFieldQueryParser.Operator.AND);
}

public String getId()
{
return FileContentKeys.ID;
}

public Analyzer getAnalyzer()
{
return analyzer;
}

public LuceneEntryConverter getConverter()
{
return converter;
}

public QueryParser getQueryParser()
{
return queryParser;
}
}
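
A small sketch of the effect of the AND default operator set in the constructor above; the query text is illustrative:

import org.apache.lucene.search.Query;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;

public class FileContentQueryExample
{
    public static void main( String[] args ) throws Exception
    {
        FileContentHandlers handlers = new FileContentHandlers();

        // With the default operator set to AND, both words must match a document.
        Query query = handlers.getQueryParser().parse( "commons logging" );
        System.out.println( query );
    }
}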

+ 0
- 35
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentKeys.java

@@ -1,35 +0,0 @@
package org.apache.maven.archiva.indexer.filecontent;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.ArtifactKeys;

/**
* Lucene Index Keys for the various fields in the FileContent index.
*
* @version $Id$
*/
public class FileContentKeys
extends ArtifactKeys
{
public static final String ID = "filecontent";

public static final String FILENAME = "filename";
}

+ 0
- 119
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/filecontent/FileContentRecord.java

@@ -1,119 +0,0 @@
package org.apache.maven.archiva.indexer.filecontent;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

/**
* Lucene record for file contents.
*
* @version $Id$
*/
public class FileContentRecord
implements LuceneRepositoryContentRecord
{
private String repositoryId;

private String filename;
/**
* Optional artifact reference for the file content.
*/
private ArchivaArtifact artifact;

public String getRepositoryId()
{
return repositoryId;
}

public void setRepositoryId( String repositoryId )
{
this.repositoryId = repositoryId;
}

public String getPrimaryKey()
{
return repositoryId + ":" + filename;
}

@Override
public int hashCode()
{
final int PRIME = 31;
int result = 1;
result = PRIME * result + ( ( filename == null ) ? 0 : filename.hashCode() );
return result;
}

@Override
public boolean equals( Object obj )
{
if ( this == obj )
{
return true;
}

if ( obj == null )
{
return false;
}

if ( getClass() != obj.getClass() )
{
return false;
}

final FileContentRecord other = (FileContentRecord) obj;

if ( filename == null )
{
if ( other.filename != null )
{
return false;
}
}
else if ( !filename.equals( other.filename ) )
{
return false;
}
return true;
}

public String getFilename()
{
return filename;
}

public void setFilename( String filename )
{
this.filename = filename;
}

public ArchivaArtifact getArtifact()
{
return artifact;
}

public void setArtifact( ArchivaArtifact artifact )
{
this.artifact = artifact;
}
}

+ 0
- 62
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesAnalyzer.java

@@ -1,62 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.lucene.analyzers.GroupIdTokenizer;
import org.apache.maven.archiva.indexer.lucene.analyzers.VersionTokenizer;

import java.io.Reader;

/**
* HashcodesAnalyzer
*
* @version $Id$
*/
public class HashcodesAnalyzer extends Analyzer
{
private static final Analyzer STANDARD = new StandardAnalyzer();

public TokenStream tokenStream( String field, Reader reader )
{
TokenStream tokenStream = null;

if ( ArtifactKeys.GROUPID.equals( field ) )
{
tokenStream = new GroupIdTokenizer( reader );
}
else if ( ArtifactKeys.VERSION.equals( field ) )
{
tokenStream = new VersionTokenizer( reader );
}
else
{
tokenStream = STANDARD.tokenStream( field, reader );
}

return new LowerCaseFilter( new StopFilter( tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS ) );
}
}

+ 0
- 90
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesEntryConverter.java

@@ -1,90 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.document.Document;
import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.lucene.LuceneDocumentMaker;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.text.ParseException;

/**
* Converter for Hashcode records and documents.
*
* @version $Id$
*/
public class HashcodesEntryConverter implements LuceneEntryConverter
{

public Document convert( LuceneRepositoryContentRecord record )
{
if ( !( record instanceof HashcodesRecord ) )
{
throw new ClassCastException( "Unable to convert type " + record.getClass().getName() + " to "
+ HashcodesRecord.class.getName() + "." );
}

HashcodesRecord hashcodes = (HashcodesRecord) record;

LuceneDocumentMaker doc = new LuceneDocumentMaker( hashcodes );
// Artifact Reference
doc.addFieldTokenized( ArtifactKeys.GROUPID, hashcodes.getArtifact().getGroupId() );
doc.addFieldExact( ArtifactKeys.GROUPID_EXACT, hashcodes.getArtifact().getGroupId() );
doc.addFieldTokenized( ArtifactKeys.ARTIFACTID, hashcodes.getArtifact().getArtifactId() );
doc.addFieldExact( ArtifactKeys.ARTIFACTID_EXACT, hashcodes.getArtifact().getArtifactId() );
doc.addFieldTokenized( ArtifactKeys.VERSION, hashcodes.getArtifact().getVersion() );
doc.addFieldExact( ArtifactKeys.VERSION_EXACT, hashcodes.getArtifact().getVersion() );
doc.addFieldTokenized( ArtifactKeys.TYPE, hashcodes.getArtifact().getType() );
doc.addFieldUntokenized( ArtifactKeys.CLASSIFIER, hashcodes.getArtifact().getClassifier() );

// Hashcode Specifics
doc.addFieldUntokenized( HashcodesKeys.MD5, hashcodes.getArtifact().getModel().getChecksumMD5() );
doc.addFieldUntokenized( HashcodesKeys.SHA1, hashcodes.getArtifact().getModel().getChecksumSHA1() );

return doc.getDocument();
}

public LuceneRepositoryContentRecord convert( Document document ) throws ParseException
{
HashcodesRecord record = new HashcodesRecord();
record.setRepositoryId( document.get( LuceneDocumentMaker.REPOSITORY_ID ) );

// Artifact Reference
String groupId = document.get( ArtifactKeys.GROUPID );
String artifactId = document.get( ArtifactKeys.ARTIFACTID );
String version = document.get( ArtifactKeys.VERSION );
String classifier = document.get( ArtifactKeys.CLASSIFIER );
String type = document.get( ArtifactKeys.TYPE );

ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
record.setArtifact( artifact );

// Hashcode Specifics
record.getArtifact().getModel().setChecksumMD5( document.get( HashcodesKeys.MD5 ) );
record.getArtifact().getModel().setChecksumSHA1( document.get( HashcodesKeys.SHA1 ) );

return record;
}
}

+ 0
- 75
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesHandlers.java

@@ -1,75 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;

/**
* HashcodesHandlers
*
* @version $Id$
*/
public class HashcodesHandlers
implements LuceneIndexHandlers
{
private HashcodesAnalyzer analyzer;

private HashcodesEntryConverter converter;

private QueryParser queryParser;

public HashcodesHandlers()
{
converter = new HashcodesEntryConverter();
analyzer = new HashcodesAnalyzer();
queryParser = new MultiFieldQueryParser( new String[] {
HashcodesKeys.GROUPID,
HashcodesKeys.ARTIFACTID,
HashcodesKeys.VERSION,
HashcodesKeys.CLASSIFIER,
HashcodesKeys.TYPE,
HashcodesKeys.MD5,
HashcodesKeys.SHA1 }, analyzer );
}

public String getId()
{
return HashcodesKeys.ID;
}

public Analyzer getAnalyzer()
{
return analyzer;
}

public LuceneEntryConverter getConverter()
{
return converter;
}

public QueryParser getQueryParser()
{
return queryParser;
}
}

+ 0
- 36
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesKeys.java

@@ -1,36 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.ArtifactKeys;

/**
* Lucene Index Keys for the various fields in the Hashcodes Index.
*
* @version $Id$
*/
public class HashcodesKeys extends ArtifactKeys
{
public static final String ID = "hashcodes";
public static final String MD5 = "md5";

public static final String SHA1 = "sha1";
}

+ 0
- 138
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesRecord.java

@@ -1,138 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

/**
* Lucene record for {@link ArchivaArtifact} hashcodes information.
*
* @version $Id$
*/
public class HashcodesRecord
implements LuceneRepositoryContentRecord
{
private String repositoryId;

private ArchivaArtifact artifact;

private String filename;

public ArchivaArtifact getArtifact()
{
return artifact;
}

public void setArtifact( ArchivaArtifact artifact )
{
this.artifact = artifact;
}

public String getPrimaryKey()
{
StringBuffer id = new StringBuffer();
id.append( artifact.getGroupId() ).append( ":" );
id.append( artifact.getArtifactId() ).append( ":" );
id.append( artifact.getVersion() );

if ( artifact.getClassifier() != null )
{
id.append( ":" ).append( artifact.getClassifier() );
}

id.append( ":" ).append( artifact.getType() );

return id.toString();
}

public int hashCode()
{
final int PRIME = 31;
int result = 1;
result = PRIME * result + ( ( artifact == null ) ? 0 : artifact.hashCode() );
return result;
}

public boolean equals( Object obj )
{
if ( this == obj )
{
return true;
}

if ( obj == null )
{
return false;
}

if ( getClass() != obj.getClass() )
{
return false;
}

final HashcodesRecord other = (HashcodesRecord) obj;

if ( artifact == null )
{
if ( other.artifact != null )
{
return false;
}
}
else if ( !artifact.equals( other.artifact ) )
{
return false;
}

return true;
}

public String getRepositoryId()
{
return this.repositoryId;
}

public void setRepositoryId( String repositoryId )
{
this.repositoryId = repositoryId;
}

public String getFilename()
{
return filename;
}

public void setFilename( String filename )
{
this.filename = filename;
}

public String toString()
{
StringBuffer sb = new StringBuffer();

sb.append( "HashcodesRecord[" );
sb.append( "artifact=" ).append( artifact );
sb.append( ",filename=" ).append( filename );
sb.append( "]" );
return sb.toString();
}
}

+ 0
- 154
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneDocumentMaker.java

@@ -1,154 +0,0 @@
package org.apache.maven.archiva.indexer.lucene;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.commons.lang.StringUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

import java.util.List;

/**
* LuceneDocumentMaker - a utility class for making lucene documents.
*
* @version $Id$
*/
public class LuceneDocumentMaker
{
public static final String PRIMARY_KEY = "pk";
public static final String REPOSITORY_ID = "repoId";

private Document document;

/**
* Construct a LuceneDocumentMaker based on the record provider.
*
* @param record the record.
* @throws IllegalArgumentException if the primary key is invalid.
*/
public LuceneDocumentMaker( LuceneRepositoryContentRecord record ) throws IllegalArgumentException
{
if ( record == null )
{
throw new IllegalArgumentException( "Not allowed to have a null record provider." );
}

String primaryKey = record.getPrimaryKey();

if ( StringUtils.isBlank( primaryKey ) )
{
throw new IllegalArgumentException( "Not allowed to have a blank primary key." );
}

String repositoryId = record.getRepositoryId();
if ( StringUtils.isBlank( repositoryId ) )
{
throw new IllegalArgumentException( "Not allowed to have a blank repository id." );
}

document = new Document();

document.add( new Field( PRIMARY_KEY, primaryKey, Field.Store.NO, Field.Index.UN_TOKENIZED ) );
document.add( new Field( REPOSITORY_ID, repositoryId, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
}

public LuceneDocumentMaker addFieldTokenized( String key, String value )
{
if ( value != null )
{
document.add( new Field( key, value, Field.Store.YES, Field.Index.TOKENIZED ) );
}

return this;
}

public LuceneDocumentMaker addFieldTokenized( String key, String value, float boost )
{
if ( value != null )
{
Field field = new Field( key, value, Field.Store.YES, Field.Index.TOKENIZED );
field.setBoost(boost);
document.add( field );
}

return this;
}

public LuceneDocumentMaker addFieldTokenized( String key, List list )
{
if ( ( list != null ) && ( !list.isEmpty() ) )
{
return addFieldTokenized( key, StringUtils.join( list.iterator(), "\n" ) );
}

return this;
}

public LuceneDocumentMaker addFieldUntokenized( String name, String value )
{
if ( value != null )
{
document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
}

return this;
}

public LuceneDocumentMaker addFieldUntokenized( String name, String value, float boost )
{
if ( value != null )
{
Field field = new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED );
field.setBoost(boost);
document.add( field );
}

return this;
}

public LuceneDocumentMaker addFieldExact( String name, String value )
{
if ( value != null )
{
document.add( new Field( name, value, Field.Store.NO, Field.Index.UN_TOKENIZED ) );
}

return this;
}

public LuceneDocumentMaker addFieldExact( String name, String value, float boost )
{
if ( value != null )
{
Field field = new Field( name, value, Field.Store.NO, Field.Index.UN_TOKENIZED );
field.setBoost(boost);
document.add( field );
}

return this;
}

public Document getDocument()
{
return this.document;
}
}
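
A minimal sketch of the builder style used above, with made-up record values; the constructor always writes the "pk" and "repoId" fields before any chained calls:

import org.apache.lucene.document.Document;
import org.apache.maven.archiva.indexer.filecontent.FileContentKeys;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.lucene.LuceneDocumentMaker;

public class LuceneDocumentMakerExample
{
    public static void main( String[] args )
    {
        FileContentRecord record = new FileContentRecord();
        record.setRepositoryId( "internal" );
        record.setFilename( "readme.txt" );

        // The constructor adds the primary key and repository id fields;
        // further fields are chained onto the maker.
        Document document = new LuceneDocumentMaker( record )
            .addFieldTokenized( FileContentKeys.FILENAME, record.getFilename() )
            .getDocument();

        System.out.println( document.get( LuceneDocumentMaker.REPOSITORY_ID ) );  // internal
    }
}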

+ 0
- 49
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneEntryConverter.java

@@ -1,49 +0,0 @@
package org.apache.maven.archiva.indexer.lucene;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.document.Document;

import java.text.ParseException;

/**
* A converter for {@link LuceneRepositoryContentRecord} to Lucene {@link Document} objects and back.
*
*/
public interface LuceneEntryConverter
{
/**
* Convert an index record to a Lucene document.
*
* @param record the record
* @return the document
*/
Document convert( LuceneRepositoryContentRecord record );

/**
* Convert a Lucene document to an index record.
*
* @param document the document
* @return the record
* @throws java.text.ParseException if there is a problem parsing a field (specifically, dates)
*/
LuceneRepositoryContentRecord convert( Document document )
throws ParseException;
}

+ 0
- 59
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneIndexHandlers.java

@@ -1,59 +0,0 @@
package org.apache.maven.archiva.indexer.lucene;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.QueryParser;

/**
* The important bits and pieces for handling a specific lucene index
*
* @version $Id$
*/
public interface LuceneIndexHandlers
{
/**
* Get the converter to use with this index.
*
* @return the converter to use.
*/
public LuceneEntryConverter getConverter();
/**
* Get the analyzer to use with this index.
*
* @return the analyzer to use.
*/
public Analyzer getAnalyzer();
/**
* Get the {@link QueryParser} appropriate for searches within this index.
*
* @return the query parser.
*/
public QueryParser getQueryParser();

/**
* Get the id of the index handler.
*
* @return the id of the index handler.
*/
public String getId();
}

+ 0
- 42
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneQuery.java

@@ -1,42 +0,0 @@
package org.apache.maven.archiva.indexer.lucene;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.query.Query;

/**
* A holder for a lucene query to pass to the indexer API.
*
*/
public class LuceneQuery
implements Query
{
private final org.apache.lucene.search.Query query;

public LuceneQuery( org.apache.lucene.search.Query query )
{
this.query = query;
}

public org.apache.lucene.search.Query getLuceneQuery()
{
return query;
}
}

+ 0
- 447
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryContentIndex.java

@@ -1,447 +0,0 @@
package org.apache.maven.archiva.indexer.lucene;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Searchable;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

/**
* Lucene implementation of a repository index.
*
*/
public class LuceneRepositoryContentIndex
implements RepositoryContentIndex
{
/**
* The max field length for a field in a document.
*/
private static final int MAX_FIELD_LENGTH = 40000;

/**
* The location of the index on the file system.
*/
private File indexLocation;

/**
* The Lucene Index Handlers
*/
private LuceneIndexHandlers indexHandlers;
private final ManagedRepositoryConfiguration repository;

public LuceneRepositoryContentIndex( ManagedRepositoryConfiguration repository, File indexDir, LuceneIndexHandlers handlers )
{
this.repository = repository;
this.indexLocation = indexDir;
this.indexHandlers = handlers;
}

public void indexRecords( Collection records )
throws RepositoryIndexException
{
deleteRecords( records );

addRecords( records );
}

public void modifyRecords( Collection records )
throws RepositoryIndexException
{
synchronized( repository )
{
IndexWriter indexWriter = null;
try
{
indexWriter = new IndexWriter( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexWriter.setMaxFieldLength( MAX_FIELD_LENGTH );
for ( Iterator i = records.iterator(); i.hasNext(); )
{
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexWriter.deleteDocuments( term );
Document document = indexHandlers.getConverter().convert( record );
indexWriter.addDocument( document );
}
}
indexWriter.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexWriter );
}
}
}

public void modifyRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
synchronized( repository )
{
IndexWriter indexWriter = null;
try
{
indexWriter = new IndexWriter( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexWriter.setMaxFieldLength( MAX_FIELD_LENGTH );
if ( record != null )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexWriter.deleteDocuments( term );
Document document = indexHandlers.getConverter().convert( record );
indexWriter.addDocument( document );
}
indexWriter.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexWriter );
}
}
}

private void addRecords( Collection records )
throws RepositoryIndexException
{
synchronized( repository )
{
IndexWriter indexWriter;
try
{
indexWriter = new IndexWriter( indexLocation, indexHandlers.getAnalyzer(), !exists() );
indexWriter.setMaxFieldLength( MAX_FIELD_LENGTH );
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Unable to open index", e );
}
try
{
for ( Iterator i = records.iterator(); i.hasNext(); )
{
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
{
Document document = indexHandlers.getConverter().convert( record );
indexWriter.addDocument( document );
}
}
indexWriter.optimize();
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Failed to add an index document", e );
}
finally
{
closeQuietly( indexWriter );
}
}
}

public void deleteRecords( Collection records )
throws RepositoryIndexException
{
synchronized( repository )
{
if ( exists() )
{
IndexReader indexReader = null;
try
{
indexReader = IndexReader.open( indexLocation );
for ( Iterator i = records.iterator(); i.hasNext(); )
{
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
if ( record != null )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexReader.deleteDocuments( term );
}
}
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexReader );
}
}
}
}
public void deleteRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
synchronized( repository )
{
if ( exists() )
{
IndexReader indexReader = null;
try
{
indexReader = IndexReader.open( indexLocation );
if ( record != null )
{
Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
indexReader.deleteDocuments( term );
}
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexReader );
}
}
}
}
public Collection getAllRecordKeys()
throws RepositoryIndexException
{
return getAllFieldValues( LuceneDocumentMaker.PRIMARY_KEY );
}

private List getAllFieldValues( String fieldName )
throws RepositoryIndexException
{
synchronized( repository )
{
List keys = new ArrayList();
if ( exists() )
{
IndexReader indexReader = null;
TermEnum terms = null;
try
{
indexReader = IndexReader.open( indexLocation );
terms = indexReader.terms( new Term( fieldName, "" ) );
while ( fieldName.equals( terms.term().field() ) )
{
keys.add( terms.term().text() );
if ( !terms.next() )
{
break;
}
}
}
catch ( IOException e )
{
throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
}
finally
{
closeQuietly( indexReader );
closeQuietly( terms );
}
}
return keys;
}
}
public Searchable getSearchable()
throws RepositoryIndexSearchException
{
try
{
IndexSearcher searcher = new IndexSearcher( indexLocation.getAbsolutePath() );
return searcher;
}
catch ( IOException e )
{
throw new RepositoryIndexSearchException( "Unable to open index: " + e.getMessage(), e );
}
}

public boolean exists()
throws RepositoryIndexException
{
if ( IndexReader.indexExists( indexLocation ) )
{
return true;
}
else if ( !indexLocation.exists() )
{
return false;
}
else if ( indexLocation.isDirectory() )
{
if ( indexLocation.listFiles().length > 1 )
{
throw new RepositoryIndexException( indexLocation + " is not a valid index directory." );
}
else
{
return false;
}
}
else
{
throw new RepositoryIndexException( indexLocation + " is not a directory." );
}
}

public QueryParser getQueryParser()
{
return this.indexHandlers.getQueryParser();
}

public static void closeSearchable( Searchable searchable )
{
if( searchable != null )
{
try
{
searchable.close();
}
catch ( IOException e )
{
// Ignore
}
}
}
private static void closeQuietly( TermEnum terms )
throws RepositoryIndexException
{
if ( terms != null )
{
try
{
terms.close();
}
catch ( IOException e )
{
// ignore
}
}
}

private static void closeQuietly( IndexWriter indexWriter )
throws RepositoryIndexException
{
try
{
if ( indexWriter != null )
{
indexWriter.close();
}
}
catch ( IOException e )
{
// writer should complain if it can't be closed; data probably not persisted
throw new RepositoryIndexException( e.getMessage(), e );
}
}

private static void closeQuietly( IndexReader reader )
{
try
{
if ( reader != null )
{
reader.close();
}
}
catch ( IOException e )
{
// ignore
}
}

public File getIndexDirectory()
{
return this.indexLocation;
}

public String getId()
{
return this.indexHandlers.getId();
}

public ManagedRepositoryConfiguration getRepository()
{
return repository;
}
public Analyzer getAnalyzer()
{
return this.indexHandlers.getAnalyzer();
}
public LuceneEntryConverter getEntryConverter()
{
return this.indexHandlers.getConverter();
}
}
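
A hypothetical end-to-end sketch of indexing a single record with the class above. All ids and paths are made up, and the no-argument constructor plus setId/setLocation setters on ManagedRepositoryConfiguration are assumptions (Modello-generated bean); only the constructor and methods shown in this file are relied on otherwise.

import java.io.File;

import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndex;
import org.apache.maven.archiva.model.ArchivaArtifact;

public class HashcodesIndexExample
{
    public static void main( String[] args ) throws Exception
    {
        // Assumed Modello-generated setters on the configuration bean.
        ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
        repository.setId( "internal" );
        repository.setLocation( "/tmp/repositories/internal" );

        LuceneRepositoryContentIndex index = new LuceneRepositoryContentIndex(
            repository, new File( "/tmp/index/internal/hashcodes" ), new HashcodesHandlers() );

        HashcodesRecord record = new HashcodesRecord();
        record.setRepositoryId( "internal" );
        record.setArtifact( new ArchivaArtifact( "org.example", "example-lib", "1.0", "", "jar" ) );

        // Delete-then-add by primary key, followed by an optimize (see modifyRecord above).
        index.modifyRecord( record );
        System.out.println( "record keys: " + index.getAllRecordKeys() );
    }
}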

+ 0
- 102
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryContentIndexFactory.java

@@ -1,102 +0,0 @@
package org.apache.maven.archiva.indexer.lucene;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers;

import java.io.File;

/**
* Factory for Lucene repository content index instances.
*
* @plexus.component role="org.apache.maven.archiva.indexer.RepositoryContentIndexFactory" role-hint="lucene"
*/
public class LuceneRepositoryContentIndexFactory
implements RepositoryContentIndexFactory
{
/**
* @plexus.requirement
*/
private ArchivaConfiguration configuration;

public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
{
File indexDir = toIndexDir( repository, "bytecode" );
return new LuceneRepositoryContentIndex( repository, indexDir, new BytecodeHandlers() );
}

public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
{
File indexDir = toIndexDir( repository, "filecontent" );
return new LuceneRepositoryContentIndex( repository, indexDir, new FileContentHandlers() );
}

public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
{
File indexDir = toIndexDir( repository, "hashcodes" );
return new LuceneRepositoryContentIndex( repository, indexDir, new HashcodesHandlers() );
}

/**
* Obtain the index directory for the provided repository.
*
* @param repository the repository to obtain the index directory from.
* @param indexId the id of the index
* @return the directory to put the index into.
*/
private File toIndexDir( ManagedRepositoryConfiguration repository, String indexId )
{
// Attempt to get the specified indexDir in the configuration first.
ManagedRepositoryConfiguration repoConfig =
configuration.getConfiguration().findManagedRepositoryById( repository.getId() );
File indexDir;

if ( repoConfig == null )
{
// No configured index dir, use the repository path instead.
String repoPath = repository.getLocation();
indexDir = new File( repoPath, ".index/" + indexId + "/" );
}
else
{
// Use configured index dir.
String repoPath = repoConfig.getIndexDir();
if ( StringUtils.isBlank( repoPath ) )
{
repoPath = repository.getLocation();
if ( !repoPath.endsWith( "/" ) )
{
repoPath += "/";
}
repoPath += ".index";
}
indexDir = new File( repoPath, "/" + indexId + "/" );
}

return indexDir;
}
}

+ 0
- 41
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryContentRecord.java

@@ -1,41 +0,0 @@
package org.apache.maven.archiva.indexer.lucene;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
* A repository content index record.
*
*/
public interface LuceneRepositoryContentRecord
{
/**
* Get the primary key used to identify the record uniquely in the index.
*
* @return the primary key
*/
public String getPrimaryKey();
/**
* Get the repository that this record belongs to.
*
* @return the repository id for this record.
*/
public String getRepositoryId();
}

+ 0
- 45
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/ArtifactIdTokenizer.java

@@ -1,45 +0,0 @@
package org.apache.maven.archiva.indexer.lucene.analyzers;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.io.Reader;
import org.apache.lucene.analysis.CharTokenizer;

/**
* Lucene Tokenizer for {@link ArtifactKeys#ARTIFACTID} fields.
*/
public class ArtifactIdTokenizer extends CharTokenizer
{
public ArtifactIdTokenizer( Reader reader )
{
super( reader );
}

/**
* Break on "-" for artifactIds such as "atlassian-plugins-core".
*
* @param c the character to test.
* @return true if the character is part of a token, false if it is the "-" delimiter.
*/
@Override
protected boolean isTokenChar(char c)
{
return (c != '-');
}
}
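
For illustration, tokenizing an artifactId with the class above; the Lucene 2.x TokenStream/Token API used by this module is assumed, and the input string is just an example:

import java.io.StringReader;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.maven.archiva.indexer.lucene.analyzers.ArtifactIdTokenizer;

public class ArtifactIdTokenizerExample
{
    public static void main( String[] args ) throws Exception
    {
        TokenStream stream = new ArtifactIdTokenizer( new StringReader( "atlassian-plugins-core" ) );

        // Prints "atlassian", "plugins" and "core" on separate lines.
        for ( Token token = stream.next(); token != null; token = stream.next() )
        {
            System.out.println( token.termText() );
        }
    }
}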

+ 0
- 57
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/ClassnameTokenizer.java

@@ -1,57 +0,0 @@
package org.apache.maven.archiva.indexer.lucene.analyzers;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.CharTokenizer;

import java.io.Reader;

/**
* Lucene Tokenizer for {@link BytecodeKeys#CLASSES} fields.
*
* @version $Id$
*/
public class ClassnameTokenizer extends CharTokenizer
{
public ClassnameTokenizer( Reader reader )
{
super( reader );
}

/**
* Determine Token Character.
*
* The field is a list of full classnames ("com.foo.Object") separated by
* newline characters ("\n").
*
* Identify newline "\n" and "." as the token delimiters.
*/
protected boolean isTokenChar( char c )
{
return ( ( c != '\n' ) && ( c != '.' ) );
}

/*
protected char normalize( char c )
{
return Character.toLowerCase( c );
}
*/
}

+ 0
- 50
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/FilenamesTokenizer.java

@@ -1,50 +0,0 @@
package org.apache.maven.archiva.indexer.lucene.analyzers;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.CharTokenizer;

import java.io.Reader;

/**
* Lucene Tokenizer for {@link BytecodeKeys#FILES} fields.
*
* @version $Id$
*/
public class FilenamesTokenizer extends CharTokenizer
{
public FilenamesTokenizer( Reader reader )
{
super( reader );
}

/**
* Determine Token Character.
*
* The field is a list of full filenames ("/home/archiva/foo/readme.txt") separated by
* newline characters ("\n").
*
* Identify newline "\n" and "/" as the token delimiters.
*/
protected boolean isTokenChar( char c )
{
return ( ( c != '\n' ) && ( c != '/' ) );
}
}

+ 0
- 50
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/GroupIdTokenizer.java

@@ -1,50 +0,0 @@
package org.apache.maven.archiva.indexer.lucene.analyzers;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.CharTokenizer;

import java.io.Reader;

/**
* Lucene Tokenizer for {@link ArtifactKeys#GROUPID} fields.
*
* @version $Id$
*/
public class GroupIdTokenizer extends CharTokenizer
{
public GroupIdTokenizer( Reader reader )
{
super( reader );
}

/**
* Determine Token Character.
*
* The field is a groupId "com.foo.project".
*
* Identify "." as the token delimiter.
*/
protected boolean isTokenChar( char c )
{
return ( c != '.' );
}

}

+ 0
- 49
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/analyzers/VersionTokenizer.java

@@ -1,49 +0,0 @@
package org.apache.maven.archiva.indexer.lucene.analyzers;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.analysis.CharTokenizer;

import java.io.Reader;

/**
* Lucene Tokenizer for {@link ArtifactKeys#VERSION} fields.
*
* @version $Id$
*/
public class VersionTokenizer extends CharTokenizer
{
public VersionTokenizer( Reader reader )
{
super( reader );
}

/**
* Determine Token Character.
*
* The field is a version id in the form "1.0-alpha-4-SNAPSHOT".
*
* Identify "-" as the token delimiter.
*/
protected boolean isTokenChar( char c )
{
return ( c != '.' ) && ( c != '-' );
}
}

+ 0
- 107
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/CompoundQuery.java

@@ -1,107 +0,0 @@
package org.apache.maven.archiva.indexer.query;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.util.ArrayList;
import java.util.List;

/**
* Class to hold multiple SingleTermQueries and/or other CompoundQueries.
*
*/
public class CompoundQuery
implements Query
{
/**
* The query terms.
*/
private final List compoundQueryTerms = new ArrayList();

/**
* Appends a required term to this query.
*
* @param term the term to be appended to this query
*/
public void and( QueryTerm term )
{
compoundQueryTerms.add( CompoundQueryTerm.and( new SingleTermQuery( term ) ) );
}

/**
* Appends an optional term to this query.
*
* @param term the term to be appended to this query
*/
public void or( QueryTerm term )
{
compoundQueryTerms.add( CompoundQueryTerm.or( new SingleTermQuery( term ) ) );
}

/**
* Appends a prohibited term to this query.
*
* @param term the term to be appended to this query
*/
public void not( QueryTerm term )
{
compoundQueryTerms.add( CompoundQueryTerm.not( new SingleTermQuery( term ) ) );
}

/**
* Appends a required subquery to this query.
*
* @param query the subquery to be appended to this query
*/
public void and( Query query )
{
compoundQueryTerms.add( CompoundQueryTerm.and( query ) );
}

/**
* Appends an optional subquery to this query.
*
* @param query the subquery to be appended to this query
*/
public void or( Query query )
{
compoundQueryTerms.add( CompoundQueryTerm.or( query ) );
}

/**
* Appends a prohibited subquery to this query.
*
* @param query the subquery to be appended to this query
*/
public void not( Query query )
{
compoundQueryTerms.add( CompoundQueryTerm.not( query ) );
}

/**
* Method to get the list of query terms appended to this query.
*
* @return the List of all {@link CompoundQueryTerm}s added to this query
*/
public List getCompoundQueryTerms()
{
return compoundQueryTerms;
}

}
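
A brief hypothetical sketch (not part of this commit) of how a caller could assemble a CompoundQuery from QueryTerms; the ArtifactKeys field constants and the example values are assumptions taken from elsewhere in this module.

import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.query.CompoundQuery;
import org.apache.maven.archiva.indexer.query.QueryTerm;

public class CompoundQuerySketch
{
    public static CompoundQuery buildExample()
    {
        CompoundQuery query = new CompoundQuery();
        // Required: the groupId must match.
        query.and( new QueryTerm( ArtifactKeys.GROUPID, "org.apache.maven.archiva" ) );
        // Optional: the artifactId may match.
        query.or( new QueryTerm( ArtifactKeys.ARTIFACTID, "archiva-common" ) );
        // Prohibited: exclude this snapshot version.
        query.not( new QueryTerm( ArtifactKeys.VERSION, "1.0-SNAPSHOT" ) );
        // getCompoundQueryTerms() now holds three CompoundQueryTerm entries.
        return query;
    }
}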

+ 0
- 102
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/CompoundQueryTerm.java View File

@@ -1,102 +0,0 @@
package org.apache.maven.archiva.indexer.query;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
* A single term within a {@link CompoundQuery}, wrapping a subquery together with its
* required/prohibited flags.
*
*/
public class CompoundQueryTerm
{
/**
* The query to add to the compound query.
*/
private final Query query;

/**
* Whether the term is required (an AND).
*/
private final boolean required;

/**
* Whether the term is prohibited (a NOT).
*/
private final boolean prohibited;

/**
* Class constructor
*
* @param query the subquery to add
* @param required whether the term is required (an AND)
* @param prohibited whether the term is prohibited (a NOT)
*/
private CompoundQueryTerm( Query query, boolean required, boolean prohibited )
{
this.query = query;
this.prohibited = prohibited;
this.required = required;
}

/**
* Method to test if the Query is a search requirement
*
* @return true if this Query is a search requirement, otherwise returns false
*/
public boolean isRequired()
{
return required;
}

/**
* Method to test if the Query is prohibited in the search result
*
* @return true if this Query is prohibited in the search result
*/
public boolean isProhibited()
{
return prohibited;
}


/**
* The subquery to execute.
*
* @return the query
*/
public Query getQuery()
{
return query;
}

static CompoundQueryTerm and( Query query )
{
return new CompoundQueryTerm( query, true, false );
}

static CompoundQueryTerm or( Query query )
{
return new CompoundQueryTerm( query, false, false );
}

static CompoundQueryTerm not( Query query )
{
return new CompoundQueryTerm( query, false, true );
}
}

+ 0
- 28
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/Query.java View File

@@ -1,28 +0,0 @@
package org.apache.maven.archiva.indexer.query;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
* Marker interface identifying the query classes.
*
*/
public interface Query
{
}

+ 0
- 63
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/QueryTerm.java View File

@@ -1,63 +0,0 @@
package org.apache.maven.archiva.indexer.query;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
* Class to hold a single field search condition
*
*/
public class QueryTerm
{
private String field;

private String value;

/**
* Class constructor
*
* @param field the index field to search
* @param value the index value requirement
*/
public QueryTerm( String field, String value )
{
this.field = field;
this.value = value;
}

/**
* Method to retrieve the name of the index field searched
*
* @return the name of the index field
*/
public String getField()
{
return field;
}

/**
* Method to retrieve the value used in searching the index field
*
* @return the value to match against the index field
*/
public String getValue()
{
return value;
}
}

+ 0
- 151
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/RangeQuery.java View File

@@ -1,151 +0,0 @@
package org.apache.maven.archiva.indexer.query;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
* Query object that handles range queries (presently used for dates).
*
*/
public class RangeQuery
implements Query
{
/**
* Whether values equal to the boundaries are included in the query results.
*/
private final boolean inclusive;

/**
* The lower bound.
*/
private final QueryTerm begin;

/**
* The upper bound.
*/
private final QueryTerm end;

/**
* Constructor.
*
* @param begin the lower bound
* @param end the upper bound
* @param inclusive whether to include the boundaries in the query
*/
private RangeQuery( QueryTerm begin, QueryTerm end, boolean inclusive )
{
this.begin = begin;
this.end = end;
this.inclusive = inclusive;
}

/**
* Create an open range, including all results.
*
* @return the query object
*/
public static RangeQuery createOpenRange()
{
return new RangeQuery( null, null, false );
}

/**
* Create a bounded range, excluding the endpoints.
*
* @param begin the lower bound value to compare to
* @param end the upper bound value to compare to
* @return the query object
*/
public static RangeQuery createExclusiveRange( QueryTerm begin, QueryTerm end )
{
return new RangeQuery( begin, end, false );
}

/**
* Create a bounded range, including the endpoints.
*
* @param begin the lower bound value to compare to
* @param end the upper bound value to compare to
* @return the query object
*/
public static RangeQuery createInclusiveRange( QueryTerm begin, QueryTerm end )
{
return new RangeQuery( begin, end, true );
}

/**
* Create a range that is greater than or equal to a given term.
*
* @param begin the value to compare to
* @return the query object
*/
public static RangeQuery createGreaterThanOrEqualToRange( QueryTerm begin )
{
return new RangeQuery( begin, null, true );
}

/**
* Create a range that is greater than a given term.
*
* @param begin the value to compare to
* @return the query object
*/
public static RangeQuery createGreaterThanRange( QueryTerm begin )
{
return new RangeQuery( begin, null, false );
}

/**
* Create a range that is less than or equal to a given term.
*
* @param end the value to compare to
* @return the query object
*/
public static RangeQuery createLessThanOrEqualToRange( QueryTerm end )
{
return new RangeQuery( null, end, true );
}

/**
* Create a range that is less than a given term.
*
* @param end the value to compare to
* @return the query object
*/
public static RangeQuery createLessThanRange( QueryTerm end )
{
return new RangeQuery( null, end, false );
}

public QueryTerm getBegin()
{
return begin;
}

public QueryTerm getEnd()
{
return end;
}

public boolean isInclusive()
{
return inclusive;
}

}
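
A short hypothetical sketch (not from this commit) of the factory methods above; the "date" field name, the bound values and the wrapper class are illustrative assumptions.

import org.apache.maven.archiva.indexer.query.QueryTerm;
import org.apache.maven.archiva.indexer.query.RangeQuery;

public class RangeQuerySketch
{
    public static RangeQuery lastModifiedIn2007()
    {
        // Inclusive range: both boundary values are part of the results.
        QueryTerm begin = new QueryTerm( "date", "20070101" );
        QueryTerm end = new QueryTerm( "date", "20071231" );
        return RangeQuery.createInclusiveRange( begin, end );
    }

    public static RangeQuery modifiedAfter( String yyyymmdd )
    {
        // Open-ended lower bound, excluding the boundary value itself.
        return RangeQuery.createGreaterThanRange( new QueryTerm( "date", yyyymmdd ) );
    }
}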

+ 0
- 64
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/query/SingleTermQuery.java View File

@@ -1,64 +0,0 @@
package org.apache.maven.archiva.indexer.query;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
* Query for a single term.
*
*/
public class SingleTermQuery
implements Query
{
/**
* The term to query for.
*/
private final QueryTerm term;

/**
* Constructor.
*
* @param term the term to query
*/
public SingleTermQuery( QueryTerm term )
{
this.term = term;
}

/**
* Shorthand constructor - create a single term query from a field and value
*
* @param field the field name
* @param value the value to check for
*/
public SingleTermQuery( String field, String value )
{
this.term = new QueryTerm( field, value );
}

public String getField()
{
return term.getField();
}

public String getValue()
{
return term.getValue();
}
}
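
For completeness, a tiny hypothetical sketch of the shorthand constructor above; the field constant is assumed from the ArtifactKeys class in this module and the wrapper class is not part of this commit.

import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.query.SingleTermQuery;

public class SingleTermQuerySketch
{
    public static SingleTermQuery byArtifactId( String artifactId )
    {
        // Equivalent to new SingleTermQuery( new QueryTerm( ArtifactKeys.ARTIFACTID, artifactId ) ).
        return new SingleTermQuery( ArtifactKeys.ARTIFACTID, artifactId );
    }
}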

+ 0
- 88
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/CrossRepositorySearch.java View File

@@ -1,88 +0,0 @@
package org.apache.maven.archiva.indexer.search;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.util.List;

/**
* Search across repositories in lucene indexes.
*
* @version $Id$
* @todo add security so that searches are not performed in repositories the user does not have access to.
*/
public interface CrossRepositorySearch
{
/**
* Search for the specific term across all repositories.
*
* @param principal the user performing the search.
* @param selectedRepos the repositories to search.
* @param term the term to search for.
* @param limits the limits to apply to the search results.
* @return the results.
*/
public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term, SearchResultLimits limits );
/**
* Search for a specific term from the previous search results.
*
* @param principal the user doing the search.
* @param selectedRepos the repositories to search from.
* @param term the term to search for.
* @param limits the limits to apply to the search results.
* @param previousSearchTerms the list of the previous search terms.
* @return the results
*/
public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term,
SearchResultLimits limits, List<String> previousSearchTerms );
/**
* Search for the specific bytecode across all repositories.
*
* @param principal the user performing the search.
* @param selectedRepos the repositories to search.
* @param term the term to search for.
* @param limits the limits to apply to the search results.
* @return the results.
*/
public SearchResults searchForBytecode( String principal, List<String> selectedRepos, String term, SearchResultLimits limits );

/**
* Search for the specific checksum string across all repositories.
*
* @param principal the user performing the search.
* @param selectedRepos the repositories to search.
* @param checksum the checksum string to search for.
* @param limits the limits to apply to the search results.
* @return the results.
*/
public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum, SearchResultLimits limits );
/**
* Search for a specific artifact matching the given field values. The search is performed on the bytecode
* index/indices.
*
* @param principal the user performing the search
* @param selectedRepos the repositories to be searched
* @param groupId groupId to be matched
* @param artifactId artifactId to be matched
* @param version version to be matched
* @param className Java class or package name to be matched
* @param limits the limits to apply to the search results
* @return the search results
*/
public SearchResults executeFilteredSearch( String principal, List<String> selectedRepos, String groupId,
String artifactId, String version, String className,
SearchResultLimits limits );
}
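
A hypothetical caller sketch (not part of this commit); the principal, repository id and search term are placeholder values, and in practice the component would be obtained through Plexus injection rather than passed in directly.

import java.util.Collections;
import java.util.List;

import org.apache.maven.archiva.indexer.search.CrossRepositorySearch;
import org.apache.maven.archiva.indexer.search.SearchResultLimits;
import org.apache.maven.archiva.indexer.search.SearchResults;

public class CrossRepositorySearchSketch
{
    public static SearchResults findTerm( CrossRepositorySearch search, SearchResultLimits limits )
    {
        // "guest" and "internal" are placeholder values for this sketch.
        List<String> selectedRepos = Collections.singletonList( "internal" );
        return search.searchForTerm( "guest", selectedRepos, "commons-lang", limits );
    }
}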

+ 0
- 491
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/DefaultCrossRepositorySearch.java View File

@@ -1,491 +0,0 @@
package org.apache.maven.archiva.indexer.search;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.document.Document;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanFilter;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DuplicateFilter;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilterClause;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.MultiSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.Searchable;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers;
import org.apache.maven.archiva.indexer.bytecode.BytecodeKeys;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;
import org.apache.maven.archiva.indexer.filecontent.FileContentKeys;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* DefaultCrossRepositorySearch
*
* @version $Id$
* @plexus.component role="org.apache.maven.archiva.indexer.search.CrossRepositorySearch" role-hint="default"
*/
public class DefaultCrossRepositorySearch
implements CrossRepositorySearch, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( DefaultCrossRepositorySearch.class );

/**
* @plexus.requirement role-hint="lucene"
*/
private RepositoryContentIndexFactory indexFactory;

/**
* @plexus.requirement
*/
private ArchivaConfiguration configuration;

private final List<ManagedRepositoryConfiguration> localIndexedRepositories = new ArrayList<ManagedRepositoryConfiguration>();
public SearchResults executeFilteredSearch( String principal, List<String> selectedRepos, String groupId,
String artifactId, String version, String className,
SearchResultLimits limits )
{
List<RepositoryContentIndex> indexes = getBytecodeIndexes( principal, selectedRepos );
SearchResults results = new SearchResults();
List<String> fieldsList = new ArrayList<String>();
List<String> termsList = new ArrayList<String>();
List<BooleanClause.Occur> flagsList = new ArrayList<BooleanClause.Occur>();
if( groupId != null && !"".equals( groupId.trim() ) )
{
fieldsList.add( ArtifactKeys.GROUPID );
termsList.add( groupId );
flagsList.add( BooleanClause.Occur.MUST );
}
if( artifactId != null && !"".equals( artifactId.trim() ) )
{
fieldsList.add( ArtifactKeys.ARTIFACTID );
termsList.add( artifactId );
flagsList.add( BooleanClause.Occur.MUST );
}
if( version != null && !"".equals( version.trim() ) )
{
fieldsList.add( ArtifactKeys.VERSION );
termsList.add( version );
flagsList.add( BooleanClause.Occur.MUST );
}
if( className != null && !"".equals( className.trim() ) )
{
fieldsList.add( BytecodeKeys.CLASSES );
fieldsList.add( BytecodeKeys.FILES );
fieldsList.add( BytecodeKeys.METHODS );
termsList.add( className.trim() );
termsList.add( className.trim() );
termsList.add( className.trim() );
flagsList.add( BooleanClause.Occur.SHOULD );
flagsList.add( BooleanClause.Occur.SHOULD );
flagsList.add( BooleanClause.Occur.SHOULD );
}
try
{
String[] fieldsArr = new String[ fieldsList.size() ];
String[] queryArr = new String[ termsList.size() ];
BooleanClause.Occur[] flagsArr = new BooleanClause.Occur[ flagsList.size() ];
Query fieldsQuery =
MultiFieldQueryParser.parse( termsList.toArray( queryArr ), fieldsList.toArray( fieldsArr ),
flagsList.toArray( flagsArr ), new BytecodeHandlers().getAnalyzer() );
LuceneQuery query = new LuceneQuery( fieldsQuery );
results = searchAll( query, limits, indexes, null );
results.getRepositories().addAll( this.localIndexedRepositories );
}
catch ( ParseException e )
{
log.warn( "Unable to parse advanced search fields and query terms." );
}

return results;
}

public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum,
SearchResultLimits limits )
{
List<RepositoryContentIndex> indexes = getHashcodeIndexes( principal, selectedRepos );

try
{
QueryParser parser = new MultiFieldQueryParser( new String[]{HashcodesKeys.MD5, HashcodesKeys.SHA1},
new HashcodesHandlers().getAnalyzer() );
LuceneQuery query = new LuceneQuery( parser.parse( checksum ) );
SearchResults results = searchAll( query, limits, indexes, null );
results.getRepositories().addAll( this.localIndexedRepositories );

return results;
}
catch ( ParseException e )
{
log.warn( "Unable to parse query [" + checksum + "]: " + e.getMessage(), e );
}

// empty results.
return new SearchResults();
}

public SearchResults searchForBytecode( String principal, List<String> selectedRepos, String term, SearchResultLimits limits )
{
List<RepositoryContentIndex> indexes = getBytecodeIndexes( principal, selectedRepos );

try
{
QueryParser parser = new BytecodeHandlers().getQueryParser();
LuceneQuery query = new LuceneQuery( parser.parse( term ) );
SearchResults results = searchAll( query, limits, indexes, null );
results.getRepositories().addAll( this.localIndexedRepositories );

return results;
}
catch ( ParseException e )
{
log.warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
}

// empty results.
return new SearchResults();
}

public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term, SearchResultLimits limits )
{
return searchForTerm( principal, selectedRepos, term, limits, null );
}

public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term,
SearchResultLimits limits, List<String> previousSearchTerms )
{
List<RepositoryContentIndex> indexes = getFileContentIndexes( principal, selectedRepos );

try
{
QueryParser parser = new FileContentHandlers().getQueryParser();
LuceneQuery query = null;
SearchResults results = null;

BooleanFilter duplicateFilter = new BooleanFilter();
DuplicateFilter artifactIdDuplicateFilter = new DuplicateFilter(FileContentKeys.ARTIFACTID_EXACT);
duplicateFilter.add(new FilterClause(artifactIdDuplicateFilter, BooleanClause.Occur.SHOULD));
DuplicateFilter groupIdDuplicateFilter = new DuplicateFilter(FileContentKeys.GROUPID_EXACT);
duplicateFilter.add(new FilterClause(groupIdDuplicateFilter, BooleanClause.Occur.SHOULD));

if ( previousSearchTerms == null || previousSearchTerms.isEmpty() )
{
query = new LuceneQuery( parser.parse( term ) );
results = searchAll( query, limits, indexes, duplicateFilter );
}
else
{
// AND the previous search terms
BooleanQuery booleanQuery = new BooleanQuery();
for ( String previousSearchTerm : previousSearchTerms )
{
booleanQuery.add( parser.parse( previousSearchTerm ), BooleanClause.Occur.MUST );
}

query = new LuceneQuery( booleanQuery );
Filter filter = new QueryWrapperFilter( parser.parse( term ) );
duplicateFilter.add(new FilterClause(filter, BooleanClause.Occur.SHOULD));
results = searchAll( query, limits, indexes, duplicateFilter );
}
results.getRepositories().addAll( this.localIndexedRepositories );

return results;
}
catch ( ParseException e )
{
log.warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
}

// empty results.
return new SearchResults();
}

private SearchResults searchAll( LuceneQuery luceneQuery, SearchResultLimits limits, List<RepositoryContentIndex> indexes, Filter filter )
{
org.apache.lucene.search.Query specificQuery = luceneQuery.getLuceneQuery();

SearchResults results = new SearchResults();

if ( indexes.isEmpty() )
{
// No point going any further.
return results;
}

// Setup the converter
LuceneEntryConverter converter = null;
RepositoryContentIndex index = indexes.get( 0 );
converter = index.getEntryConverter();

// Process indexes into an array of Searchables.
List<Searchable> searchableList = toSearchables( indexes );

Searchable[] searchables = new Searchable[searchableList.size()];
searchableList.toArray( searchables );

MultiSearcher searcher = null;

try
{
// Create a multi-searcher for looking up the information.
searcher = new MultiSearcher( searchables );
// Perform the search.
Hits hits = null;
if ( filter != null )
{
hits = searcher.search( specificQuery, filter );
}
else
{
hits = searcher.search( specificQuery );
}

int hitCount = hits.length();
// Now process the limits.
results.setLimits( limits );
results.setTotalHits( hitCount );

int fetchCount = limits.getPageSize();
int offset = ( limits.getSelectedPage() * limits.getPageSize() );

if ( limits.getSelectedPage() == SearchResultLimits.ALL_PAGES )
{
fetchCount = hitCount;
offset = 0;
}

// Goto offset.
if ( offset < hitCount )
{
// only process if the offset is within the hit count.
for ( int i = 0; i < fetchCount; i++ )
{
// Stop fetching if we are past the total # of available hits.
if ( offset + i >= hitCount )
{
break;
}

try
{
Document doc = hits.doc( offset + i );
LuceneRepositoryContentRecord record = converter.convert( doc );
results.addHit( record );
}
catch ( java.text.ParseException e )
{
log.warn( "Unable to parse document into record: " + e.getMessage(), e );
}
}
}

}
catch ( IOException e )
{
log.error( "Unable to setup multi-search: " + e.getMessage(), e );
}
finally
{
try
{
if ( searcher != null )
{
searcher.close();
}
}
catch ( IOException ie )
{
log.error( "Unable to close index searcher: " + ie.getMessage(), ie );
}
}

return results;
}

private List<Searchable> toSearchables( List<RepositoryContentIndex> indexes )
{
List<Searchable> searchableList = new ArrayList<Searchable>();
for ( RepositoryContentIndex contentIndex : indexes )
{
try
{
searchableList.add( contentIndex.getSearchable() );
}
catch ( RepositoryIndexSearchException e )
{
log.warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :"
+ e.getMessage(), e );
}
}
return searchableList;
}

public List<RepositoryContentIndex> getBytecodeIndexes( String principal, List<String> selectedRepos )
{
List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();

for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
// Only use the selected repositories.
if ( selectedRepos.contains( repoConfig.getId() ) )
{
RepositoryContentIndex index = indexFactory.createBytecodeIndex( repoConfig );
// If they exist.
if ( indexExists( index ) )
{
ret.add( index );
}
}
}

return ret;
}

public List<RepositoryContentIndex> getFileContentIndexes( String principal, List<String> selectedRepos )
{
List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();

for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
// Only use the selected repositories.
if ( selectedRepos.contains( repoConfig.getId() ) )
{
RepositoryContentIndex index = indexFactory.createFileContentIndex( repoConfig );
// If they exist.
if ( indexExists( index ) )
{
ret.add( index );
}
}
}

return ret;
}

public List<RepositoryContentIndex> getHashcodeIndexes( String principal, List<String> selectedRepos )
{
List<RepositoryContentIndex> ret = new ArrayList<RepositoryContentIndex>();

for ( ManagedRepositoryConfiguration repoConfig : localIndexedRepositories )
{
// Only use the selected repositories.
if ( selectedRepos.contains( repoConfig.getId() ) )
{
RepositoryContentIndex index = indexFactory.createHashcodeIndex( repoConfig );
// If they exist.
if ( indexExists( index ) )
{
ret.add( index );
}
}
}

return ret;
}

private boolean indexExists( RepositoryContentIndex index )
{
try
{
return index.exists();
}
catch ( RepositoryIndexException e )
{
log.info(
"Repository Content Index [" + index.getId() + "] for repository ["
+ index.getRepository().getId() + "] does not exist yet in ["
+ index.getIndexDirectory().getAbsolutePath() + "]." );
return false;
}
}

public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
if ( ConfigurationNames.isManagedRepositories( propertyName ) )
{
initRepositories();
}
}

public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
/* Nothing to do here */
}

private void initRepositories()
{
synchronized ( this.localIndexedRepositories )
{
this.localIndexedRepositories.clear();

List<ManagedRepositoryConfiguration> repos = configuration.getConfiguration().getManagedRepositories();
for ( ManagedRepositoryConfiguration repo : repos )
{
if ( repo.isScanned() )
{
localIndexedRepositories.add( repo );
}
}
}
}

public void initialize()
throws InitializationException
{
initRepositories();
configuration.addChangeListener( this );
}
}
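
To make the paging arithmetic in searchAll() above easier to follow, here is a standalone sketch of the same offset/fetch-count logic. It is an illustration, not code from this commit, and the -1 value for "all pages" is an assumption mirroring SearchResultLimits.ALL_PAGES.

public class PagingSketch
{
    // Assumed to mirror SearchResultLimits.ALL_PAGES in this module.
    private static final int ALL_PAGES = -1;

    /**
     * Returns { offset, fetchCount } for one page of results, as computed in searchAll().
     */
    public static int[] window( int selectedPage, int pageSize, int hitCount )
    {
        int fetchCount = pageSize;
        int offset = selectedPage * pageSize;

        if ( selectedPage == ALL_PAGES )
        {
            fetchCount = hitCount;
            offset = 0;
        }

        // Hits past the end are simply not fetched; the loop in searchAll() breaks
        // once offset + i reaches hitCount.
        return new int[] { offset, fetchCount };
    }

    public static void main( String[] args )
    {
        // Page 2 of a 30-hit result set with 10 hits per page starts at offset 20.
        int[] w = window( 2, 10, 30 );
        System.out.println( "offset=" + w[0] + ", fetchCount=" + w[1] );
    }
}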

+ 0
- 96
archiva-modules/archiva-base/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/search/SearchResults.java View File

@@ -19,13 +19,6 @@ package org.apache.maven.archiva.indexer.search;
* under the License.
*/

import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -56,95 +49,6 @@ public class SearchResults
{
hits.put( id, hit );
}
public void addHit( LuceneRepositoryContentRecord record )
{
if ( record instanceof FileContentRecord )
{
FileContentRecord filecontent = (FileContentRecord) record;
addFileContentHit( filecontent );
}
else if ( record instanceof HashcodesRecord )
{
HashcodesRecord hashcodes = (HashcodesRecord) record;
addHashcodeHit( hashcodes );
}
else if ( record instanceof BytecodeRecord )
{
BytecodeRecord bytecode = (BytecodeRecord) record;
addBytecodeHit( bytecode );
}
}

private void addBytecodeHit( BytecodeRecord bytecode )
{
String key = toKey( bytecode.getArtifact() );
SearchResultHit hit = (SearchResultHit) this.hits.get( key );

if ( hit == null )
{
hit = new SearchResultHit();
}
hit.setRepositoryId( bytecode.getRepositoryId() );
hit.setArtifact( bytecode.getArtifact() );
hit.setContext( null ); // TODO: provide context on why this is a valuable hit.

this.hits.put( key, hit );
}

private String toKey( ArchivaArtifact artifact )
{
StringBuffer key = new StringBuffer();

key.append( StringUtils.defaultString( artifact.getModel().getRepositoryId() ) ).append( ":" );
key.append( StringUtils.defaultString( artifact.getGroupId() ) ).append( ":" );
key.append( StringUtils.defaultString( artifact.getArtifactId() ) ).append( ":" );
key.append( StringUtils.defaultString( artifact.getVersion() ) );

return key.toString();
}

private void addHashcodeHit( HashcodesRecord hashcodes )
{
String key = toKey( hashcodes.getArtifact() );

SearchResultHit hit = (SearchResultHit) this.hits.get( key );

if ( hit == null )
{
hit = new SearchResultHit();
}

hit.setArtifact( hashcodes.getArtifact() );
hit.setContext( null ); // TODO: provide context on why this is a valuable hit.

hits.put( key, hit );
}

public void addFileContentHit( FileContentRecord filecontent )
{
final String key = filecontent.getPrimaryKey();
SearchResultHit hit = hits.get( key );
if ( hit == null )
{
// Only need to worry about this hit if it is truly new.
hit = new SearchResultHit();

hit.setRepositoryId( filecontent.getRepositoryId() );
hit.setUrl( filecontent.getRepositoryId() + "/" + filecontent.getFilename() );
hit.setContext( null ); // TODO: handle context + highlight later.
// Test for possible artifact reference ...
if( filecontent.getArtifact() != null )
{
hit.setArtifact( filecontent.getArtifact() );
}

this.hits.put( key, hit );
}
}

/**
* Get the list of {@link SearchResultHit} objects.

+ 0
- 190
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/AbstractIndexCreationTestCase.java View File

@@ -1,190 +0,0 @@
package org.apache.maven.archiva.indexer;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.commons.io.FileUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;

import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.util.Collections;

/**
* AbstractIndexCreationTestCase
*
* @version $Id$
*/
public abstract class AbstractIndexCreationTestCase extends AbstractIndexerTestCase
{
protected abstract LuceneRepositoryContentRecord createSimpleRecord();

public void testIndexExists() throws Exception
{
assertFalse( "check index doesn't exist", index.exists() );

File indexLocation = index.getIndexDirectory();
// create empty directory
indexLocation.mkdirs();
assertFalse( "check index doesn't exist even if directory does", index.exists() );

// create index, with no records
createEmptyIndex();
assertTrue( "check index is considered to exist", index.exists() );

// Test non-directory
FileUtils.deleteDirectory( indexLocation );
indexLocation.createNewFile();
try
{
index.exists();
fail( "Index operation should fail as the location is not valid" );
}
catch ( RepositoryIndexException e )
{
// great
}
finally
{
indexLocation.delete();
}
}

public void testAddRecordNoIndex() throws IOException, RepositoryIndexException, ParseException
{
LuceneRepositoryContentRecord record = createSimpleRecord();

index.indexRecords( Collections.singletonList( record ) );

IndexReader reader = IndexReader.open( index.getIndexDirectory() );
try
{
assertEquals( "Check index size", 1, reader.numDocs() );

Document document = reader.document( 0 );
assertRecord( record, document );
}
finally
{
reader.close();
}
}

public void testAddRecordExistingEmptyIndex() throws IOException, RepositoryIndexException, ParseException
{
createEmptyIndex();

LuceneRepositoryContentRecord record = createSimpleRecord();

index.indexRecords( Collections.singletonList( record ) );

IndexReader reader = IndexReader.open( index.getIndexDirectory() );
try
{
assertEquals( "Check index size", 1, reader.numDocs() );

Document document = reader.document( 0 );
assertRecord( record, document );
}
finally
{
reader.close();
}
}

public void testAddRecordInIndex() throws IOException, RepositoryIndexException, ParseException
{
createEmptyIndex();

LuceneRepositoryContentRecord record = createSimpleRecord();

index.indexRecords( Collections.singletonList( record ) );

// Do it again
record = createSimpleRecord();

index.indexRecords( Collections.singletonList( record ) );

IndexReader reader = IndexReader.open( index.getIndexDirectory() );
try
{
assertEquals( "Check index size", 1, reader.numDocs() );

Document document = reader.document( 0 );
assertRecord( record, document );
}
finally
{
reader.close();
}
}

public void testDeleteRecordInIndex() throws IOException, RepositoryIndexException
{
createEmptyIndex();

LuceneRepositoryContentRecord record = createSimpleRecord();

index.indexRecords( Collections.singletonList( record ) );

index.deleteRecords( Collections.singletonList( record ) );

IndexReader reader = IndexReader.open( index.getIndexDirectory() );
try
{
assertEquals( "No documents", 0, reader.numDocs() );
}
finally
{
reader.close();
}
}

public void testDeleteRecordNotInIndex() throws IOException, RepositoryIndexException
{
createEmptyIndex();

LuceneRepositoryContentRecord record = createSimpleRecord();

index.deleteRecords( Collections.singletonList( record ) );

IndexReader reader = IndexReader.open( index.getIndexDirectory() );
try
{
assertEquals( "No documents", 0, reader.numDocs() );
}
finally
{
reader.close();
}
}

public void testDeleteRecordNoIndex() throws IOException, RepositoryIndexException
{
LuceneRepositoryContentRecord record = createSimpleRecord();

index.deleteRecords( Collections.singleton( record ) );

assertFalse( index.exists() );
}
}

+ 0
- 235
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/AbstractIndexerTestCase.java View File

@@ -1,235 +0,0 @@
package org.apache.maven.archiva.indexer;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.commons.io.FileUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;

import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
* AbstractIndexerTestCase
*
* @version $Id$
*/
public abstract class AbstractIndexerTestCase
extends PlexusInSpringTestCase
{
protected RepositoryContentIndex index;

protected LuceneIndexHandlers indexHandlers;

private static final String TEST_DEFAULT_REPOSITORY_NAME = "Test Default Repository";

private static final String TEST_DEFAULT_REPO_ID = "testDefaultRepo";

public abstract String getIndexName();

protected void assertRecord( LuceneRepositoryContentRecord expectedRecord, Document luceneDocument )
throws ParseException
{
LuceneRepositoryContentRecord actualRecord = indexHandlers.getConverter().convert( luceneDocument );
assertRecord( expectedRecord, actualRecord );
}

protected void assertRecord( LuceneRepositoryContentRecord expectedRecord,
LuceneRepositoryContentRecord actualRecord )
{
assertEquals( expectedRecord, actualRecord );
}

public abstract RepositoryContentIndex createIndex( RepositoryContentIndexFactory indexFactory,
ManagedRepositoryConfiguration repository );

public abstract LuceneIndexHandlers getIndexHandler();

protected void setUp()
throws Exception
{
super.setUp();

RepositoryContentIndexFactory indexFactory =
(RepositoryContentIndexFactory) lookup( RepositoryContentIndexFactory.class
.getName(), "lucene" );

ManagedRepositoryConfiguration repository = createTestIndex( getIndexName() );

index = createIndex( indexFactory, repository );

indexHandlers = getIndexHandler();
}

private ManagedRepositoryConfiguration createTestIndex( String indexName )
throws Exception
{
File repoDir = new File( getBasedir(), "src/test/managed-repository" );
File testIndexesDir = new File( getBasedir(), "target/test-indexes" );

if ( !testIndexesDir.exists() )
{
testIndexesDir.mkdirs();
}

assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );

ManagedRepositoryConfiguration repository = createRepository( TEST_DEFAULT_REPO_ID,
TEST_DEFAULT_REPOSITORY_NAME, repoDir );

File indexLocation = new File( testIndexesDir, "/index-" + indexName + "-" + getName() + "/" );

MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );

ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
repoConfig.setId( TEST_DEFAULT_REPO_ID );
repoConfig.setName( TEST_DEFAULT_REPOSITORY_NAME );
repoConfig.setLocation( repoDir.getAbsolutePath() );
repoConfig.setIndexDir( indexLocation.getAbsolutePath() );

if ( indexLocation.exists() )
{
FileUtils.deleteDirectory( indexLocation );
}

config.getConfiguration().addManagedRepository( repoConfig );
return repository;
}

protected Map getArchivaArtifactDumpMap()
{
Map dumps = new HashMap();

// archiva-common-1.0.jar.txt
dumps.put( "archiva-common",
createArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "", "jar" ) );

// continuum-webapp-1.0.3-SNAPSHOT.war.txt
dumps.put( "continuum-webapp", createArchivaArtifact( "org.apache.maven.continuum", "continuum-webapp",
"1.0.3-SNAPSHOT", "", "war" ) );

// daytrader-ear-1.1.ear.txt
dumps.put( "daytrader-ear", createArchivaArtifact( "org.apache.geronimo", "daytrader-ear", "1.1", "", "ear" ) );

// maven-archetype-simple-1.0-alpha-4.jar.txt
dumps.put( "maven-archetype-simple", createArchivaArtifact( "org.apache.maven", "maven-archetype-simple",
"1.0-alpha-4", "", "maven-archetype" ) );

// maven-help-plugin-2.0.2-20070119.121239-2.jar.txt
dumps.put( "maven-help-plugin", createArchivaArtifact( "org.apache.maven.plugins", "maven-help-plugin",
"2.0.2-20070119.121239-2", "", "maven-plugin" ) );

// redback-authorization-open-1.0-alpha-1-SNAPSHOT.jar.txt
dumps.put( "redback-authorization-open", createArchivaArtifact( "org.codehaus.plexus.redback",
"redback-authorization-open",
"1.0-alpha-1-SNAPSHOT", "", "jar" ) );

// testng-5.1-jdk15.jar.txt
dumps.put( "testng", createArchivaArtifact( "org.testng", "testng", "5.1", "jdk15", "jar" ) );

// wagon-provider-api-1.0-beta-3-20070209.213958-2.jar.txt
dumps.put( "wagon-provider-api", createArchivaArtifact( "org.apache.maven.wagon", "wagon-provider-api",
"1.0-beta-3-20070209.213958-2", "", "jar" ) );

return dumps;
}

protected File getDumpFile( ArchivaArtifact artifact )
{
File dumpDir = new File( getBasedir(), "src/test/artifact-dumps" );
StringBuffer filename = new StringBuffer();

filename.append( artifact.getArtifactId() ).append( "-" ).append( artifact.getVersion() );

if ( artifact.hasClassifier() )
{
filename.append( "-" ).append( artifact.getClassifier() );
}

filename.append( "." );

// TODO: use the ArtifactExtensionMapping object
if ( "maven-plugin".equals( artifact.getType() ) || "maven-archetype".equals( artifact.getType() ) )
{
filename.append( "jar" );
}
else
{
filename.append( artifact.getType() );
}
filename.append( ".txt" );

File dumpFile = new File( dumpDir, filename.toString() );

if ( !dumpFile.exists() )
{
fail( "Dump file " + dumpFile.getAbsolutePath() + " does not exist (should it?)." );
}

return dumpFile;
}

private ArchivaArtifact createArchivaArtifact( String groupId, String artifactId, String version, String classifier,
String type )
{
ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
return artifact;
}

protected void createEmptyIndex()
throws IOException
{
createIndex( Collections.EMPTY_LIST );
}

protected void createIndex( List documents )
throws IOException
{
IndexWriter writer = new IndexWriter( index.getIndexDirectory(), indexHandlers.getAnalyzer(), true );
for ( Iterator i = documents.iterator(); i.hasNext(); )
{
Document document = (Document) i.next();
writer.addDocument( document );
}
writer.optimize();
writer.close();
}
protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
repo.setId( id );
repo.setName( name );
repo.setLocation( location.getAbsolutePath() );
return repo;
}
}

+ 0
- 194
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/AbstractSearchTestCase.java View File

@@ -1,194 +0,0 @@
package org.apache.maven.archiva.indexer;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Hit;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.TermQuery;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import junit.framework.ComparisonFailure;

/**
* AbstractSearchTestCase
*
* @version $Id$
*/
public abstract class AbstractSearchTestCase
extends AbstractIndexerTestCase
{
protected Map records;

protected abstract Map createSampleRecordsMap();

protected void setUp()
throws Exception
{
super.setUp();

records = createSampleRecordsMap();

index.indexRecords( records.values() );
}

protected Query createExactMatchQuery( String field, String value )
{
return new TermQuery( new Term( field, value ) );
}

protected Query createMatchQuery( String field, String value )
throws ParseException
{
QueryParser queryParser = new QueryParser( field, indexHandlers.getAnalyzer() );
queryParser.setLowercaseExpandedTerms( true );
return queryParser.parse( value );
}

protected void assertResults( String expectedKeys[], List actualResults )
{
if ( actualResults == null )
{
fail( "Got null results, expected <" + expectedKeys.length + "> results." );
}

if ( actualResults.isEmpty() )
{
fail( "Got empty results, expected <" + expectedKeys.length + "> results." );
}

if ( expectedKeys.length != actualResults.size() )
{
dumpResults( actualResults );
throw new ComparisonFailure( "Results count", String.valueOf( expectedKeys.length ), String
.valueOf( actualResults.size() ) );
}

assertEquals( "Results count", expectedKeys.length, actualResults.size() );

for ( int i = 0; i < expectedKeys.length; i++ )
{
String key = expectedKeys[i];
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) records.get( key );

if ( record == null )
{
dumpResults( actualResults );
fail( "Expected record <" + key
+ "> not in records map (smack the unit test developer, tell them to fix method " + getName() + ")" );
}

if ( !actualResults.contains( record ) )
{
dumpResults( actualResults );
fail( "Results should contain expected record: " + record );
}
}
}

protected void dumpResults( List results )
{
System.out.println( "Results <" + results.size() + "> - " + getName() );
int i = 1;
for ( Iterator iter = results.iterator(); iter.hasNext(); )
{
Object result = iter.next();
System.out.println( "Result [" + ( i++ ) + "] : " + result );
}
}

protected void assertNoResults( List results )
{
if ( results == null )
{
return;
}

if ( !results.isEmpty() )
{
dumpResults( results );
fail( "Expected no results, but actually got <" + results.size() + "> entries." );
}
}

protected void assertQueryExactMatchNoResults( String key, String term )
throws Exception
{
Query query = createExactMatchQuery( key, term );
List results = search( query );
assertNoResults( results );
}

protected void assertQueryExactMatch( String key, String names[], String term )
throws Exception
{
Query query = createExactMatchQuery( key, term );
List results = search( query );
assertResults( names, results );
}

protected void assertQueryMatch( String key, String names[], String term )
throws Exception
{
Query query = createMatchQuery( key, term );
List results = search( query );
assertResults( names, results );
}

protected void assertQueryMatchNoResults( String key, String term )
throws Exception
{
Query query = createMatchQuery( key, term );

List results = search( query );

assertNoResults( results );
}

protected List search( Query query )
throws RepositoryIndexSearchException, IOException, java.text.ParseException
{
Searcher searcher = (Searcher) index.getSearchable(); // this shouldn't cause a problem.

Hits hits = searcher.search( query );

List results = new ArrayList();
Iterator it = hits.iterator();
while ( it.hasNext() )
{
Hit hit = (Hit) it.next();
Document doc = hit.getDocument();
LuceneRepositoryContentRecord record = index.getEntryConverter().convert( doc );
results.add( record );
}
return results;
}
}

+ 0
- 167
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/RepositoryContentIndexEventListenerTest.java View File

@@ -1,167 +0,0 @@
package org.apache.maven.archiva.indexer;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import java.io.File;
import java.util.List;
import java.util.Map;

import org.apache.commons.io.FileUtils;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.search.BytecodeIndexPopulator;
import org.apache.maven.archiva.indexer.search.FileContentIndexPopulator;
import org.apache.maven.archiva.indexer.search.HashcodesIndexPopulator;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.spring.PlexusToSpringUtils;

public class RepositoryContentIndexEventListenerTest
extends PlexusInSpringTestCase
{
private static final String TEST_DEFAULT_REPOSITORY_NAME = "Test Default Repository";

private static final String TEST_DEFAULT_REPO_ID = "test-repo";

private RepositoryListener listener;

@Override
protected void setUp()
throws Exception
{
super.setUp();

listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "indexer" );
}

public void testWiring()
{
List<RepositoryListener> listeners =
PlexusToSpringUtils.lookupList( PlexusToSpringUtils.buildSpringId( RepositoryListener.class ),
getApplicationContext() );

assertEquals( 1, listeners.size() );
assertEquals( listener, listeners.get( 0 ) );
}

public ArchivaArtifact createArtifact( String artifactId, String version )
{
ArchivaArtifact artifact =
new ArchivaArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
}

public void testDeleteArtifact()
throws Exception
{
RepositoryContentIndexFactory indexFactory =
(RepositoryContentIndexFactory) lookup( RepositoryContentIndexFactory.class.getName(), "lucene" );

File repoDir = new File( getBasedir(), "src/test/managed-repository" );

assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );

ManagedRepositoryConfiguration repository =
createRepository( TEST_DEFAULT_REPO_ID, TEST_DEFAULT_REPOSITORY_NAME, repoDir );

File indexLocation = new File( "target/index-events-" + getName() + "/" );

MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );

ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
repoConfig.setId( TEST_DEFAULT_REPO_ID );
repoConfig.setName( TEST_DEFAULT_REPOSITORY_NAME );
repoConfig.setLocation( repoDir.getAbsolutePath() );
repoConfig.setIndexDir( indexLocation.getAbsolutePath() );
repoConfig.setScanned( true );

if ( indexLocation.exists() )
{
FileUtils.deleteDirectory( indexLocation );
}

config.getConfiguration().addManagedRepository( repoConfig );

// Create the (empty) indexes.
RepositoryContentIndex indexHashcode = indexFactory.createHashcodeIndex( repository );
RepositoryContentIndex indexBytecode = indexFactory.createBytecodeIndex( repository );
RepositoryContentIndex indexContents = indexFactory.createFileContentIndex( repository );

// Now populate them.
Map<String, HashcodesRecord> hashcodesMap = new HashcodesIndexPopulator().populate( new File( getBasedir() ) );
indexHashcode.indexRecords( hashcodesMap.values() );
assertEquals( "Hashcode Key Count", hashcodesMap.size(), indexHashcode.getAllRecordKeys().size() );
assertRecordCount( indexHashcode, hashcodesMap.size() );

Map<String, BytecodeRecord> bytecodeMap = new BytecodeIndexPopulator().populate( new File( getBasedir() ) );
indexBytecode.indexRecords( bytecodeMap.values() );
assertEquals( "Bytecode Key Count", bytecodeMap.size(), indexBytecode.getAllRecordKeys().size() );
assertRecordCount( indexBytecode, bytecodeMap.size() );

Map<String, FileContentRecord> contentMap = new FileContentIndexPopulator().populate( new File( getBasedir() ) );
indexContents.indexRecords( contentMap.values() );
assertEquals( "File Content Key Count", contentMap.size(), indexContents.getAllRecordKeys().size() );
assertRecordCount( indexContents, contentMap.size() );

ManagedRepositoryContent repositoryContent =
(ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
repositoryContent.setRepository( repository );

ArchivaArtifact artifact =
new ArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "", "jar" );
listener.deleteArtifact( repositoryContent, artifact );

artifact =
new ArchivaArtifact( "org.apache.maven.archiva.record", "test-pom", "1.0", "", "pom" );
listener.deleteArtifact( repositoryContent, artifact );

assertRecordCount( indexHashcode, hashcodesMap.size() - 1 );
assertRecordCount( indexBytecode, bytecodeMap.size() - 1 );
assertRecordCount( indexContents, contentMap.size() - 1 );
}

protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
repo.setId( id );
repo.setName( name );
repo.setLocation( location.getAbsolutePath() );
return repo;
}

private void assertRecordCount( RepositoryContentIndex index, int expectedCount )
throws Exception
{
Query query = new MatchAllDocsQuery();
Searcher searcher = (Searcher) index.getSearchable();
Hits hits = searcher.search( query );
assertEquals( "Expected Record Count for " + index.getId(), expectedCount, hits.length() );
}
}
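
For context, the record-count assertions above rely on the stock Lucene 2.x idiom of matching every document and counting the hits. A minimal stand-alone sketch of that idiom, not part of this patch (the class name and index path are placeholders):

import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;

// Sketch only: count every record stored in a Lucene 2.x index directory.
public class IndexRecordCounter
{
    public static int countRecords( String indexPath )
        throws Exception
    {
        IndexSearcher searcher = new IndexSearcher( indexPath ); // open the index at the given path
        try
        {
            Hits hits = searcher.search( new MatchAllDocsQuery() ); // matches every document
            return hits.length();
        }
        finally
        {
            searcher.close();
        }
    }
}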

+ 0
- 43
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/bytecode/AllTests.java

@@ -1,43 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import junit.framework.Test;
import junit.framework.TestSuite;

/**
* AllTests - convenience test suite for IDE users.
*
* @version $Id$
*/
public class AllTests
{

public static Test suite()
{
TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.indexer.bytecode" );
//$JUnit-BEGIN$
suite.addTestSuite( BytecodeSearchTest.class );
suite.addTestSuite( BytecodeIndexTest.class );
//$JUnit-END$
return suite;
}

}

+ 0
- 65
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/bytecode/BytecodeIndexTest.java

@@ -1,65 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.AbstractIndexCreationTestCase;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.io.File;
import java.util.Map;

/**
* BytecodeIndexTest
*
* @version $Id$
*/
public class BytecodeIndexTest extends AbstractIndexCreationTestCase
{
public String getIndexName()
{
return "bytecode";
}

public LuceneIndexHandlers getIndexHandler()
{
return new BytecodeHandlers();
}

public RepositoryContentIndex createIndex( RepositoryContentIndexFactory indexFactory, ManagedRepositoryConfiguration repository )
{
return indexFactory.createBytecodeIndex( repository );
}

protected LuceneRepositoryContentRecord createSimpleRecord()
{
Map dumps = getArchivaArtifactDumpMap();
ArchivaArtifact artifact = (ArchivaArtifact) dumps.get( "archiva-common" );
File dumpFile = getDumpFile( artifact );
BytecodeRecord record = BytecodeRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );
return record;
}
}

+ 0
- 136
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/bytecode/BytecodeRecordLoader.java

@@ -1,136 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import junit.framework.AssertionFailedError;

import org.apache.commons.io.IOUtils;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaArtifactJavaDetails;
import org.apache.maven.archiva.model.platform.JavaArtifactHelper;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;

/**
* BytecodeRecordLoader - Utility class for loading dump files into BytecodeRecord objects.
*
* @version $Id$
*/
public class BytecodeRecordLoader
{
// private static Map cache = new HashMap();

public static BytecodeRecord loadRecord( File dumpFile, ArchivaArtifact artifact )
{
BytecodeRecord record;
// record = (BytecodeRecord) cache.get( artifact );
// if ( record != null )
// {
// return record;
// }

record = new BytecodeRecord();
record.setArtifact( artifact );

record.setClasses( new ArrayList() );
record.setMethods( new ArrayList() );
record.setFiles( new ArrayList() );

FileReader freader = null;
BufferedReader reader = null;

try
{
freader = new FileReader( dumpFile );
reader = new BufferedReader( freader );

String line = reader.readLine();
while ( line != null )
{
if ( line.startsWith( "FILENAME|" ) )
{
String filename = line.substring( "FILENAME|".length() );
record.setFilename( filename );
}
else if ( line.startsWith( "SIZE|" ) )
{
String size = line.substring( "SIZE|".length() );
record.getArtifact().getModel().setSize( Long.parseLong( size ) );
}
else if ( line.startsWith( "HASH_MD5|" ) )
{
String md5 = line.substring( "HASH_MD5|".length() );
record.getArtifact().getModel().setChecksumMD5( md5 );
}
else if ( line.startsWith( "HASH_SHA1|" ) )
{
String sha1 = line.substring( "HASH_SHA1|".length() );
record.getArtifact().getModel().setChecksumSHA1( sha1 );
}
else if ( line.startsWith( "HASH_BYTECODE|" ) )
{
String hash = line.substring( "HASH_BYTECODE|".length() );
ArchivaArtifactJavaDetails javaDetails = JavaArtifactHelper.getJavaDetails( record.getArtifact() );
javaDetails.setChecksumBytecode( hash );
}
else if ( line.startsWith( "JDK|" ) )
{
String jdk = line.substring( "JDK|".length() );
ArchivaArtifactJavaDetails javaDetails = JavaArtifactHelper.getJavaDetails( record.getArtifact() );
javaDetails.setJdk( jdk );
}
else if ( line.startsWith( "CLASS|" ) )
{
String classname = line.substring( "CLASS|".length() );
record.getClasses().add( classname );
}
else if ( line.startsWith( "METHOD|" ) )
{
String methodName = line.substring( "METHOD|".length() );
record.getMethods().add( methodName );
}
else if ( line.startsWith( "FILE|" ) )
{
String fileentry = line.substring( "FILE|".length() );
record.getFiles().add( fileentry );
}

line = reader.readLine();
}
}
catch ( IOException e )
{
throw new AssertionFailedError( "Unable to load record " + dumpFile + " from disk: " + e.getMessage() );
}
finally
{
IOUtils.closeQuietly( reader );
IOUtils.closeQuietly( freader );
}

// cache.put( artifact, record );

return record;
}
}
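
For readers without the fixture files at hand: the loader above consumes a plain-text dump with one KEY|value pair per line, where CLASS, METHOD and FILE entries may repeat and unrecognized prefixes are skipped. A hand-written, illustrative example (not an actual file from the repository):

FILENAME|archiva-common-1.0.jar
SIZE|168421
HASH_MD5|3a0adc365f849366cd8b633cad155cb7
HASH_SHA1|c2635a1b38bd4520a6604664c04b2b3c32330864
JDK|1.4
CLASS|org.apache.maven.archiva.common.utils.BaseFile
METHOD|org.apache.maven.archiva.common.utils.BaseFile.getRelativePath()
FILE|META-INF/MANIFEST.MF

(HASH_BYTECODE is also recognized and routed to the artifact's Java details, as the code shows.)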

+ 0
- 328
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/bytecode/BytecodeSearchTest.java

@@ -1,328 +0,0 @@
package org.apache.maven.archiva.indexer.bytecode;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.AbstractSearchTestCase;
import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
* BytecodeSearchTest
*
* @version $Id$
*/
public class BytecodeSearchTest extends AbstractSearchTestCase
{
public String getIndexName()
{
return "bytecode";
}

public LuceneIndexHandlers getIndexHandler()
{
return new BytecodeHandlers();
}

public RepositoryContentIndex createIndex( RepositoryContentIndexFactory indexFactory, ManagedRepositoryConfiguration repository )
{
return indexFactory.createBytecodeIndex( repository );
}

protected Map createSampleRecordsMap()
{
Map records = new HashMap();

Map artifactDumps = getArchivaArtifactDumpMap();
for ( Iterator iter = artifactDumps.entrySet().iterator(); iter.hasNext(); )
{
Map.Entry entry = (Map.Entry) iter.next();
ArchivaArtifact artifact = (ArchivaArtifact) entry.getValue();
File dumpFile = getDumpFile( artifact );
BytecodeRecord record = BytecodeRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );
records.put( entry.getKey(), record );
}

return records;
}

public void testExactMatchVersionSimple() throws Exception
{
assertQueryExactMatch( ArtifactKeys.VERSION_EXACT, new String[] { "archiva-common" }, "1.0" );
}

public void testExactMatchVersionSnapshot() throws Exception
{
assertQueryExactMatch( ArtifactKeys.VERSION_EXACT, new String[] { "continuum-webapp" }, "1.0.3-SNAPSHOT" );
}

public void testExactMatchVersionAlphaSnapshot() throws Exception
{
assertQueryExactMatch( ArtifactKeys.VERSION_EXACT, new String[] { "redback-authorization-open" },
"1.0-alpha-1-SNAPSHOT" );
}

public void testExactMatchVersionTimestampedSnapshot() throws Exception
{
assertQueryExactMatch( ArtifactKeys.VERSION_EXACT, new String[] { "wagon-provider-api" },
"1.0-beta-3-20070209.213958-2" );
}

public void testExactMatchVersionInvalid() throws Exception
{
assertQueryExactMatchNoResults( ArtifactKeys.VERSION_EXACT, "foo" );
}

public void testExactMatchGroupIdOrgApacheMavenArchiva() throws Exception
{
assertQueryExactMatch( ArtifactKeys.GROUPID_EXACT, new String[] { "archiva-common" },
"org.apache.maven.archiva" );
}

public void testExactMatchGroupIdOrgApacheMaven() throws Exception
{
assertQueryExactMatch( ArtifactKeys.GROUPID_EXACT, new String[] { "maven-archetype-simple" },
"org.apache.maven" );
}

public void testExactMatchGroupIdInvalid() throws Exception
{
assertQueryExactMatchNoResults( ArtifactKeys.GROUPID_EXACT, "foo" );
}

public void testExactMatchArtifactIdArchivaCommon() throws Exception
{
assertQueryExactMatch( ArtifactKeys.ARTIFACTID_EXACT, new String[] { "archiva-common" }, "archiva-common" );
}

public void testExactMatchArtifactIdTestNg() throws Exception
{
assertQueryExactMatch( ArtifactKeys.ARTIFACTID_EXACT, new String[] { "testng" }, "testng" );
}

public void testExactMatchArtifactIdInvalid() throws Exception
{
assertQueryExactMatchNoResults( ArtifactKeys.ARTIFACTID_EXACT, "foo" );
}

public void testExactMatchTypeJar() throws Exception
{
assertQueryExactMatch( ArtifactKeys.TYPE, ( new String[] { "archiva-common", "redback-authorization-open",
"testng", "wagon-provider-api" } ), "jar" );
}

public void testExactMatchTypeWar() throws Exception
{
assertQueryExactMatch( ArtifactKeys.TYPE, ( new String[] { "continuum-webapp" } ), "war" );
}

/* TODO: Fix 'maven-plugin' type
public void testExactMatchTypePlugin() throws Exception
{
assertQueryExactMatch( ArtifactKeys.TYPE, ( new String[] { "maven-help-plugin" } ), "maven-plugin" );
} */

/* TODO: Fix 'maven-archetype' type
public void testExactMatchTypeArchetype() throws Exception
{
assertQueryExactMatch( ArtifactKeys.TYPE, ( new String[] { "maven-archetype-simple" } ), "maven-archetype" );
}
*/

public void testExactMatchTypeInvalid() throws Exception
{
assertQueryExactMatchNoResults( ArtifactKeys.TYPE, "foo" );
}

public void testMatchGroupIdOrgApacheMaven() throws Exception
{
assertQueryMatch( ArtifactKeys.GROUPID, new String[] { "archiva-common", "continuum-webapp",
"maven-archetype-simple", "maven-help-plugin", "wagon-provider-api" }, "org.apache.maven" );
}

public void testMatchGroupIdMaven() throws Exception
{
assertQueryMatch( ArtifactKeys.GROUPID, new String[] { "archiva-common", "continuum-webapp",
"maven-archetype-simple", "maven-help-plugin", "wagon-provider-api" }, "maven" );
}

public void testMatchGroupIdMavenMixed() throws Exception
{
assertQueryMatch( ArtifactKeys.GROUPID, new String[] { "archiva-common", "continuum-webapp",
"maven-archetype-simple", "maven-help-plugin", "wagon-provider-api" }, "Maven" );
}

public void testMatchGroupIdInvalid() throws Exception
{
assertQueryMatchNoResults( ArtifactKeys.GROUPID, "foo" );
}

public void testMatchArtifactIdPlugin() throws Exception
{
assertQueryMatch( ArtifactKeys.ARTIFACTID, new String[] { "maven-help-plugin" }, "plugin" );
}

public void testMatchArtifactIdMaven() throws Exception
{
assertQueryMatch( ArtifactKeys.ARTIFACTID, new String[] { "maven-help-plugin", "maven-archetype-simple" },
"maven" );
}

public void testMatchArtifactIdHelp() throws Exception
{
assertQueryMatch( ArtifactKeys.ARTIFACTID, new String[] { "maven-help-plugin" }, "help" );
}

public void testMatchVersionOne() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "daytrader-ear", "testng", "archiva-common",
"redback-authorization-open", "maven-archetype-simple", "continuum-webapp", "wagon-provider-api" }, "1" );
}

public void testMatchVersionOneOh() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "archiva-common", "continuum-webapp",
"maven-archetype-simple", "redback-authorization-open", "wagon-provider-api" }, "1.0" );
}

public void testMatchVersionSnapshotLower() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "continuum-webapp", "redback-authorization-open" },
"snapshot" );
}

public void testMatchVersionSnapshotUpper() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "continuum-webapp", "redback-authorization-open" },
"SNAPSHOT" );
}

public void testMatchVersionAlpha() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION,
new String[] { "maven-archetype-simple", "redback-authorization-open" }, "alpha" );
}

public void testMatchVersionOneAlpha() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "redback-authorization-open" }, "1.0-alpha-1" );
}

public void testMatchVersionInvalid() throws Exception
{
assertQueryMatchNoResults( ArtifactKeys.VERSION, "255" );
}

public void testMatchClassifierNotJdk15() throws Exception
{
BooleanQuery bQuery = new BooleanQuery();
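// A BooleanQuery built solely from MUST_NOT clauses matches nothing in Lucene,
// so the MatchAllDocsQuery MUST clause supplies the positive set to subtract from.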
bQuery.add( new MatchAllDocsQuery(), BooleanClause.Occur.MUST );
bQuery.add( createMatchQuery( ArtifactKeys.CLASSIFIER, "jdk15" ), BooleanClause.Occur.MUST_NOT );
List results = search( bQuery );

assertResults( new String[] { "archiva-common", "continuum-webapp", "redback-authorization-open",
"daytrader-ear", "maven-archetype-simple", "maven-help-plugin", "wagon-provider-api" }, results );
}

public void testMatchClassifierJdk15() throws Exception
{
assertQueryMatch( ArtifactKeys.CLASSIFIER, new String[] { "testng" }, "jdk15" );
}

public void testMatchClassifierInvalid() throws Exception
{
assertQueryMatchNoResults( ArtifactKeys.CLASSIFIER, "redo" );
}

public void testMatchClassSessionListener() throws Exception
{
assertQueryMatch( BytecodeKeys.CLASSES, new String[] { "wagon-provider-api" }, "wagon.events.SessionListener" );
}

/* TODO: Suffix searching does not seem to work.
public void testMatchClassUtil() throws Exception
{
assertQueryMatch( BytecodeKeys.CLASSES, new String[] { "archiva-common", "continuum-webapp", "testng",
"wagon-provider-api" }, "Util" );
}
*/

public void testMatchClassWagon() throws Exception
{
assertQueryMatch( BytecodeKeys.CLASSES, new String[] { "wagon-provider-api" }, "Wagon" );
}

/* TODO: Suffix searching does not seem to work.
public void testMatchClassMojoAllUpper() throws Exception
{
assertQueryMatch( BytecodeKeys.CLASSES, new String[] { "maven-help-plugin" }, "MOJO" );
}
*/

/* TODO: Suffix searching does not seem to work.
public void testMatchClassMojo() throws Exception
{
assertQueryMatch( BytecodeKeys.CLASSES, new String[] { "maven-help-plugin" }, "Mojo" );
}
*/

public void testMatchClassInvalid() throws Exception
{
assertQueryMatchNoResults( BytecodeKeys.CLASSES, "Destruct|Button" );
}

public void testMatchFilesManifestMf() throws Exception
{
assertQueryMatch( BytecodeKeys.FILES, new String[] { "daytrader-ear", "maven-archetype-simple",
"redback-authorization-open", "maven-help-plugin", "archiva-common", "wagon-provider-api",
"continuum-webapp", "testng" }, "MANIFEST.MF" );
}

public void testMatchFilesMetaInf() throws Exception
{
assertQueryMatch( BytecodeKeys.FILES, new String[] { "daytrader-ear", "maven-archetype-simple",
"redback-authorization-open", "maven-help-plugin", "archiva-common", "wagon-provider-api",
"continuum-webapp", "testng" }, "META-INF" );
}

public void testMatchFilesPluginXml() throws Exception
{
assertQueryMatch( BytecodeKeys.FILES, new String[] { "maven-help-plugin" }, "plugin.xml" );
}

public void testMatchFilesInvalid() throws Exception
{
assertQueryMatchNoResults( BytecodeKeys.FILES, "Veni Vidi Castratavi Illegitimos" );
}

}

+ 0
- 43
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/hashcodes/AllTests.java

@@ -1,43 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import junit.framework.Test;
import junit.framework.TestSuite;

/**
* AllTests - convenience test suite for IDE users.
*
* @version $Id$
*/
public class AllTests
{

public static Test suite()
{
TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.indexer.hashcodes" );
//$JUnit-BEGIN$
suite.addTestSuite( HashcodesIndexTest.class );
suite.addTestSuite( HashcodesSearchTest.class );
//$JUnit-END$
return suite;
}

}

+ 0
- 65
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesIndexTest.java

@@ -1,65 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.AbstractIndexCreationTestCase;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

/**
* HashcodesIndexTest
*
* @version $Id$
*/
public class HashcodesIndexTest extends AbstractIndexCreationTestCase
{
public String getIndexName()
{
return "hashcodes";
}

public LuceneIndexHandlers getIndexHandler()
{
return new HashcodesHandlers();
}

public RepositoryContentIndex createIndex( RepositoryContentIndexFactory indexFactory, ManagedRepositoryConfiguration repository )
{
return indexFactory.createHashcodeIndex( repository );
}

protected LuceneRepositoryContentRecord createSimpleRecord()
{
ArchivaArtifact artifact = new ArchivaArtifact( "com.foo", "projfoo", "1.0", "", "jar" );
HashcodesRecord record = new HashcodesRecord();
record.setRepositoryId( "test-repo" );
record.setArtifact( artifact );
artifact.getModel().setChecksumSHA1( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
artifact.getModel().setChecksumMD5( "3a0adc365f849366cd8b633cad155cb7" );
return record;
}
}

+ 0
- 105
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesRecordLoader.java

@@ -1,105 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.commons.io.IOUtils;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaArtifactJavaDetails;
import org.apache.maven.archiva.model.platform.JavaArtifactHelper;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

import junit.framework.AssertionFailedError;

/**
* HashcodesRecordLoader
*
* @version $Id$
*/
public class HashcodesRecordLoader
{
public static HashcodesRecord loadRecord( File dumpFile, ArchivaArtifact artifact )
{
HashcodesRecord record = new HashcodesRecord();
record.setArtifact( artifact );

FileReader freader = null;
BufferedReader reader = null;

try
{
freader = new FileReader( dumpFile );
reader = new BufferedReader( freader );

String line = reader.readLine();
while ( line != null )
{
if ( line.startsWith( "FILENAME|" ) )
{
String filename = line.substring( "FILENAME|".length() );
record.setFilename( filename );
}
else if ( line.startsWith( "SIZE|" ) )
{
String size = line.substring( "SIZE|".length() );
record.getArtifact().getModel().setSize( Long.parseLong( size ) );
}
else if ( line.startsWith( "HASH_MD5|" ) )
{
String md5 = line.substring( "HASH_MD5|".length() );
record.getArtifact().getModel().setChecksumMD5( md5 );
}
else if ( line.startsWith( "HASH_SHA1|" ) )
{
String sha1 = line.substring( "HASH_SHA1|".length() );
record.getArtifact().getModel().setChecksumSHA1( sha1 );
}
else if ( line.startsWith( "HASH_BYTECODE|" ) )
{
String hash = line.substring( "HASH_BYTECODE|".length() );
ArchivaArtifactJavaDetails javaDetails = JavaArtifactHelper.getJavaDetails( record.getArtifact() );
javaDetails.setChecksumBytecode( hash );
}
else if ( line.startsWith( "JDK|" ) )
{
String jdk = line.substring( "JDK|".length() );
ArchivaArtifactJavaDetails javaDetails = JavaArtifactHelper.getJavaDetails( record.getArtifact() );
javaDetails.setJdk( jdk );
}

line = reader.readLine();
}
}
catch ( IOException e )
{
throw new AssertionFailedError( "Unable to load record " + dumpFile + " from disk: " + e.getMessage() );
}
finally
{
IOUtils.closeQuietly( reader );
IOUtils.closeQuietly( freader );
}

return record;
}
}

+ 0
- 288
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/hashcodes/HashcodesSearchTest.java

@@ -1,288 +0,0 @@
package org.apache.maven.archiva.indexer.hashcodes;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.AbstractSearchTestCase;
import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.lucene.LuceneIndexHandlers;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
* HashcodesSearchTest
*
* @version $Id$
*/
public class HashcodesSearchTest extends AbstractSearchTestCase
{
public String getIndexName()
{
return "hashcodes";
}

public LuceneIndexHandlers getIndexHandler()
{
return new HashcodesHandlers();
}

public RepositoryContentIndex createIndex( RepositoryContentIndexFactory indexFactory, ManagedRepositoryConfiguration repository )
{
return indexFactory.createHashcodeIndex( repository );
}

protected Map createSampleRecordsMap()
{
Map records = new HashMap();

Map artifactDumps = getArchivaArtifactDumpMap();
for ( Iterator iter = artifactDumps.entrySet().iterator(); iter.hasNext(); )
{
Map.Entry entry = (Map.Entry) iter.next();
ArchivaArtifact artifact = (ArchivaArtifact) entry.getValue();
File dumpFile = getDumpFile( artifact );
HashcodesRecord record = HashcodesRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );
records.put( entry.getKey(), record );
}

return records;
}

public void testExactMatchVersionSimple() throws Exception
{
assertQueryExactMatch( ArtifactKeys.VERSION_EXACT, new String[] { "archiva-common" }, "1.0" );
}

public void testExactMatchVersionSnapshot() throws Exception
{
assertQueryExactMatch( ArtifactKeys.VERSION_EXACT, new String[] { "continuum-webapp" }, "1.0.3-SNAPSHOT" );
}

public void testExactMatchVersionAlphaSnapshot() throws Exception
{
assertQueryExactMatch( ArtifactKeys.VERSION_EXACT, new String[] { "redback-authorization-open" },
"1.0-alpha-1-SNAPSHOT" );
}

public void testExactMatchVersionTimestampedSnapshot() throws Exception
{
assertQueryExactMatch( ArtifactKeys.VERSION_EXACT, new String[] { "wagon-provider-api" },
"1.0-beta-3-20070209.213958-2" );
}

public void testExactMatchVersionInvalid() throws Exception
{
assertQueryExactMatchNoResults( ArtifactKeys.VERSION_EXACT, "foo" );
}

public void testExactMatchGroupIdOrgApacheMavenArchiva() throws Exception
{
assertQueryExactMatch( ArtifactKeys.GROUPID_EXACT, new String[] { "archiva-common" },
"org.apache.maven.archiva" );
}

public void testExactMatchGroupIdOrgApacheMaven() throws Exception
{
assertQueryExactMatch( ArtifactKeys.GROUPID_EXACT, new String[] { "maven-archetype-simple" },
"org.apache.maven" );
}

public void testExactMatchGroupIdInvalid() throws Exception
{
assertQueryExactMatchNoResults( ArtifactKeys.GROUPID_EXACT, "foo" );
}

public void testExactMatchArtifactIdArchivaCommon() throws Exception
{
assertQueryExactMatch( ArtifactKeys.ARTIFACTID_EXACT, new String[] { "archiva-common" }, "archiva-common" );
}

public void testExactMatchArtifactIdTestNg() throws Exception
{
assertQueryExactMatch( ArtifactKeys.ARTIFACTID_EXACT, new String[] { "testng" }, "testng" );
}

public void testExactMatchArtifactIdInvalid() throws Exception
{
assertQueryExactMatchNoResults( ArtifactKeys.ARTIFACTID_EXACT, "foo" );
}

public void testExactMatchTypeJar() throws Exception
{
assertQueryExactMatch( ArtifactKeys.TYPE, ( new String[] { "archiva-common", "redback-authorization-open",
"testng", "wagon-provider-api" } ), "jar" );
}

public void testExactMatchTypeWar() throws Exception
{
assertQueryExactMatch( ArtifactKeys.TYPE, ( new String[] { "continuum-webapp" } ), "war" );
}

/* TODO: Fix 'maven-plugin' type
public void testExactMatchTypePlugin() throws Exception
{
assertQueryExactMatch( ArtifactKeys.TYPE, ( new String[] { "maven-help-plugin" } ), "maven-plugin" );
} */

/* TODO: Fix 'maven-archetype' type
public void testExactMatchTypeArchetype() throws Exception
{
assertQueryExactMatch( ArtifactKeys.TYPE, ( new String[] { "maven-archetype-simple" } ), "maven-archetype" );
}
*/

public void testExactMatchTypeInvalid() throws Exception
{
assertQueryExactMatchNoResults( ArtifactKeys.TYPE, "foo" );
}

public void testExactMatchMd5() throws Exception
{
assertQueryExactMatch( HashcodesKeys.MD5, ( new String[] { "redback-authorization-open" } ),
"f42047fe2e177ac04d0df7aa44d408be" );
}

public void testExactMatchMd5Invalid() throws Exception
{
assertQueryExactMatchNoResults( HashcodesKeys.MD5, "foo" );
}

public void testExactMatchSha1() throws Exception
{
assertQueryExactMatch( HashcodesKeys.SHA1, ( new String[] { "archiva-common" } ),
"c2635a1b38bd4520a6604664c04b2b3c32330864" );
}

public void testExactMatchSha1Invalid() throws Exception
{
assertQueryExactMatchNoResults( HashcodesKeys.SHA1, "foo" );
}

public void testMatchGroupIdOrgApacheMaven() throws Exception
{
assertQueryMatch( ArtifactKeys.GROUPID, new String[] { "archiva-common", "continuum-webapp",
"maven-archetype-simple", "maven-help-plugin", "wagon-provider-api" }, "org.apache.maven" );
}

public void testMatchGroupIdMaven() throws Exception
{
assertQueryMatch( ArtifactKeys.GROUPID, new String[] { "archiva-common", "continuum-webapp",
"maven-archetype-simple", "maven-help-plugin", "wagon-provider-api" }, "maven" );
}

public void testMatchGroupIdMavenMixed() throws Exception
{
assertQueryMatch( ArtifactKeys.GROUPID, new String[] { "archiva-common", "continuum-webapp",
"maven-archetype-simple", "maven-help-plugin", "wagon-provider-api" }, "Maven" );
}

public void testMatchGroupIdInvalid() throws Exception
{
assertQueryMatchNoResults( ArtifactKeys.GROUPID, "foo" );
}

public void testMatchArtifactIdPlugin() throws Exception
{
assertQueryMatch( ArtifactKeys.ARTIFACTID, new String[] { "maven-help-plugin" }, "plugin" );
}

public void testMatchArtifactIdMaven() throws Exception
{
assertQueryMatch( ArtifactKeys.ARTIFACTID, new String[] { "maven-help-plugin", "maven-archetype-simple" },
"maven" );
}

public void testMatchArtifactIdHelp() throws Exception
{
assertQueryMatch( ArtifactKeys.ARTIFACTID, new String[] { "maven-help-plugin" }, "help" );
}

public void testMatchVersionOne() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "daytrader-ear", "testng", "archiva-common",
"redback-authorization-open", "maven-archetype-simple", "continuum-webapp", "wagon-provider-api" }, "1" );
}

public void testMatchVersionOneOh() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "archiva-common", "continuum-webapp",
"maven-archetype-simple", "redback-authorization-open", "wagon-provider-api" }, "1.0" );
}

public void testMatchVersionSnapshotLower() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "continuum-webapp", "redback-authorization-open" },
"snapshot" );
}

public void testMatchVersionSnapshotUpper() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "continuum-webapp", "redback-authorization-open" },
"SNAPSHOT" );
}

public void testMatchVersionAlpha() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION,
new String[] { "maven-archetype-simple", "redback-authorization-open" }, "alpha" );
}

public void testMatchVersionOneAlpha() throws Exception
{
assertQueryMatch( ArtifactKeys.VERSION, new String[] { "redback-authorization-open" }, "1.0-alpha-1" );
}

public void testMatchVersionInvalid() throws Exception
{
assertQueryMatchNoResults( ArtifactKeys.VERSION, "255" );
}

public void testMatchClassifierNotJdk15() throws Exception
{
BooleanQuery bQuery = new BooleanQuery();
bQuery.add( new MatchAllDocsQuery(), BooleanClause.Occur.MUST );
bQuery.add( createMatchQuery( ArtifactKeys.CLASSIFIER, "jdk15" ), BooleanClause.Occur.MUST_NOT );
List results = search( bQuery );

assertResults( new String[] { "archiva-common", "continuum-webapp", "redback-authorization-open",
"daytrader-ear", "maven-archetype-simple", "maven-help-plugin", "wagon-provider-api" }, results );
}

public void testMatchClassifierJdk15() throws Exception
{
assertQueryMatch( ArtifactKeys.CLASSIFIER, new String[] { "testng" }, "jdk15" );
}

public void testMatchClassifierInvalid() throws Exception
{
assertQueryMatchNoResults( ArtifactKeys.CLASSIFIER, "redo" );
}
}

+ 0
- 42
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/query/AllTests.java

@@ -1,42 +0,0 @@
package org.apache.maven.archiva.indexer.query;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import junit.framework.Test;
import junit.framework.TestSuite;

/**
* AllTests - convenience test suite for IDE users.
*
* @version $Id$
*/
public class AllTests
{

public static Test suite()
{
TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.indexer.query" );
//$JUnit-BEGIN$
suite.addTestSuite( QueryTest.class );
//$JUnit-END$
return suite;
}

}

+ 0
- 158
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/query/QueryTest.java

@@ -1,158 +0,0 @@
package org.apache.maven.archiva.indexer.query;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import junit.framework.TestCase;

import java.util.Iterator;

/**
*/
public class QueryTest
extends TestCase
{
private QueryTerm term1 = new QueryTerm( "field1", "value1" );

private QueryTerm term2 = new QueryTerm( "field2", "value2" );

private QueryTerm term3 = new QueryTerm( "field3", "value3" );

public void testQueryTerm()
{
QueryTerm query = new QueryTerm( "Field", "Value" );
assertEquals( "check field setting", "Field", query.getField() );
assertEquals( "check value setting", "Value", query.getValue() );
}

public void testSingleTermQuery()
{
SingleTermQuery query = new SingleTermQuery( "Field", "Value" );
assertEquals( "check field setting", "Field", query.getField() );
assertEquals( "check value setting", "Value", query.getValue() );

query = new SingleTermQuery( term1 );
assertEquals( "check field setting", "field1", query.getField() );
assertEquals( "check value setting", "value1", query.getValue() );
}

public void testRangeQueryOpen()
{
RangeQuery rangeQuery = RangeQuery.createOpenRange();
assertNull( "Check range has no start", rangeQuery.getBegin() );
assertNull( "Check range has no end", rangeQuery.getEnd() );
}

public void testRangeQueryExclusive()
{
RangeQuery rangeQuery = RangeQuery.createExclusiveRange( term1, term2 );
assertEquals( "Check range start", term1, rangeQuery.getBegin() );
assertEquals( "Check range end", term2, rangeQuery.getEnd() );
assertFalse( "Check exclusive", rangeQuery.isInclusive() );
}

public void testRangeQueryInclusive()
{
RangeQuery rangeQuery = RangeQuery.createInclusiveRange( term1, term2 );
assertEquals( "Check range start", term1, rangeQuery.getBegin() );
assertEquals( "Check range end", term2, rangeQuery.getEnd() );
assertTrue( "Check inclusive", rangeQuery.isInclusive() );
}

public void testRangeQueryOpenEnded()
{
RangeQuery rangeQuery = RangeQuery.createGreaterThanOrEqualToRange( term1 );
assertEquals( "Check range start", term1, rangeQuery.getBegin() );
assertNull( "Check range end", rangeQuery.getEnd() );
assertTrue( "Check inclusive", rangeQuery.isInclusive() );

rangeQuery = RangeQuery.createGreaterThanRange( term1 );
assertEquals( "Check range start", term1, rangeQuery.getBegin() );
assertNull( "Check range end", rangeQuery.getEnd() );
assertFalse( "Check exclusive", rangeQuery.isInclusive() );

rangeQuery = RangeQuery.createLessThanOrEqualToRange( term1 );
assertNull( "Check range start", rangeQuery.getBegin() );
assertEquals( "Check range end", term1, rangeQuery.getEnd() );
assertTrue( "Check inclusive", rangeQuery.isInclusive() );

rangeQuery = RangeQuery.createLessThanRange( term1 );
assertNull( "Check range start", rangeQuery.getBegin() );
assertEquals( "Check range end", term1, rangeQuery.getEnd() );
assertFalse( "Check exclusive", rangeQuery.isInclusive() );
}

public void testCompundQuery()
{
CompoundQuery query = new CompoundQuery();
assertTrue( "check query is empty", query.getCompoundQueryTerms().isEmpty() );

query.and( term1 );
query.or( term2 );
query.not( term3 );

Iterator i = query.getCompoundQueryTerms().iterator();
CompoundQueryTerm term = (CompoundQueryTerm) i.next();
assertEquals( "Check first term", "field1", getQuery( term ).getField() );
assertEquals( "Check first term", "value1", getQuery( term ).getValue() );
assertTrue( "Check first term", term.isRequired() );
assertFalse( "Check first term", term.isProhibited() );

term = (CompoundQueryTerm) i.next();
assertEquals( "Check second term", "field2", getQuery( term ).getField() );
assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
assertFalse( "Check second term", term.isRequired() );
assertFalse( "Check second term", term.isProhibited() );

term = (CompoundQueryTerm) i.next();
assertEquals( "Check third term", "field3", getQuery( term ).getField() );
assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
assertFalse( "Check third term", term.isRequired() );
assertTrue( "Check third term", term.isProhibited() );

CompoundQuery query2 = new CompoundQuery();
query2.and( query );
query2.or( new SingleTermQuery( term2 ) );
query2.not( new SingleTermQuery( term3 ) );

i = query2.getCompoundQueryTerms().iterator();
term = (CompoundQueryTerm) i.next();
assertEquals( "Check first term", query, term.getQuery() );
assertTrue( "Check first term", term.isRequired() );
assertFalse( "Check first term", term.isProhibited() );

term = (CompoundQueryTerm) i.next();
assertEquals( "Check second term", "field2", getQuery( term ).getField() );
assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
assertFalse( "Check second term", term.isRequired() );
assertFalse( "Check second term", term.isProhibited() );

term = (CompoundQueryTerm) i.next();
assertEquals( "Check third term", "field3", getQuery( term ).getField() );
assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
assertFalse( "Check third term", term.isRequired() );
assertTrue( "Check third term", term.isProhibited() );
}

private static SingleTermQuery getQuery( CompoundQueryTerm term )
{
return (SingleTermQuery) term.getQuery();
}
}
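
As a quick orientation to the API being removed, the composition style exercised by this test looks roughly like the following in client code; the field names and values here are illustrative, not taken from the patch:

import org.apache.maven.archiva.indexer.query.CompoundQuery;
import org.apache.maven.archiva.indexer.query.QueryTerm;
import org.apache.maven.archiva.indexer.query.RangeQuery;

// Sketch only: composing the removed query abstraction.
public class QueryCompositionExample
{
    public static CompoundQuery snapshotFreeGroupQuery()
    {
        QueryTerm groupId = new QueryTerm( "groupId", "org.apache.maven.archiva" );
        QueryTerm snapshot = new QueryTerm( "version", "SNAPSHOT" );

        CompoundQuery query = new CompoundQuery();
        query.and( groupId );  // required term
        query.not( snapshot ); // prohibited term
        return query;
    }

    public static RangeQuery fromVersionOneInclusive()
    {
        // Inclusive, open-ended range, as covered by testRangeQueryOpenEnded() above.
        return RangeQuery.createGreaterThanOrEqualToRange( new QueryTerm( "version", "1.0" ) );
    }
}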


+ 0
- 143
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/BytecodeIndexPopulator.java

@@ -1,143 +0,0 @@
package org.apache.maven.archiva.indexer.search;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecordLoader;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import junit.framework.AssertionFailedError;

/**
* BytecodeIndexPopulator
*
* @version $Id$
*/
public class BytecodeIndexPopulator
implements IndexPopulator
{

public Map<String,ArchivaArtifact> getObjectMap()
{

Map<String,ArchivaArtifact> dumps = new HashMap<String,ArchivaArtifact>();

// archiva-common-1.0.jar.txt
dumps.put( "archiva-common",
createArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "", "jar" ) );

// continuum-webapp-1.0.3-SNAPSHOT.war.txt
dumps.put( "continuum-webapp", createArchivaArtifact( "org.apache.maven.continuum", "continuum-webapp",
"1.0.3-SNAPSHOT", "", "war" ) );

// daytrader-ear-1.1.ear.txt
dumps.put( "daytrader-ear", createArchivaArtifact( "org.apache.geronimo", "daytrader-ear", "1.1", "", "ear" ) );

// maven-archetype-simple-1.0-alpha-4.jar.txt
dumps.put( "maven-archetype-simple", createArchivaArtifact( "org.apache.maven", "maven-archetype-simple",
"1.0-alpha-4", "", "maven-archetype" ) );

// maven-help-plugin-2.0.2-20070119.121239-2.jar.txt
dumps.put( "maven-help-plugin", createArchivaArtifact( "org.apache.maven.plugins", "maven-help-plugin",
"2.0.2-20070119.121239-2", "", "maven-plugin" ) );

// redback-authorization-open-1.0-alpha-1-SNAPSHOT.jar.txt
dumps.put( "redback-authorization-open", createArchivaArtifact( "org.codehaus.plexus.redback",
"redback-authorization-open",
"1.0-alpha-1-SNAPSHOT", "", "jar" ) );

// testng-5.1-jdk15.jar.txt
dumps.put( "testng", createArchivaArtifact( "org.testng", "testng", "5.1", "jdk15", "jar" ) );

// wagon-provider-api-1.0-beta-3-20070209.213958-2.jar.txt
dumps.put( "wagon-provider-api", createArchivaArtifact( "org.apache.maven.wagon", "wagon-provider-api",
"1.0-beta-3-20070209.213958-2", "", "jar" ) );
// a-common5
dumps.put( "a-common5", createArchivaArtifact( "a", "a-common5", "1.0", "", "jar" ) );

return dumps;

}

private ArchivaArtifact createArchivaArtifact( String groupId, String artifactId, String version, String classifier,
String type )
{
ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
return artifact;
}

public Map<String, BytecodeRecord> populate( File basedir )
{
Map<String, BytecodeRecord> records = new HashMap<String, BytecodeRecord>();

for ( Entry<String, ArchivaArtifact> entry : getObjectMap().entrySet() )
{
ArchivaArtifact artifact = entry.getValue();
File dumpFile = getDumpFile( basedir, artifact );
BytecodeRecord record = BytecodeRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );
records.put( entry.getKey(), record );
}

return records;
}

protected File getDumpFile( File basedir, ArchivaArtifact artifact )
{
File dumpDir = new File( basedir, "src/test/artifact-dumps" );
StringBuffer filename = new StringBuffer();

filename.append( artifact.getArtifactId() ).append( "-" ).append( artifact.getVersion() );

if ( artifact.hasClassifier() )
{
filename.append( "-" ).append( artifact.getClassifier() );
}

filename.append( "." );

// TODO: use the ArtifactExtensionMapping object
if ( "maven-plugin".equals( artifact.getType() ) || "maven-archetype".equals( artifact.getType() ) )
{
filename.append( "jar" );
}
else
{
filename.append( artifact.getType() );
}
filename.append( ".txt" );

File dumpFile = new File( dumpDir, filename.toString() );

if ( !dumpFile.exists() )
{
throw new AssertionFailedError(
"Dump file " + dumpFile.getAbsolutePath() + " does not exist (should it?)." );
}

return dumpFile;
}
}
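
To make the dump filename convention in getDumpFile() concrete, two of the artifacts registered above resolve to the following files (paths relative to the module basedir; note that the maven-plugin and maven-archetype types fall back to a .jar extension):

org.testng:testng:5.1 (classifier jdk15, type jar) -> src/test/artifact-dumps/testng-5.1-jdk15.jar.txt
org.apache.maven.plugins:maven-help-plugin:2.0.2-20070119.121239-2 (type maven-plugin) -> src/test/artifact-dumps/maven-help-plugin-2.0.2-20070119.121239-2.jar.txt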

+ 0
- 327
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/DefaultCrossRepositorySearchTest.java

@@ -1,327 +0,0 @@
package org.apache.maven.archiva.indexer.search;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.commons.io.FileUtils;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.MockConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
* DefaultCrossRepositorySearchTest
*
* @version $Id$
*/
public class DefaultCrossRepositorySearchTest
extends PlexusInSpringTestCase
{
private static final String TEST_DEFAULT_REPOSITORY_NAME = "Test Default Repository";

private static final String TEST_DEFAULT_REPO_ID = "testDefaultRepo";

@Override
protected void setUp()
throws Exception
{
super.setUp();

RepositoryContentIndexFactory indexFactory =
(RepositoryContentIndexFactory) lookup( RepositoryContentIndexFactory.class
.getName(), "lucene" );

File repoDir = new File( getBasedir(), "src/test/managed-repository" );

assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );

ManagedRepositoryConfiguration repository = createRepository( TEST_DEFAULT_REPO_ID, TEST_DEFAULT_REPOSITORY_NAME, repoDir );

File indexLocation = new File( "target/index-crossrepo-" + getName() + "/" );

MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );

ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
repoConfig.setId( TEST_DEFAULT_REPO_ID );
repoConfig.setName( TEST_DEFAULT_REPOSITORY_NAME );
repoConfig.setLocation( repoDir.getAbsolutePath() );
repoConfig.setIndexDir( indexLocation.getAbsolutePath() );
repoConfig.setScanned( true );

if ( indexLocation.exists() )
{
FileUtils.deleteDirectory( indexLocation );
}

config.getConfiguration().addManagedRepository( repoConfig );

// Create the (empty) indexes.
RepositoryContentIndex indexHashcode = indexFactory.createHashcodeIndex( repository );
RepositoryContentIndex indexBytecode = indexFactory.createBytecodeIndex( repository );
RepositoryContentIndex indexContents = indexFactory.createFileContentIndex( repository );

// Now populate them.
Map<String, HashcodesRecord> hashcodesMap = new HashcodesIndexPopulator().populate( new File( getBasedir() ) );
indexHashcode.indexRecords( hashcodesMap.values() );
assertEquals( "Hashcode Key Count", hashcodesMap.size(), indexHashcode.getAllRecordKeys().size() );
assertRecordCount( indexHashcode, hashcodesMap.size() );

Map<String, BytecodeRecord> bytecodeMap = new BytecodeIndexPopulator().populate( new File( getBasedir() ) );
indexBytecode.indexRecords( bytecodeMap.values() );
assertEquals( "Bytecode Key Count", bytecodeMap.size(), indexBytecode.getAllRecordKeys().size() );
assertRecordCount( indexBytecode, bytecodeMap.size() );

Map<String, FileContentRecord> contentMap = new FileContentIndexPopulator().populate( new File( getBasedir() ) );
indexContents.indexRecords( contentMap.values() );
assertEquals( "File Content Key Count", contentMap.size(), indexContents.getAllRecordKeys().size() );
assertRecordCount( indexContents, contentMap.size() );
}

private void assertRecordCount( RepositoryContentIndex index, int expectedCount )
throws Exception
{
Query query = new MatchAllDocsQuery();
Searcher searcher = (Searcher) index.getSearchable();
Hits hits = searcher.search( query );
assertEquals( "Expected Record Count for " + index.getId(), expectedCount, hits.length() );
}

private CrossRepositorySearch lookupCrossRepositorySearch()
throws Exception
{
CrossRepositorySearch search =
(CrossRepositorySearch) lookup( CrossRepositorySearch.class.getName(), "default" );
assertNotNull( "CrossRepositorySearch:default should not be null.", search );
return search;
}

public void testSearchArtifactIdHasMoreWieghtThanGroupId() throws Exception
{
CrossRepositorySearch search = lookupCrossRepositorySearch();

String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID
};

List<SearchResultHit> expectedHits = new ArrayList<SearchResultHit>();
SearchResultHit hit = new SearchResultHit();
hit.setGroupId("ant");
hit.setArtifactId("ant");
hit.setVersion("1.5");
expectedHits.add(hit);

hit = new SearchResultHit();
hit.setGroupId("ant");
hit.setArtifactId("ant-optional");
hit.setVersion("1.5.1");
expectedHits.add(hit);

hit = new SearchResultHit();
hit.setGroupId("ant");
hit.setArtifactId("ant-junit");
hit.setVersion("1.6.5");
expectedHits.add(hit);

assertSearchResults( expectedRepos, expectedHits, search, "ant", null, false );
}

public void testSearchInvalidTerm()
throws Exception
{
CrossRepositorySearch search = lookupCrossRepositorySearch();

String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID
};

assertSearchResults( expectedRepos, new ArrayList<SearchResultHit>(), search, "monosodium", null, false );
}

public void testSearchForClassesAndPackages()
throws Exception
{
CrossRepositorySearch search = lookupCrossRepositorySearch();

String expectedRepos[] = new String[] {
TEST_DEFAULT_REPO_ID
};

SearchResultHit archivaCommon = new SearchResultHit();
archivaCommon.setArtifactId("archiva-common");
archivaCommon.setGroupId("org.apache.maven.archiva");
archivaCommon.setVersion("1.0");

// class with packagename search
assertSearchResults( expectedRepos, Arrays.asList(archivaCommon), search,
"org.apache.maven.archiva.common.utils.BaseFile", null, true );
// class name search
assertSearchResults( expectedRepos, Arrays.asList(archivaCommon), search,
"BaseFile", null, true );

SearchResultHit hit = new SearchResultHit();
hit.setGroupId("org.apache.maven.continuum");
hit.setArtifactId("continuum-webapp");
hit.setVersion("1.0.3-SNAPSHOT");

// method search
assertSearchResults( expectedRepos, Arrays.asList(hit), search,
"org.apache.maven.continuum.web.action.BuildDefinitionAction.isBuildFresh", null, true );
}
public void testExecuteFilteredSearch()
throws Exception
{
CrossRepositorySearch search = lookupCrossRepositorySearch();

String expectedRepos[] = new String[] { TEST_DEFAULT_REPO_ID };

String expectedResults[] = new String[] { "org1", "org2", "org3", "org4", "org5", "org6", "org7", "org8" };

String secondExpectedResults[] = new String[] { "continuum-webapp" };

String thirdExpectedResults[] = new String[] { "archiva-common" };

// search for groupId
assertFilteredSearchResults( expectedRepos, expectedResults, search, "org", null, null, null, 30 );

// search for groupId and artifactId
assertFilteredSearchResults( expectedRepos, secondExpectedResults, search, "org.apache.maven",
"continuum-webapp", null, null, 30 );

// search for groupId, artifactId and version
assertFilteredSearchResults( expectedRepos, thirdExpectedResults, search, "org.apache.maven.archiva",
"archiva-common", "1.0", null, 30 );
}

// MRM-981 - artifactIds with numeric characters aren't found in advanced search
public void testFilteredSearchArtifactIdHasNumericChar()
throws Exception
{
CrossRepositorySearch search = lookupCrossRepositorySearch();

String expectedRepos[] = new String[] { TEST_DEFAULT_REPO_ID };

String expectedResults[] = new String[] { "a-common5" };
assertFilteredSearchResults( expectedRepos, expectedResults, search, null, "a-common5", null, null, 30 );
assertFilteredSearchResults( expectedRepos, expectedResults, search, "a", "a-common5", null, null, 30 );
assertFilteredSearchResults( expectedRepos, expectedResults, search, "a", "a-common5", "1.0", null, 30 );
assertFilteredSearchResults( expectedRepos, expectedResults, search, "a", "a-common5", "1.0", "ACommonTestFile", 30 );
assertFilteredSearchResults( expectedRepos, expectedResults, search, "a", "a-common5", "1.0", "a.common5.package.", 30 );
String noHitsExpectedResults[] = new String[] {};
assertFilteredSearchResults( expectedRepos, noHitsExpectedResults, search, "org.apache.maven.archiva",
"a-common5", null, null, 30 );
}

private void assertFilteredSearchResults( String expectedRepos[], String expectedResults[], CrossRepositorySearch search,
String groupId, String artifactId, String version, String className, int rowCount )
{
SearchResultLimits limits = new SearchResultLimits( 0 );
limits.setPageSize( rowCount );
List<String> selectedRepos = new ArrayList<String>();
selectedRepos.addAll( Arrays.asList( expectedRepos ) );
SearchResults results = search.executeFilteredSearch( "guest", selectedRepos, groupId, artifactId, version, className, limits );
assertNotNull( "Search Results should not be null.", results );
assertEquals( "Repository Hits", expectedRepos.length, results.getRepositories().size() );
assertEquals( 1, expectedRepos.length );
assertEquals( TEST_DEFAULT_REPO_ID, selectedRepos.get( 0 ) );
assertEquals( "Search Result Hits", expectedResults.length, results.getHits().size() );
}

private void assertSearchResults( String expectedRepos[], List<SearchResultHit> expectedResults, CrossRepositorySearch search,
String term, List<String> previousSearchTerms, boolean bytecode )
throws Exception
{
SearchResultLimits limits = new SearchResultLimits( 0 );
limits.setPageSize( 20 );
List<String> selectedRepos = new ArrayList<String>();
selectedRepos.addAll( Arrays.asList( expectedRepos ) );
SearchResults results = null;

if( previousSearchTerms == null )
{
if( bytecode )
{
results = search.searchForBytecode( "guest", selectedRepos, term, limits );
}
else
{
results = search.searchForTerm( "guest", selectedRepos, term, limits );
}
}
else
{
results = search.searchForTerm( "guest", selectedRepos, term, limits, previousSearchTerms );
}

assertNotNull( "Search Results should not be null.", results );
assertEquals( "Repository Hits", expectedRepos.length, results.getRepositories().size() );
// TODO: test the repository ids returned.

assertEquals( "Search Result Hits", expectedResults.size(), results.getHits().size() );

for (int i = 0; i < expectedResults.size(); i++)
{
final SearchResultHit expectedResult = expectedResults.get(i);
final SearchResultHit hit = results.getHits().get(i);
assertEquals("artifactid", expectedResult.getArtifactId(), hit.getArtifactId());
assertEquals("groupid", expectedResult.getGroupId(), hit.getGroupId());
assertEquals("version", expectedResult.getVersion(), hit.getVersion());
}
}

protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
repo.setId( id );
repo.setName( name );
repo.setLocation( location.getAbsolutePath() );
return repo;
}
}
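
The helpers above drive the search component the same way an ordinary caller would. As a rough sketch of that usage outside the test harness (the principal, repository id and query values are illustrative, and "search" stands for a resolved CrossRepositorySearch as returned by lookupCrossRepositorySearch()):

List<String> selectedRepos = Collections.singletonList( "test-repo" );

SearchResultLimits limits = new SearchResultLimits( 0 ); // first page
limits.setPageSize( 30 );

// Free-text search across the selected repositories.
SearchResults termResults = search.searchForTerm( "guest", selectedRepos, "archiva", limits );

// Field-by-field ("advanced") search: groupId, artifactId, version, optional class name.
SearchResults filtered = search.executeFilteredSearch( "guest", selectedRepos,
    "org.apache.maven.archiva", "archiva-common", "1.0", null, limits );

for ( SearchResultHit hit : filtered.getHits() )
{
    System.out.println( hit.getGroupId() + ":" + hit.getArtifactId() + ":" + hit.getVersion() );
}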

+ 0
- 102
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/FileContentIndexPopulator.java View File

@@ -1,102 +0,0 @@
package org.apache.maven.archiva.indexer.search;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import junit.framework.AssertionFailedError;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.content.DefaultPathParser;
import org.apache.maven.archiva.repository.content.PathParser;
import org.apache.maven.archiva.repository.layout.LayoutException;

/**
* FileContentIndexPopulator
*
* @version $Id$
*/
public class FileContentIndexPopulator
implements IndexPopulator
{
public Map<String, ArchivaArtifact> getObjectMap()
{
return null;
}

public Map<String, FileContentRecord> populate( File basedir )
{
Map<String, FileContentRecord> map = new HashMap<String, FileContentRecord>();

File repoDir = new File( basedir, "src/test/managed-repository" );

String prefix = "org/apache/maven/archiva/record/";

map.put( "parent-pom-1", createFileContentRecord( repoDir, prefix + "parent-pom/1/parent-pom-1.pom" ) );
map.put( "child-pom-1.0-SNAPSHOT", createFileContentRecord( repoDir, prefix
+ "test-child-pom/1.0-SNAPSHOT/test-child-pom-1.0-20060728.121314-1.pom" ) );
map.put( "test-archetype-1.0", createFileContentRecord( repoDir, prefix
+ "test-archetype/1.0/test-archetype-1.0.pom" ) );
map.put( "test-jar-and-pom-1.0-alpha-1", createFileContentRecord( repoDir, prefix
+ "test-jar-and-pom/1.0-alpha-1/test-jar-and-pom-1.0-alpha-1.pom" ) );
map.put( "test-plugin-1.0", createFileContentRecord( repoDir, prefix + "test-plugin/1.0/test-plugin-1.0.pom" ) );
map.put( "test-pom-1.0", createFileContentRecord( repoDir, prefix + "test-pom/1.0/test-pom-1.0.pom" ) );
map.put( "test-skin-1.0", createFileContentRecord( repoDir, prefix + "test-skin/1.0/test-skin-1.0.pom" ) );

map.put("ant-1.5.pom", createFileContentRecord(repoDir, "ant/ant/1.5/ant-1.5.pom"));
map.put("ant-1.5.1.pom", createFileContentRecord(repoDir, "ant/ant/1.5.1/ant-1.5.1.pom"));
map.put("ant-junit-1.6.5.pom", createFileContentRecord(repoDir, "ant/ant-junit/1.6.5/ant-junit-1.6.5.pom"));
map.put("ant-optional-1.5.1.pom", createFileContentRecord(repoDir, "ant/ant-optional/1.5.1/ant-optional-1.5.1.pom"));
return map;
}

private FileContentRecord createFileContentRecord( File repoDir, String path )
{
File pathToFile = new File( repoDir, path );

if ( !pathToFile.exists() )
{
throw new AssertionFailedError( "Can't find test file: " + pathToFile.getAbsolutePath() );
}

FileContentRecord record = new FileContentRecord();
record.setRepositoryId( "test-repo" );
record.setFilename( path );

PathParser pathParser = new DefaultPathParser();
try
{
ArtifactReference reference = pathParser.toArtifactReference(path);
ArchivaArtifact artifact = new ArchivaArtifact( reference );
record.setArtifact(artifact);
}
catch (LayoutException e)
{
throw new RuntimeException(e);
}

return record;
}
}

+ 0
- 138
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/HashcodesIndexPopulator.java View File

@@ -1,138 +0,0 @@
package org.apache.maven.archiva.indexer.search;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecordLoader;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import junit.framework.AssertionFailedError;

/**
* HashcodesIndexPopulator
*
* @version $Id$
*/
public class HashcodesIndexPopulator
implements IndexPopulator
{

public Map<String, ArchivaArtifact> getObjectMap()
{
Map<String, ArchivaArtifact> dumps = new HashMap<String, ArchivaArtifact>();

// archiva-common-1.0.jar.txt
dumps.put( "archiva-common",
createArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "", "jar" ) );

// continuum-webapp-1.0.3-SNAPSHOT.war.txt
dumps.put( "continuum-webapp", createArchivaArtifact( "org.apache.maven.continuum", "continuum-webapp",
"1.0.3-SNAPSHOT", "", "war" ) );

// daytrader-ear-1.1.ear.txt
dumps.put( "daytrader-ear", createArchivaArtifact( "org.apache.geronimo", "daytrader-ear", "1.1", "", "ear" ) );

// maven-archetype-simple-1.0-alpha-4.jar.txt
dumps.put( "maven-archetype-simple", createArchivaArtifact( "org.apache.maven", "maven-archetype-simple",
"1.0-alpha-4", "", "maven-archetype" ) );

// maven-help-plugin-2.0.2-20070119.121239-2.jar.txt
dumps.put( "maven-help-plugin", createArchivaArtifact( "org.apache.maven.plugins", "maven-help-plugin",
"2.0.2-20070119.121239-2", "", "maven-plugin" ) );

// redback-authorization-open-1.0-alpha-1-SNAPSHOT.jar.txt
dumps.put( "redback-authorization-open", createArchivaArtifact( "org.codehaus.plexus.redback",
"redback-authorization-open",
"1.0-alpha-1-SNAPSHOT", "", "jar" ) );

// testng-5.1-jdk15.jar.txt
dumps.put( "testng", createArchivaArtifact( "org.testng", "testng", "5.1", "jdk15", "jar" ) );

// wagon-provider-api-1.0-beta-3-20070209.213958-2.jar.txt
dumps.put( "wagon-provider-api", createArchivaArtifact( "org.apache.maven.wagon", "wagon-provider-api",
"1.0-beta-3-20070209.213958-2", "", "jar" ) );

return dumps;
}

public Map<String, HashcodesRecord> populate( File basedir )
{
Map<String, HashcodesRecord> records = new HashMap<String, HashcodesRecord>();

for ( Entry<String, ArchivaArtifact> entry : getObjectMap().entrySet() )
{
ArchivaArtifact artifact = entry.getValue();
File dumpFile = getDumpFile( basedir, artifact );
HashcodesRecord record = HashcodesRecordLoader.loadRecord( dumpFile, artifact );
record.setRepositoryId( "test-repo" );
records.put( entry.getKey(), record );
}

return records;
}

protected File getDumpFile( File basedir, ArchivaArtifact artifact )
{
File dumpDir = new File( basedir, "src/test/artifact-dumps" );
StringBuffer filename = new StringBuffer();

filename.append( artifact.getArtifactId() ).append( "-" ).append( artifact.getVersion() );

if ( artifact.hasClassifier() )
{
filename.append( "-" ).append( artifact.getClassifier() );
}

filename.append( "." );

// TODO: use the ArtifactExtensionMapping object
if ( "maven-plugin".equals( artifact.getType() ) || "maven-archetype".equals( artifact.getType() ) )
{
filename.append( "jar" );
}
else
{
filename.append( artifact.getType() );
}
filename.append( ".txt" );

File dumpFile = new File( dumpDir, filename.toString() );

if ( !dumpFile.exists() )
{
throw new AssertionFailedError(
"Dump file " + dumpFile.getAbsolutePath() + " does not exist (should it?)." );
}

return dumpFile;
}

private ArchivaArtifact createArchivaArtifact( String groupId, String artifactId, String version, String classifier,
String type )
{
ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
return artifact;
}
}

+ 0
- 38
archiva-modules/archiva-base/archiva-indexer/src/test/java/org/apache/maven/archiva/indexer/search/IndexPopulator.java View File

@@ -1,38 +0,0 @@
package org.apache.maven.archiva.indexer.search;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;

import java.io.File;
import java.util.Map;

/**
* IndexPopulator
*
* @version $Id$
*/
public interface IndexPopulator
{
public Map<String, ArchivaArtifact> getObjectMap();

public Map<String, ? extends LuceneRepositoryContentRecord> populate( File basedir );
}

+ 32
- 1
archiva-modules/archiva-base/archiva-model/src/main/java/org/apache/maven/archiva/model/AbstractArtifactKey.java View File

@@ -105,6 +105,11 @@ public class AbstractArtifactKey
*/
public String type = "";

/**
* The Repository Id (JPOX Requires this remain public)
*/
public String repositoryId = "";

/**
* Default Constructor. Required by JPOX.
*/
@@ -126,19 +131,22 @@ public class AbstractArtifactKey
version = parts[2];
classifier = parts[3];
type = parts[4];
repositoryId = parts[5];
}

/**
* Get the String representation of this object. - Required by JPOX.
*/
@Override
public String toString()
{
return StringUtils.join( new String[] { groupId, artifactId, version, classifier, type }, ':' );
return StringUtils.join( new String[] { groupId, artifactId, version, classifier, type, repositoryId }, ':' );
}

/**
* Get the hashcode for this object's values - Required by JPOX.
*/
@Override
public int hashCode()
{
final int PRIME = 31;
@@ -148,12 +156,14 @@ public class AbstractArtifactKey
result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
result = PRIME * result + ( ( classifier == null ) ? 0 : classifier.hashCode() );
result = PRIME * result + ( ( type == null ) ? 0 : type.hashCode() );
result = PRIME * result + ( ( repositoryId == null ) ? 0 : repositoryId.hashCode() );
return result;
}

/**
* Get the equals for this object's values - Required by JPOX.
*/
@Override
public boolean equals( Object obj )
{
if ( this == obj )
@@ -233,6 +243,18 @@ public class AbstractArtifactKey
return false;
}

if ( repositoryId == null )
{
if ( other.repositoryId != null )
{
return false;
}
}
else if ( !repositoryId.equals( other.repositoryId ) )
{
return false;
}

return true;
}

@@ -286,4 +308,13 @@ public class AbstractArtifactKey
}
}

public void setRepositoryId( String repositoryId )
{
this.repositoryId = "";

if ( StringUtils.isNotBlank(repositoryId) )
{
this.repositoryId = repositoryId;
}
}
}
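
With repositoryId added, the JPOX string form of the key now carries six colon-separated parts instead of five. A minimal sketch of the round trip, using the concrete ArchivaArtifactModelKey that JdoArtifactDAO builds further down (values are illustrative; the setters are assumed to blank-guard the same way setRepositoryId() above does):

ArchivaArtifactModelKey key = new ArchivaArtifactModelKey();
key.setGroupId( "org.apache.maven.archiva" );
key.setArtifactId( "archiva-common" );
key.setVersion( "1.0" );
key.setClassifier( "" );
key.setType( "jar" );
key.setRepositoryId( "internal" );

// toString() now joins six parts with ':' ...
String id = key.toString();   // "org.apache.maven.archiva:archiva-common:1.0::jar:internal"

// ... and the String-based constructor splits the same six parts back,
// with parts[5] populating repositoryId.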

+ 15
- 3
archiva-modules/archiva-base/archiva-model/src/main/java/org/apache/maven/archiva/model/ArchivaArtifact.java View File

@@ -36,7 +36,7 @@ public class ArchivaArtifact
private String baseVersion;

public ArchivaArtifact( String groupId, String artifactId, String version,
String classifier, String type )
String classifier, String type, String repositoryId )
{
if ( empty( groupId ) )
{
@@ -62,6 +62,12 @@ public class ArchivaArtifact
+ Keys.toKey( groupId, artifactId, version, classifier, type ) + "]" );
}

if ( empty( repositoryId ) )
{
throw new IllegalArgumentException( "Unable to create ArchivaArtifact with empty repositoryId ["
+ Keys.toKey( groupId, artifactId, version, classifier, type ) + "]" );
}

model = new ArchivaArtifactModel();

model.setGroupId( groupId );
@@ -70,6 +76,7 @@ public class ArchivaArtifact
model.setClassifier( StringUtils.defaultString( classifier ) );
model.setType( type );
model.setSnapshot( VersionUtil.isSnapshot( version ) );
model.setRepositoryId(repositoryId);
this.baseVersion = VersionUtil.getBaseVersion( version );
}
@@ -81,9 +88,9 @@ public class ArchivaArtifact
this.baseVersion = VersionUtil.getBaseVersion( model.getVersion() );
}
public ArchivaArtifact( ArtifactReference ref )
public ArchivaArtifact( ArtifactReference ref, String repositoryId )
{
this( ref.getGroupId(), ref.getArtifactId(), ref.getVersion(), ref.getClassifier(), ref.getType() );
this( ref.getGroupId(), ref.getArtifactId(), ref.getVersion(), ref.getClassifier(), ref.getType(), repositoryId );
}

public ArchivaArtifactModel getModel()
@@ -131,6 +138,11 @@ public class ArchivaArtifact
return StringUtils.isNotEmpty( model.getClassifier() );
}

public String getRepositoryId()
{
return model.getRepositoryId();
}

@Override
public int hashCode()
{
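
An ArchivaArtifact is now bound to a repository at construction time, and a blank repositoryId is rejected just like a blank groupId or version. A minimal usage sketch (the repository id value is illustrative):

// The sixth constructor argument is mandatory; blank values throw IllegalArgumentException.
ArchivaArtifact artifact = new ArchivaArtifact( "org.apache.maven.archiva", "archiva-common",
    "1.0", "", "jar", "internal" );

assert "internal".equals( artifact.getRepositoryId() );

// Building from an ArtifactReference likewise needs the owning repository:
// new ArchivaArtifact( reference, repository.getId() );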

+ 19
- 3
archiva-modules/archiva-base/archiva-model/src/main/mdo/archiva-base.xml View File

@@ -299,11 +299,14 @@
The type of artifact.
</description>
</field>
<field stash.maxSize="50">
<field stash.maxSize="50"
jpox.primary-key="true"
jpox.value-strategy="off"
jpox.persistence-modifier="persistent">
<name>repositoryId</name>
<identifier>false</identifier>
<identifier>true</identifier>
<version>1.0.0+</version>
<required>false</required>
<required>true</required>
<type>String</type>
<description>
The repository associated with this content.
@@ -513,6 +516,19 @@
The type of artifact.
</description>
</field>
<field stash.maxSize="50"
jpox.primary-key="true"
jpox.value-strategy="off"
jpox.persistence-modifier="persistent">
<name>repositoryId</name>
<identifier>true</identifier>
<version>1.0.0+</version>
<required>true</required>
<type>String</type>
<description>
The repository associated with this content.
</description>
</field>
<field>
<name>checksumBytecode</name>
<identifier>false</identifier>

+ 1
- 1
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/project/resolvers/ManagedRepositoryProjectResolver.java View File

@@ -51,7 +51,7 @@ public class ManagedRepositoryProjectResolver
throws ProjectModelException
{
ArchivaArtifact artifact = new ArchivaArtifact( reference.getGroupId(), reference.getArtifactId(), reference
.getVersion(), "", "pom" );
.getVersion(), "", "pom", repository.getId() );

File repoFile = repository.toFile( artifact );


+ 2
- 2
archiva-modules/archiva-database/src/main/java/org/apache/maven/archiva/database/ArtifactDAO.java View File

@@ -52,10 +52,10 @@ public interface ArtifactDAO
*/

public ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String classifier,
String type );
String type, String repositoryId );

public ArchivaArtifact getArtifact( String groupId, String artifactId, String version, String classifier,
String type )
String type, String repositoryId )
throws ObjectNotFoundException, ArchivaDatabaseException;

public List /*<ArchivaArtifact>*/queryArtifacts( Constraint constraint )
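
Because the repository is now part of an artifact's identity, every DAO creation and lookup must name the repository it is working against. A minimal sketch of the updated calls (the repository id is illustrative; artifactDao stands for an injected ArtifactDAO):

// Create-or-load, now scoped to a single repository.
ArchivaArtifact created = artifactDao.createArtifact( "org.apache.maven.archiva",
    "archiva-common", "1.0", "", "jar", "internal" );

// Exact lookup: the same coordinates in another repository are a different row,
// so a mismatched repositoryId ends in ObjectNotFoundException.
ArchivaArtifact found = artifactDao.getArtifact( "org.apache.maven.archiva",
    "archiva-common", "1.0", "", "jar", "internal" );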

+ 1
- 3
archiva-modules/archiva-database/src/main/java/org/apache/maven/archiva/database/RepositoryDatabaseEventListener.java View File

@@ -42,14 +42,12 @@ public class RepositoryDatabaseEventListener
{
ArchivaArtifact queriedArtifact =
artifactDAO.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
artifact.getClassifier(), artifact.getType() );
artifact.getClassifier(), artifact.getType() , repository.getId());
artifactDAO.deleteArtifact( queriedArtifact );
}
catch ( ArchivaDatabaseException e )
{
// ignored
}

// TODO [MRM-37]: re-run the database consumers to clean up
}
}

+ 30
- 29
archiva-modules/archiva-database/src/main/java/org/apache/maven/archiva/database/browsing/DefaultRepositoryBrowsing.java View File

@@ -65,7 +65,7 @@ public class DefaultRepositoryBrowsing
*/
private DatabaseUpdater dbUpdater;

public BrowsingResults getRoot( String principle, List<String> observableRepositoryIds )
public BrowsingResults getRoot( final String principle, final List<String> observableRepositoryIds )
{
List<String> groups = dao.query( new UniqueGroupIdConstraint( observableRepositoryIds ) );

@@ -77,8 +77,8 @@ public class DefaultRepositoryBrowsing
return results;
}

public BrowsingResults selectArtifactId( String principle, List<String> observableRepositoryIds, String groupId,
String artifactId )
public BrowsingResults selectArtifactId( final String principle, final List<String> observableRepositoryIds, final String groupId,
final String artifactId )
{
// NOTE: No group Id or artifact Id's should be returned here.
List<String> versions = dao.query( new UniqueVersionConstraint( observableRepositoryIds, groupId, artifactId ) );
@@ -93,7 +93,7 @@ public class DefaultRepositoryBrowsing
return results;
}

public BrowsingResults selectGroupId( String principle, List<String> observableRepositoryIds, String groupId )
public BrowsingResults selectGroupId( final String principle, final List<String> observableRepositoryIds, final String groupId )
{
List<String> groups = dao.query( new UniqueGroupIdConstraint( observableRepositoryIds, groupId ) );
List<String> artifacts = dao.query( new UniqueArtifactIdConstraint( observableRepositoryIds, groupId ) );
@@ -110,14 +110,13 @@ public class DefaultRepositoryBrowsing
return results;
}

public ArchivaProjectModel selectVersion( String principle, List<String> observableRepositoryIds, String groupId,
String artifactId, String version )
public ArchivaProjectModel selectVersion( final String principle, final List<String> observableRepositoryIds, final String groupId,
final String artifactId, final String version )
throws ObjectNotFoundException, ArchivaDatabaseException
{
ArchivaArtifact pomArtifact = getArtifact( principle, observableRepositoryIds, groupId, artifactId, version );

ArchivaProjectModel model;
version = pomArtifact.getVersion();

if ( !pomArtifact.getModel().isProcessed() )
{
@@ -125,13 +124,13 @@ public class DefaultRepositoryBrowsing
dbUpdater.updateUnprocessed( pomArtifact );
}

model = getProjectModel( groupId, artifactId, version );
model = getProjectModel( groupId, artifactId, pomArtifact.getVersion() );

return model;
}
public String getRepositoryId( String principle, List<String> observableRepositoryIds, String groupId,
String artifactId, String version )
public String getRepositoryId( final String principle, final List<String> observableRepositoryIds, final String groupId,
final String artifactId, final String version )
throws ObjectNotFoundException, ArchivaDatabaseException
{
ArchivaArtifact pomArchivaArtifact =
@@ -140,21 +139,25 @@ public class DefaultRepositoryBrowsing
return pomArchivaArtifact.getModel().getRepositoryId();
}
private ArchivaArtifact getArtifact( String principle, List<String> observableRepositoryIds, String groupId,
String artifactId, String version )
private ArchivaArtifact getArtifact( final String principal, final List<String> observableRepositoryIds, final String groupId,
final String artifactId, final String version )
throws ObjectNotFoundException, ArchivaDatabaseException
{
ArchivaArtifact pomArtifact = null;

try
{
pomArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, null, "pom" );
}
catch ( ObjectNotFoundException e )
for (final String repositoryId : observableRepositoryIds)
{
pomArtifact = handleGenericSnapshots( groupId, artifactId, version, pomArtifact );
try
{
pomArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, null, "pom", repositoryId );
}
catch ( ObjectNotFoundException e )
{
pomArtifact = handleGenericSnapshots( groupId, artifactId, version, repositoryId );
}
}


if ( pomArtifact == null )
{
throw new ObjectNotFoundException( "Unable to find artifact [" + Keys.toKey( groupId, artifactId, version )
@@ -170,12 +173,12 @@ public class DefaultRepositoryBrowsing
{
throw new ObjectNotFoundException( "Unable to find artifact " + Keys.toKey( groupId, artifactId, version )
+ " in observable repository [" + StringUtils.join( observableRepositoryIds.iterator(), ", " )
+ "] for user " + principle );
+ "] for user " + principal );
}
}

public List<ArchivaProjectModel> getUsedBy( String principle, List<String> observableRepositoryIds, String groupId,
String artifactId, String version )
public List<ArchivaProjectModel> getUsedBy( final String principle, final List<String> observableRepositoryIds, final String groupId,
final String artifactId, final String version )
throws ArchivaDatabaseException
{
ProjectsByArtifactUsageConstraint constraint = new ProjectsByArtifactUsageConstraint( groupId, artifactId,
@@ -208,7 +211,7 @@ public class DefaultRepositoryBrowsing
*
* @param versions
*/
private void processSnapshots( List<String> versions )
private void processSnapshots( final List<String> versions )
{
Map<String, String> snapshots = new HashMap<String, String>();

@@ -248,10 +251,11 @@ public class DefaultRepositoryBrowsing
* @param pomArtifact
* @throws ArchivaDatabaseException
*/
private ArchivaArtifact handleGenericSnapshots( String groupId, String artifactId, String version,
ArchivaArtifact pomArtifact )
private ArchivaArtifact handleGenericSnapshots( final String groupId, final String artifactId, final String version, final String repositoryId )
throws ArchivaDatabaseException
{
ArchivaArtifact result = null;

if ( VersionUtil.isGenericSnapshot( version ) )
{
List<String> versions = dao.query( new UniqueVersionConstraint( groupId, artifactId ) );
@@ -263,14 +267,11 @@ public class DefaultRepositoryBrowsing
if ( VersionUtil.getBaseVersion( uniqueVersion ).equals( version ) )
{
log.info( "Retrieving artifact with version " + uniqueVersion );
pomArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, uniqueVersion, null, "pom" );

return pomArtifact;
result = dao.getArtifactDAO().getArtifact( groupId, artifactId, uniqueVersion, null, "pom", repositoryId );
}
}
}

return null;
return result;
}

/**

+ 5
- 4
archiva-modules/archiva-database/src/main/java/org/apache/maven/archiva/database/jdo/JdoArtifactDAO.java View File

@@ -49,24 +49,24 @@ public class JdoArtifactDAO
/* .\ Archiva Artifact \. _____________________________________________________________ */

public ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String classifier,
String type )
String type, String repositoryId )
{
ArchivaArtifact artifact;

try
{
artifact = getArtifact( groupId, artifactId, version, classifier, type );
artifact = getArtifact( groupId, artifactId, version, classifier, type, repositoryId );
}
catch ( ArchivaDatabaseException e )
{
artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type );
artifact = new ArchivaArtifact( groupId, artifactId, version, classifier, type, repositoryId );
}

return artifact;
}

public ArchivaArtifact getArtifact( String groupId, String artifactId, String version, String classifier,
String type )
String type, String repositoryId )
throws ObjectNotFoundException, ArchivaDatabaseException
{
ArchivaArtifactModelKey key = new ArchivaArtifactModelKey();
@@ -75,6 +75,7 @@ public class JdoArtifactDAO
key.setVersion( version );
key.setClassifier( classifier );
key.setType( type );
key.setRepositoryId( repositoryId );

ArchivaArtifactModel model = (ArchivaArtifactModel) jdo.getObjectById( ArchivaArtifactModel.class, key, null );
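
The practical effect at the persistence layer is that identical coordinates stored in two repositories now resolve to two distinct JDO object ids instead of colliding on one row. A sketch, assuming two managed repositories named "internal" and "snapshots" (illustrative ids):

ArchivaArtifact internal = artifactDao.createArtifact( "org.apache.maven", "maven-core",
    "2.0.9", "", "jar", "internal" );
ArchivaArtifact snapshots = artifactDao.createArtifact( "org.apache.maven", "maven-core",
    "2.0.9", "", "jar", "snapshots" );

// Before this change both would have resolved to the same key,
//   org.apache.maven:maven-core:2.0.9::jar
// and been treated as one artifact; the keys now end in
//   ...:jar:internal   and   ...:jar:snapshots
// respectively, so the two rows coexist.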


+ 3
- 2
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/AbstractArchivaDatabaseTestCase.java View File

@@ -54,6 +54,7 @@ public abstract class AbstractArchivaDatabaseTestCase

protected ArchivaDAO dao;

@Override
protected void setUp()
throws Exception
{
@@ -186,7 +187,7 @@ public abstract class AbstractArchivaDatabaseTestCase
protected ArtifactReference toArtifactReference( String id )
{
String parts[] = StringUtils.splitPreserveAllTokens( id, ':' );
assertEquals( "Should have 5 parts [" + id + "]", 5, parts.length );
assertEquals( "Should have 6 parts [" + id + "]", 6, parts.length );
ArtifactReference ref = new ArtifactReference();
ref.setGroupId( parts[0] );
@@ -194,7 +195,7 @@ public abstract class AbstractArchivaDatabaseTestCase
ref.setVersion( parts[2] );
ref.setClassifier( parts[3] );
ref.setType( parts[4] );
assertTrue( "Group ID should not be blank [" + id + "]", StringUtils.isNotBlank( ref.getGroupId() ) );
assertTrue( "Artifact ID should not be blank [" + id + "]", StringUtils.isNotBlank( ref.getArtifactId() ) );
assertTrue( "Version should not be blank [" + id + "]", StringUtils.isNotBlank( ref.getVersion() ) );

+ 9
- 4
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/RepositoryDatabaseEventListenerTest.java View File

@@ -22,6 +22,7 @@ package org.apache.maven.archiva.database;
import java.util.Date;
import java.util.List;

import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
@@ -54,7 +55,7 @@ public class RepositoryDatabaseEventListenerTest
public ArchivaArtifact createArtifact( String artifactId, String version, ArtifactDAO artifactDao )
{
ArchivaArtifact artifact =
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar", "testable_repo" );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
@@ -70,16 +71,20 @@ public class RepositoryDatabaseEventListenerTest
artifactDao.saveArtifact( artifact );

assertEquals( artifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null,
"jar" ) );
"jar", "testable_repo" ) );

artifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
artifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar", "testable_repo" );
ManagedRepositoryContent repository =
(ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
ManagedRepositoryConfiguration configuration = new ManagedRepositoryConfiguration();
configuration.setId("testable_repo");
repository.setRepository(configuration);
listener.deleteArtifact( repository, artifact );

try
{
artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar", "testable_repo" );
fail( "Should not find artifact" );
}
catch ( ObjectNotFoundException e )

+ 2
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/browsing/RepositoryBrowsingTest.java View File

@@ -51,7 +51,7 @@ public class RepositoryBrowsingTest

public ArchivaArtifact createArtifact( String groupId, String artifactId, String version )
{
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, "", "jar" );
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, "", "jar", "central" );
artifact.getModel().setLastModified( new Date() ); // mandatory field.
artifact.getModel().setRepositoryId( "central" );
return artifact;
@@ -141,6 +141,7 @@ public class RepositoryBrowsingTest
}
}

@Override
protected void setUp()
throws Exception
{

+ 2
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactVersionsConstraintTest.java View File

@@ -40,6 +40,7 @@ public class ArtifactVersionsConstraintTest
public static final String TEST_REPO = "test-repo";
@Override
public void setUp()
throws Exception
{
@@ -51,7 +52,7 @@ public class ArtifactVersionsConstraintTest
private ArchivaArtifact createArtifact( String groupId, String artifactId, String version )
{
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, null, "jar" );
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, null, "jar", TEST_REPO );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( TEST_REPO );


+ 2
- 2
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactsByChecksumConstraintTest.java View File

@@ -49,7 +49,7 @@ public class ArtifactsByChecksumConstraintTest

private ArtifactDAO artifactDao;

@Override
protected void setUp()
throws Exception
{
@@ -62,7 +62,7 @@ public class ArtifactsByChecksumConstraintTest
public ArchivaArtifact createArtifact( String artifactId, String version )
{
ArchivaArtifact artifact =
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar", "testable_repo" );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;

+ 1
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactsByRepositoryConstraintTest.java View File

@@ -49,7 +49,7 @@ public class ArtifactsByRepositoryConstraintTest

private ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
{
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, null, type );
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, null, type, "test-repo" );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "test-repo" );


+ 2
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactsProcessedConstraintTest.java View File

@@ -38,7 +38,7 @@ public class ArtifactsProcessedConstraintTest
public ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String whenProcessed )
throws Exception
{
ArchivaArtifact artifact = dao.getArtifactDAO().createArtifact( groupId, artifactId, version, "", "jar" );
ArchivaArtifact artifact = dao.getArtifactDAO().createArtifact( groupId, artifactId, version, "", "jar", "testrepo" );
assertNotNull( "Artifact should not be null.", artifact );
Date dateWhenProcessed = null;

@@ -88,6 +88,7 @@ public class ArtifactsProcessedConstraintTest
}
}

@Override
protected void setUp()
throws Exception
{

+ 1
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ArtifactsRelatedConstraintTest.java View File

@@ -51,7 +51,7 @@ public class ArtifactsRelatedConstraintTest
public ArchivaArtifact createArtifact( String artifactId, String version, String classifier, String type )
{
ArchivaArtifact artifact = artifactDao.createArtifact( TEST_GROUPID, artifactId, version,
classifier, type );
classifier, type, "testable_repo" );
Calendar cal = Calendar.getInstance();
artifact.getModel().setLastModified( cal.getTime() );
artifact.getModel().setRepositoryId( "testable_repo" );

+ 1
- 0
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/MostRecentRepositoryScanStatisticsTest.java View File

@@ -46,6 +46,7 @@ public class MostRecentRepositoryScanStatisticsTest
return stats;
}

@Override
protected void setUp()
throws Exception
{

+ 1
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/OlderArtifactsByAgeConstraintTest.java View File

@@ -50,7 +50,7 @@ public class OlderArtifactsByAgeConstraintTest
public ArchivaArtifact createArtifact( String artifactId, String version, int daysOld )
{
ArchivaArtifact artifact = artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version,
"", "jar" );
"", "jar", "testable_repo" );
Calendar cal = Calendar.getInstance();
cal.add( Calendar.DAY_OF_MONTH, ( -1 ) * daysOld );
artifact.getModel().setLastModified( cal.getTime() );

+ 1
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/OlderSnapshotArtifactsByAgeConstraintTest.java View File

@@ -50,7 +50,7 @@ public class OlderSnapshotArtifactsByAgeConstraintTest
public ArchivaArtifact createArtifact( String artifactId, String version, int daysOld )
{
ArchivaArtifact artifact = artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version,
"", "jar" );
"", "jar", "testable_repo" );
Calendar cal = Calendar.getInstance();
cal.add( Calendar.DAY_OF_MONTH, ( -1 ) * daysOld );
artifact.getModel().setLastModified( cal.getTime() );

+ 7
- 6
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/ProjectsByArtifactUsageConstraintTest.java View File

@@ -38,6 +38,7 @@ import java.util.List;
public class ProjectsByArtifactUsageConstraintTest
extends AbstractArchivaDatabaseTestCase
{
@Override
protected void setUp()
throws Exception
{
@@ -80,7 +81,7 @@ public class ProjectsByArtifactUsageConstraintTest
ArtifactReference ref = toArtifactReference( id );

ArchivaArtifact artifact = new ArchivaArtifact( ref.getGroupId(), ref.getArtifactId(), ref.getVersion(), ref
.getClassifier(), ref.getType() );
.getClassifier(), ref.getType(), "testable_repo" );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
@@ -90,17 +91,17 @@ public class ProjectsByArtifactUsageConstraintTest
throws Exception
{
saveModel( "org.apache.maven.archiva:archiva-configuration:1.0",
new String[] { "org.codehaus.plexus:plexus-digest:1.0::jar" } );
new String[] { "org.codehaus.plexus:plexus-digest:1.0::jar:" } );

saveModel( "org.apache.maven.archiva:archiva-common:1.0", new String[] {
"org.codehaus.plexus:plexus-digest:1.0::jar",
"junit:junit:3.8.1::jar" } );
"org.codehaus.plexus:plexus-digest:1.0::jar:",
"junit:junit:3.8.1::jar:" } );

ArchivaArtifact artifact;

artifact = toArtifact( "org.foo:bar:4.0::jar" );
artifact = toArtifact( "org.foo:bar:4.0::jar:" );
assertConstraint( 0, new ProjectsByArtifactUsageConstraint( artifact ) );
artifact = toArtifact( "org.codehaus.plexus:plexus-digest:1.0::jar" );
artifact = toArtifact( "org.codehaus.plexus:plexus-digest:1.0::jar:testable_repo" );
assertConstraint( 2, new ProjectsByArtifactUsageConstraint( artifact ) );
}


+ 2
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/RecentArtifactsByAgeConstraintTest.java View File

@@ -38,6 +38,7 @@ public class RecentArtifactsByAgeConstraintTest
{
private ArtifactDAO artifactDao;

@Override
protected void setUp()
throws Exception
{
@@ -50,7 +51,7 @@ public class RecentArtifactsByAgeConstraintTest
public ArchivaArtifact createArtifact( String artifactId, String version, int daysOld )
{
ArchivaArtifact artifact = artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version,
"", "jar" );
"", "jar", "testable_repo" );
Calendar cal = Calendar.getInstance();
cal.add( Calendar.DAY_OF_MONTH, ( -1 ) * daysOld );
artifact.getModel().setLastModified( cal.getTime() );

+ 1
- 0
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/RepositoryContentStatisticsByRepositoryConstraintTest.java View File

@@ -47,6 +47,7 @@ public class RepositoryContentStatisticsByRepositoryConstraintTest
return stats;
}

@Override
protected void setUp()
throws Exception
{

+ 1
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/UniqueArtifactIdConstraintTest.java View File

@@ -53,7 +53,7 @@ public class UniqueArtifactIdConstraintTest

public ArchivaArtifact createArtifact( String groupId, String artifactId, String version )
{
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, "", "jar" );
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, "", "jar", "testable_repo" );
artifact.getModel().setLastModified( new Date() ); // mandatory field.
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;

+ 1
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/UniqueFieldConstraintTest.java View File

@@ -61,7 +61,7 @@ public class UniqueFieldConstraintTest

public ArchivaArtifact createArtifact( String groupId )
{
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, "artifactId", "version", "classifier", "jar" );
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, "artifactId", "version", "classifier", "jar", "testrepo" );

artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "repoId" );

+ 1
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/UniqueGroupIdConstraintTest.java View File

@@ -254,7 +254,7 @@ public class UniqueGroupIdConstraintTest

private ArchivaArtifact createArtifact( String repoId, String groupId, String artifactId, String version )
{
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, "", "jar" );
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, "", "jar", "testrepo" );
artifact.getModel().setLastModified( new Date() ); // mandatory field.
artifact.getModel().setRepositoryId( repoId );
return artifact;

+ 1
- 1
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/constraints/UniqueVersionConstraintTest.java View File

@@ -128,7 +128,7 @@ public class UniqueVersionConstraintTest

private ArchivaArtifact createArtifact( String repoId, String groupId, String artifactId, String version )
{
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, "", "jar" );
ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, "", "jar", "testrepo" );
artifact.getModel().setLastModified( new Date() ); // mandatory field.
artifact.getModel().setRepositoryId( repoId );
return artifact;

+ 5
- 4
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/jdo/JdoArtifactDAOTest.java View File

@@ -41,7 +41,7 @@ public class JdoArtifactDAOTest
{
public void testArtifactKey()
{
Object o = JDOImplHelper.getInstance().newObjectIdInstance( ArchivaArtifactModel.class, "foo:bar:1.0::jar" );
Object o = JDOImplHelper.getInstance().newObjectIdInstance( ArchivaArtifactModel.class, "foo:bar:1.0::jar:testrepo" );
assertNotNull( "Key should not be null.", o );
assertTrue( "Key should be an instance of " + ArchivaArtifactModelKey.class.getName(),
( o instanceof ArchivaArtifactModelKey ) );
@@ -52,6 +52,7 @@ public class JdoArtifactDAOTest
assertEquals( "1.0", key.version );
assertEquals( "", key.classifier );
assertEquals( "jar", key.type );
assertEquals("testrepo", key.repositoryId);
}

public void testArtifactCRUD()
@@ -61,7 +62,7 @@ public class JdoArtifactDAOTest

// Create it
ArchivaArtifact artifact = artiDao.createArtifact( "org.apache.maven.archiva", "archiva-test-module", "1.0",
"", "jar" );
"", "jar", "testrepo" );
assertNotNull( artifact );

// Set some mandatory values
@@ -72,7 +73,7 @@ public class JdoArtifactDAOTest
ArchivaArtifact savedArtifact = artiDao.saveArtifact( artifact );
assertNotNull( savedArtifact );
String savedKeyId = JDOHelper.getObjectId( savedArtifact.getModel() ).toString();
assertEquals( "org.apache.maven.archiva:archiva-test-module:1.0::jar", savedKeyId );
assertEquals( "org.apache.maven.archiva:archiva-test-module:1.0::jar:testrepo", savedKeyId );

// Test that something has been saved.
List artifacts = artiDao.queryArtifacts( null );
@@ -97,7 +98,7 @@ public class JdoArtifactDAOTest

// Get the specific artifact.
ArchivaArtifact actualArtifact = artiDao.getArtifact( "org.apache.maven.archiva", "archiva-test-module", "1.0",
null, "jar" );
null, "jar", "testrepo" );
assertNotNull( actualArtifact );

// Test expected values.

+ 4
- 3
archiva-modules/archiva-database/src/test/java/org/apache/maven/archiva/database/updater/DatabaseUpdaterTest.java View File

@@ -38,7 +38,7 @@ public class DatabaseUpdaterTest
public ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String whenProcessed )
throws Exception
{
ArchivaArtifact artifact = dao.getArtifactDAO().createArtifact( groupId, artifactId, version, "", "jar" );
ArchivaArtifact artifact = dao.getArtifactDAO().createArtifact( groupId, artifactId, version, "", "jar", "testrepo" );
assertNotNull( "Artifact should not be null.", artifact );
Date dateWhenProcessed = null;

@@ -55,6 +55,7 @@ public class DatabaseUpdaterTest
return artifact;
}

@Override
protected void setUp()
throws Exception
{
@@ -86,14 +87,14 @@ public class DatabaseUpdaterTest

// Check the state of the artifact in the DB.
ArchivaArtifact savedArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, classifier,
type );
type, "testrepo" );
assertFalse( "Artifact should not be considered processed (yet).", savedArtifact.getModel().isProcessed() );

// Update the artifact
dbupdater.updateUnprocessed( savedArtifact );

// Check the update.
ArchivaArtifact processed = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, classifier, type );
ArchivaArtifact processed = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, classifier, type, "testrepo" );
assertTrue( "Artifact should be flagged as processed.", processed.getModel().isProcessed() );

// Did the unprocessed consumer do it's thing?

+ 2
- 2
archiva-modules/archiva-reporting/archiva-artifact-reports/src/test/java/org/apache/maven/archiva/reporting/artifact/DuplicateArtifactReportTest.java View File

@@ -51,6 +51,7 @@ public class DuplicateArtifactReportTest

private ArtifactDAO artifactDao;

@Override
protected void setUp()
throws Exception
{
@@ -72,9 +73,8 @@ public class DuplicateArtifactReportTest
public ArchivaArtifact createArtifact( String artifactId, String version )
{
ArchivaArtifact artifact =
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar", TESTABLE_REPO );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( TESTABLE_REPO );
return artifact;
}


+ 0
- 0
archiva-modules/archiva-reporting/archiva-report-manager/src/test/java/org/apache/maven/archiva/reporting/SimpleRepositoryStatisticsReportGeneratorTest.java View File


Some files were not shown because too many files changed in this diff
