package org.apache.maven.archiva.consumers.core.repository;
/*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied. See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*/
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
+import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
+import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
+import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import java.io.File;
import java.io.FilenameFilter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
import java.util.Set;
/**
protected ArtifactDAO artifactDao;
- public AbstractRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao )
+ private Map<String, RepositoryContentIndex> indices;
+
+ /**
+ * @param repository the managed repository content this purge operates on
+ * @param artifactDao DAO used to remove purged artifacts from the database
+ * @param indices lucene content indices, keyed by "filecontent", "hashcodes" and "bytecode"
+ */
+ public AbstractRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
+ Map<String, RepositoryContentIndex> indices )
{
this.repository = repository;
this.artifactDao = artifactDao;
+ this.indices = indices;
}
/**
* Get all files from the directory that matches the specified filename.
- *
- * @param dir the directory to be scanned
+ *
+ * @param dir the directory to be scanned
* @param filename the filename to be matched
* @return
*/
/**
* Purge the repo. Update db and index of removed artifacts.
- *
+ *
* @param artifactFiles
* @throws RepositoryIndexException
*/
protected void purge( Set<ArtifactReference> references )
{
+ // Collect one index record of each type per purged artifact, so all three
+ // Lucene indices can be updated in a single batch after the files are gone.
+ List<LuceneRepositoryContentRecord> fileContentRecords = new ArrayList<LuceneRepositoryContentRecord>();
+ List<LuceneRepositoryContentRecord> hashcodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
+ List<LuceneRepositoryContentRecord> bytecodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
+
for ( ArtifactReference reference : references )
{
File artifactFile = repository.toFile( reference );
+ ArchivaArtifact artifact =
+ new ArchivaArtifact( reference.getGroupId(), reference.getArtifactId(), reference.getVersion(),
+ reference.getClassifier(), reference.getType() );
+
+ FileContentRecord fileContentRecord = new FileContentRecord();
+ fileContentRecord.setFilename( repository.toPath( artifact ) );
+ fileContentRecords.add( fileContentRecord );
+
+ HashcodesRecord hashcodesRecord = new HashcodesRecord();
+ hashcodesRecord.setArtifact( artifact );
+ hashcodeRecords.add( hashcodesRecord );
+
+ BytecodeRecord bytecodeRecord = new BytecodeRecord();
+ bytecodeRecord.setArtifact( artifact );
+ bytecodeRecords.add( bytecodeRecord );
+
artifactFile.delete();
purgeSupportFiles( artifactFile );
// Ignore
}
}
+
+ try
+ {
+ updateIndices( fileContentRecords, hashcodeRecords, bytecodeRecords );
+ }
+ catch ( RepositoryIndexException e )
+ {
+ // NOTE(review): index-update failures are deliberately swallowed so a broken
+ // index never aborts the purge itself; consider at least logging the exception.
+ }
}
/**
* <p>
* This find support files for the artifactFile and deletes them.
* </p>
- *
* <p>
* Support Files are things like ".sha1", ".md5", ".asc", etc.
* </p>
throws ArchivaDatabaseException, LayoutException
{
ArtifactReference artifact = repository.toArtifactReference( path );
- ArchivaArtifact queriedArtifact = artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(),
- artifact.getVersion(), artifact.getClassifier(),
- artifact.getType() );
+ ArchivaArtifact queriedArtifact =
+ artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
+ artifact.getClassifier(), artifact.getType() );
artifactDao.deleteArtifact( queriedArtifact );
// TODO [MRM-37]: re-run the database consumers to clean up
}
+
+ /**
+ * Removes the given records from the file-content, hashcodes and bytecode indices.
+ *
+ * NOTE(review): assumes the indices map contains entries for "filecontent",
+ * "hashcodes" and "bytecode"; a missing entry causes a NullPointerException.
+ */
+ private void updateIndices( List<LuceneRepositoryContentRecord> fileContentRecords,
+ List<LuceneRepositoryContentRecord> hashcodeRecords,
+ List<LuceneRepositoryContentRecord> bytecodeRecords )
+ throws RepositoryIndexException
+ {
+ RepositoryContentIndex index = indices.get( "filecontent" );
+ index.deleteRecords( fileContentRecords );
+
+ index = indices.get( "hashcodes" );
+ index.deleteRecords( hashcodeRecords );
+
+ index = indices.get( "bytecode" );
+ index.deleteRecords( bytecodeRecords );
+ }
}
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.ProjectReference;
import org.apache.maven.archiva.model.VersionedReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
/**
* <p>
private MetadataTools metadataTools;
+ /**
+ * @param indices lucene content indices, keyed by "filecontent", "hashcodes" and "bytecode"
+ */
public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- MetadataTools metadataTools )
+ MetadataTools metadataTools, Map<String, RepositoryContentIndex> indices )
{
- super( repository, artifactDao );
+ super( repository, artifactDao, indices );
this.metadataTools = metadataTools;
}
import org.apache.commons.lang.time.DateUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ContentNotFoundException;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
+import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
private int daysOlder;
+ /**
+ * @param indices lucene content indices, keyed by "filecontent", "hashcodes" and "bytecode"
+ */
public DaysOldRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- int daysOlder )
+ int daysOlder, Map<String, RepositoryContentIndex> indices )
{
- super( repository, artifactDao );
+ super( repository, artifactDao, indices );
this.daysOlder = daysOlder;
timestampParser = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
timestampParser.setTimeZone( DateUtils.UTC_TIME_ZONE );
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
+import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.codehaus.plexus.registry.RegistryListener;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
/**
* Consumer for removing old snapshots in the repository based on the criteria
private RepositoryPurge cleanUp;
private boolean deleteReleasedSnapshots;
+
+ /**
+ * @plexus.requirement role-hint="lucene"
+ */
+ private RepositoryContentIndexFactory indexFactory;
public String getId()
{
{
try
{
+ Map<String, RepositoryContentIndex> indices = new HashMap<String, RepositoryContentIndex>();
+ indices.put( "bytecode", indexFactory.createBytecodeIndex( repository ) );
+ indices.put( "hashcodes", indexFactory.createHashcodeIndex( repository ) );
+ indices.put( "filecontent", indexFactory.createFileContentIndex( repository ) );
+
ManagedRepositoryContent repositoryContent = repositoryFactory.getManagedRepositoryContent( repository
.getId() );
if ( repository.getDaysOlder() != 0 )
{
repoPurge = new DaysOldRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
- .getDaysOlder() );
+ .getDaysOlder(), indices );
}
else
{
repoPurge = new RetentionCountRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
- .getRetentionCount() );
+ .getRetentionCount(), indices );
}
cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, dao.getArtifactDAO(),
- metadataTools );
+ metadataTools, indices );
deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
}
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.VersionedReference;
import org.apache.maven.archiva.repository.ContentNotFoundException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
import java.util.Set;
/**
private int retentionCount;
+ /**
+ * @param indices lucene content indices, keyed by "filecontent", "hashcodes" and "bytecode"
+ */
public RetentionCountRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- int retentionCount )
+ int retentionCount, Map<String, RepositoryContentIndex> indices )
{
- super( repository, artifactDao );
+ super( repository, artifactDao, indices );
this.retentionCount = retentionCount;
}
*/
import org.apache.commons.io.FileUtils;
+import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.custommonkey.xmlunit.XMLAssert;
import java.io.File;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
public class CleanupReleasedSnapshotsRepositoryPurgeTest
extends AbstractRepositoryPurgeTest
-{
+{
protected void setUp()
throws Exception
{
super.setUp();
+ // Stub out all three content indices so the purge can "update" them
+ // without a real Lucene store on disk.
+ Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
+ map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
+ map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
+ map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
+
MetadataTools metadataTools = (MetadataTools) lookup( MetadataTools.class );
- repoPurge = new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), dao, metadataTools );
+ repoPurge = new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), dao, metadataTools, map );
}
public void testReleasedSnapshots()
populateReleasedSnapshotsTest();
String repoRoot = prepareTestRepo();
+
repoPurge.process( PATH_TO_RELEASED_SNAPSHOT );
import java.io.File;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+
+import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
{
super.setUp();
+ Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
+ map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
+ map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
+ map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
+
repoPurge =
- new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration().getDaysOlder() );
+ new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration().getDaysOlder(), map );
}
private void setLastModified( String dirPath )
*/
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+
+import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
/**
* Test RetentionsCountRepositoryPurgeTest
{
super.setUp();
+ Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
+ map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
+ map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
+ map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
+
repoPurge = new RetentionCountRepositoryPurge( getRepository(), dao,
- getRepoConfiguration().getRetentionCount() );
+ getRepoConfiguration().getRetentionCount(), map );
}
/**
--- /dev/null
+package org.apache.maven.archiva.consumers.core.repository.stubs;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
+import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
+
+/**
+ * LuceneRepositoryContentIndexFactoryStub - test stub factory that hands out
+ * {@link LuceneRepositoryContentIndexStub} instances instead of real Lucene indices.
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version
+ */
+public class LuceneRepositoryContentIndexFactoryStub
+ implements RepositoryContentIndexFactory
+{
+
+ public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
+ {
+ // always hand out a fresh stub index; the repository argument is ignored
+ return new LuceneRepositoryContentIndexStub();
+ }
+
+ public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
+ {
+ // always hand out a fresh stub index; the repository argument is ignored
+ return new LuceneRepositoryContentIndexStub();
+ }
+
+ public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
+ {
+ // always hand out a fresh stub index; the repository argument is ignored
+ return new LuceneRepositoryContentIndexStub();
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.consumers.core.repository.stubs;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.util.Collection;
+
+import junit.framework.Assert;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Searchable;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.indexer.RepositoryContentIndex;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
+import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
+
+/**
+ * Stub {@link RepositoryContentIndex} for the repository purge tests: it stores
+ * nothing and only verifies the record batches handed to {@link #deleteRecords}.
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version
+ */
+public class LuceneRepositoryContentIndexStub
+ implements RepositoryContentIndex
+{
+
+ public void deleteRecords( Collection records )
+ throws RepositoryIndexException
+ {
+ // NOTE(review): hard-codes the batch size the current purge tests are
+ // expected to delete; any new test purging a different number of
+ // artifacts will fail here.
+ Assert.assertEquals( 2, records.size() );
+ }
+
+ public boolean exists()
+ throws RepositoryIndexException
+ {
+ // pretend the index never exists on disk
+ return false;
+ }
+
+ public Collection getAllRecordKeys()
+ throws RepositoryIndexException
+ {
+ // unused by the purge tests
+ return null;
+ }
+
+ public Analyzer getAnalyzer()
+ {
+ // unused by the purge tests
+ return null;
+ }
+
+ public LuceneEntryConverter getEntryConverter()
+ {
+ // unused by the purge tests
+ return null;
+ }
+
+ public String getId()
+ {
+ // unused by the purge tests
+ return null;
+ }
+
+ public File getIndexDirectory()
+ {
+ // unused by the purge tests
+ return null;
+ }
+
+ public QueryParser getQueryParser()
+ {
+ // unused by the purge tests
+ return null;
+ }
+
+ public ManagedRepositoryConfiguration getRepository()
+ {
+ // unused by the purge tests
+ return null;
+ }
+
+ public Searchable getSearchable()
+ throws RepositoryIndexSearchException
+ {
+ // unused by the purge tests
+ return null;
+ }
+
+ public void indexRecords( Collection records )
+ throws RepositoryIndexException
+ {
+ // no-op for the purge tests
+
+ }
+
+ public void modifyRecord( LuceneRepositoryContentRecord record )
+ throws RepositoryIndexException
+ {
+ // no-op for the purge tests
+
+ }
+
+ public void modifyRecords( Collection records )
+ throws RepositoryIndexException
+ {
+ // no-op for the purge tests
+
+ }
+
+}
<requirement>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
</requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
+ <role-hint>lucene</role-hint>
+ <field-name>indexFactory</field-name>
+ </requirement>
</requirements>
<configuration>
<id>repository-purge</id>
<requirement>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
</requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
+ <role-hint>lucene</role-hint>
+ <field-name>indexFactory</field-name>
+ </requirement>
</requirements>
<configuration>
<id>repository-purge</id>
</otherProperties>
</configuration>
</component>
+
+ <!-- LuceneRepositoryIndexFactory -->
+ <component>
+ <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
+ <role-hint>lucene</role-hint>
+ <implementation>org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexFactoryStub</implementation>
+ </component>
</components>
</component-set>
public void modifyRecords( Collection records )
throws RepositoryIndexException
{
- IndexModifier indexModifier = null;
- try
+ synchronized( repository )
{
- indexModifier = new IndexModifier( indexLocation, indexHandlers.getAnalyzer(), !exists() );
- indexModifier.setMaxFieldLength( MAX_FIELD_LENGTH );
-
- for ( Iterator i = records.iterator(); i.hasNext(); )
+ IndexModifier indexModifier = null;
+ try
{
- LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
-
- if ( record != null )
+ indexModifier = new IndexModifier( indexLocation, indexHandlers.getAnalyzer(), !exists() );
+ indexModifier.setMaxFieldLength( MAX_FIELD_LENGTH );
+
+ for ( Iterator i = records.iterator(); i.hasNext(); )
{
- Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
-
- indexModifier.deleteDocuments( term );
-
- Document document = indexHandlers.getConverter().convert( record );
-
- indexModifier.addDocument( document );
+ LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
+
+ if ( record != null )
+ {
+ Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
+
+ indexModifier.deleteDocuments( term );
+
+ Document document = indexHandlers.getConverter().convert( record );
+
+ indexModifier.addDocument( document );
+ }
}
+ indexModifier.optimize();
}
- indexModifier.optimize();
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( indexModifier );
- }
- }
-
- public void modifyRecord( LuceneRepositoryContentRecord record )
- throws RepositoryIndexException
- {
- IndexModifier indexModifier = null;
- try
- {
- indexModifier = new IndexModifier( indexLocation, indexHandlers.getAnalyzer(), !exists() );
- indexModifier.setMaxFieldLength( MAX_FIELD_LENGTH );
-
- if ( record != null )
+ catch ( IOException e )
{
- Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
-
- indexModifier.deleteDocuments( term );
-
- Document document = indexHandlers.getConverter().convert( record );
-
- indexModifier.addDocument( document );
+ throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexModifier );
}
- indexModifier.optimize();
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( indexModifier );
}
}
- private void addRecords( Collection records )
+ public void modifyRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
- IndexWriter indexWriter;
- try
- {
- indexWriter = new IndexWriter( indexLocation, indexHandlers.getAnalyzer(), !exists() );
- indexWriter.setMaxFieldLength( MAX_FIELD_LENGTH );
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Unable to open index", e );
- }
-
- try
+ synchronized( repository )
{
- for ( Iterator i = records.iterator(); i.hasNext(); )
+ IndexModifier indexModifier = null;
+ try
{
- LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
-
+ indexModifier = new IndexModifier( indexLocation, indexHandlers.getAnalyzer(), !exists() );
+ indexModifier.setMaxFieldLength( MAX_FIELD_LENGTH );
+
if ( record != null )
{
+ Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
+
+ indexModifier.deleteDocuments( term );
+
Document document = indexHandlers.getConverter().convert( record );
-
- indexWriter.addDocument( document );
+
+ indexModifier.addDocument( document );
}
+ indexModifier.optimize();
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexModifier );
}
-
- indexWriter.optimize();
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Failed to add an index document", e );
- }
- finally
- {
- closeQuietly( indexWriter );
}
}
+
- public void deleteRecords( Collection records )
+ private void addRecords( Collection records )
throws RepositoryIndexException
{
- if ( exists() )
+ synchronized( repository )
{
- IndexReader indexReader = null;
+ IndexWriter indexWriter;
+ try
+ {
+ indexWriter = new IndexWriter( indexLocation, indexHandlers.getAnalyzer(), !exists() );
+ indexWriter.setMaxFieldLength( MAX_FIELD_LENGTH );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Unable to open index", e );
+ }
+
try
{
- indexReader = IndexReader.open( indexLocation );
-
for ( Iterator i = records.iterator(); i.hasNext(); )
{
LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
-
+
if ( record != null )
{
- Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
-
- indexReader.deleteDocuments( term );
+ Document document = indexHandlers.getConverter().convert( record );
+
+ indexWriter.addDocument( document );
}
}
+
+ indexWriter.optimize();
}
catch ( IOException e )
{
- throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
+ throw new RepositoryIndexException( "Failed to add an index document", e );
}
finally
{
- closeQuietly( indexReader );
+ closeQuietly( indexWriter );
}
}
}
+ public void deleteRecords( Collection records )
+ throws RepositoryIndexException
+ {
+ synchronized( repository )
+ {
+ if ( exists() )
+ {
+ IndexReader indexReader = null;
+ try
+ {
+ indexReader = IndexReader.open( indexLocation );
+
+ for ( Iterator i = records.iterator(); i.hasNext(); )
+ {
+ LuceneRepositoryContentRecord record = (LuceneRepositoryContentRecord) i.next();
+
+ if ( record != null )
+ {
+ Term term = new Term( LuceneDocumentMaker.PRIMARY_KEY, record.getPrimaryKey() );
+
+ indexReader.deleteDocuments( term );
+ }
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexReader );
+ }
+ }
+ }
+ }
+
public Collection getAllRecordKeys()
throws RepositoryIndexException
{
private List getAllFieldValues( String fieldName )
throws RepositoryIndexException
{
- List keys = new ArrayList();
-
- if ( exists() )
+ synchronized( repository )
{
- IndexReader indexReader = null;
- TermEnum terms = null;
- try
+ List keys = new ArrayList();
+
+ if ( exists() )
{
- indexReader = IndexReader.open( indexLocation );
-
- terms = indexReader.terms( new Term( fieldName, "" ) );
- while ( fieldName.equals( terms.term().field() ) )
+ IndexReader indexReader = null;
+ TermEnum terms = null;
+ try
{
- keys.add( terms.term().text() );
-
- if ( !terms.next() )
+ indexReader = IndexReader.open( indexLocation );
+
+ terms = indexReader.terms( new Term( fieldName, "" ) );
+ while ( fieldName.equals( terms.term().field() ) )
{
- break;
+ keys.add( terms.term().text() );
+
+ if ( !terms.next() )
+ {
+ break;
+ }
}
}
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexReader );
+ closeQuietly( terms );
+ }
}
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( indexReader );
- closeQuietly( terms );
- }
+ return keys;
}
- return keys;
}
public Searchable getSearchable()