<name>Archiva Web :: RSS</name>
<url>http://maven.apache.org</url>
<dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-model</artifactId>
+ </dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
private String guid;
private String source;
+
+ public RssFeedEntry()
+ {
+
+ }
+
+ public RssFeedEntry( String title, String link )
+ {
+ this.title = title;
+ this.link = link;
+ }
public String getTitle()
{
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.feed.synd.SyndFeedImpl;
import com.sun.syndication.io.FeedException;
+import com.sun.syndication.io.SyndFeedInput;
import com.sun.syndication.io.SyndFeedOutput;
+import com.sun.syndication.io.XmlReader;
/**
* Generates RSS feeds.
public static String DEFAULT_FEEDTYPE = "rss_2.0";
public static String DEFAULT_LANGUAGE = "en-us";
+
+ /**
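+ * Directory where the generated feed files are written to, and read back from when appending entries.
+ *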
+ * @plexus.configuration default-value="${appserver.base}/data/rss"
+ */
+ private String rssDirectory;
public void generateFeed( String title, String link, String description, List<RssFeedEntry> dataEntries,
- File outputFile )
- {
- SyndFeed feed = new SyndFeedImpl();
- feed.setFeedType( DEFAULT_FEEDTYPE );
+ String outputFilename )
+ {
+ File outputFile = new File( rssDirectory, outputFilename );
+ SyndFeed feed = null;
+ List<SyndEntry> existingEntries = null;
+
+ if( outputFile.exists() )
+ {
+ try
+ {
+ SyndFeedInput input = new SyndFeedInput();
+ feed = input.build( new XmlReader( outputFile ) );
+ existingEntries = feed.getEntries();
+ }
+ catch ( IOException ie )
+ {
+ log.error( "Error occurred while reading existing feed : " + ie.getLocalizedMessage() );
+ }
+ catch ( FeedException fe )
+ {
+ log.error( "Error occurred while reading existing feed : " + fe.getLocalizedMessage() );
+ }
+ }
+
+ // fall back to a fresh feed if the file does not exist or could not be read
+ if( feed == null )
+ {
+ feed = new SyndFeedImpl();
+
+ feed.setTitle( title );
+ feed.setLink( link );
+ feed.setDescription( description );
+ feed.setLanguage( DEFAULT_LANGUAGE );
+ }
- feed.setTitle( title );
- feed.setLink( link );
- feed.setDescription( description );
- feed.setLanguage( DEFAULT_LANGUAGE );
+ feed.setFeedType( DEFAULT_FEEDTYPE );
feed.setPublishedDate( Calendar.getInstance().getTime() );
-
- feed.setEntries( getEntries( dataEntries ) );
-
+ feed.setEntries( getEntries( dataEntries, existingEntries ) );
+
try
- {
+ {
Writer writer = new FileWriter( outputFile );
SyndFeedOutput output = new SyndFeedOutput();
output.output( feed, writer );
}
}
- private List<SyndEntry> getEntries( List<RssFeedEntry> dataEntries )
- {
- List<SyndEntry> entries = new ArrayList<SyndEntry>();
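+ /**
+ * Converts the data entries to Rome SyndEntry objects, appending them to any entries read from a previously
+ * generated feed so that earlier items are preserved.
+ */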
+ private List<SyndEntry> getEntries( List<RssFeedEntry> dataEntries, List<SyndEntry> existingEntries )
+ {
+ List<SyndEntry> entries = existingEntries;
+ if( entries == null )
+ {
+ entries = new ArrayList<SyndEntry>();
+ }
+
SyndEntry entry;
SyndContent description;
return entries;
}
+ public void setRssDirectory( String rssDirectory )
+ {
+ this.rssDirectory = rssDirectory;
+ }
}
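With this change a caller no longer passes a File; it passes a filename that is resolved against the configured rssDirectory, and entries are appended when that file already exists. A minimal sketch of a consuming component, assuming Plexus injection as used elsewhere in this patch (the class name, feed values, and filename are purely illustrative):

import java.util.ArrayList;
import java.util.List;

import org.apache.archiva.rss.RssFeedEntry;
import org.apache.archiva.rss.RssFeedGenerator;

public class ExampleFeedClient
{
    /**
     * @plexus.requirement
     */
    private RssFeedGenerator generator;

    public void publish()
    {
        List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();

        RssFeedEntry entry = new RssFeedEntry( "Item 1", "http://localhost:8080/archiva" );
        entry.setDescription( "An example feed item." );
        entries.add( entry );

        // written to ${rssDirectory}/example-feed.xml, or appended to it if the file already exists
        generator.generateFeed( "Example Feed", "http://localhost:8080/archiva", "An example feed.", entries,
                                "example-feed.xml" );
    }
}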
--- /dev/null
+package org.apache.archiva.rss.processor;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.archiva.rss.RssFeedEntry;
+import org.apache.archiva.rss.RssFeedGenerator;
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+
+/**
+ * Process new artifacts in the repository and generate RSS feeds.
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version
+ * @plexus.component role="org.apache.archiva.rss.processor.RssFeedProcessor" role-hint="new-artifacts"
+ */
+public class NewArtifactsRssFeedProcessor
+ implements RssFeedProcessor
+{
+ public static final String NEW_ARTIFACTS_IN_REPO = "New Artifacts in Repository ";
+
+ public static final String NEW_VERSIONS_OF_ARTIFACT = "New Versions of Artifact ";
+
+ /**
+ * @plexus.requirement
+ */
+ private RssFeedGenerator generator;
+
+ /**
+ * Process the newly discovered artifacts in the repository. Generate feeds for new artifacts in the repository and
+ * for new versions of existing artifacts.
+ */
+ public void process( List<ArchivaArtifact> data )
+ {
+ processNewArtifactsInRepo( data );
+ processNewVersionsOfArtifact( data );
+ }
+
+ private void processNewArtifactsInRepo( List<ArchivaArtifact> data )
+ {
+ List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
+ String repoId = getRepoId( data );
+
+ RssFeedEntry entry =
+ new RssFeedEntry( NEW_ARTIFACTS_IN_REPO + "\'" + repoId + "\'" + " as of " +
+ Calendar.getInstance().getTime(), "http://localhost:8080/archiva/repository/" + repoId );
+ String description = "These are the new artifacts found in repository " + "\'" + repoId + "\'" + ": \n";
+
+ for ( ArchivaArtifact artifact : data )
+ {
+ description = description + artifact.toString() + "\n";
+ }
+ entry.setDescription( description );
+ entries.add( entry );
+
+ generateFeed( "new_artifacts_" + repoId + ".xml", NEW_ARTIFACTS_IN_REPO + "\'" + repoId + "\'",
+ "http://localhost:8080/archiva/repository/" + repoId, "New artifacts found in repository " +
+ "\'" + repoId + "\'" + " during repository scan.", entries );
+ }
+
+ private void processNewVersionsOfArtifact( List<ArchivaArtifact> data )
+ {
+ String repoId = getRepoId( data );
+
+ List<String> artifacts = new ArrayList<String>();
+
+ for ( ArchivaArtifact artifact : data )
+ {
+ artifacts.add( artifact.toString() );
+ }
+
+ Collections.sort( artifacts );
+
+ Map<String, String> artifactsMap = toMap( artifacts );
+
+ for ( String key : artifactsMap.keySet() )
+ {
+ List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
+ String artifactPath = getArtifactPath( key );
+ RssFeedEntry entry =
+ new RssFeedEntry( NEW_VERSIONS_OF_ARTIFACT + "\'" + key + "\'" + " as of " +
+ Calendar.getInstance().getTime(), "http://localhost:8080/archiva/repository/" + repoId + "/" +
+ artifactPath );
+
+ String description =
+ "These are the new versions of artifact " + "\'" + key + "\'" + " in the repository: \n" +
+ StringUtils.replace( artifactsMap.get( key ), "|", "\n" );
+
+ entry.setDescription( description );
+ entries.add( entry );
+
+ generateFeed( "new_versions_" + repoId + "_" + key + ".xml", NEW_VERSIONS_OF_ARTIFACT + "\'" + key + "\'",
+ "http://localhost:8080/archiva/repository/" + repoId + "/" + artifactPath,
+ "New versions of artifact " + "\'" + key + "\' found in repository " + "\'" + repoId + "\'" +
+ " during repository scan.", entries );
+ }
+ }
+
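+ /**
+ * All artifacts from a single scan are expected to belong to the same repository, so the repository id is
+ * taken from the first artifact in the list.
+ */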
+ private String getRepoId( List<ArchivaArtifact> data )
+ {
+ String repoId = "";
+ if ( !data.isEmpty() )
+ {
+ repoId = data.get( 0 ).getModel().getRepositoryId();
+ }
+
+ return repoId;
+ }
+
+ private void generateFeed( String filename, String title, String link, String description,
+ List<RssFeedEntry> dataEntries )
+ {
+ generator.generateFeed( title, link, description, dataEntries, filename );
+ }
+
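+ /**
+ * Groups the artifact ids by their "groupId:artifactId" prefix; the full ids of all versions sharing a key
+ * are joined into a single '|'-separated value.
+ */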
+ private Map<String, String> toMap( List<String> artifacts )
+ {
+ Map<String, String> artifactsMap = new HashMap<String, String>();
+ for ( String id : artifacts )
+ {
+ String key = StringUtils.substringBefore( id, ":" );
+ key = key + ":" + StringUtils.substringBefore( StringUtils.substringAfter( id, ":" ), ":" );
+
+ String value = artifactsMap.get( key );
+ if ( value != null )
+ {
+ value = value + "|" + id;
+ }
+ else
+ {
+ value = id;
+ }
+ artifactsMap.put( key, value );
+ }
+
+ return artifactsMap;
+ }
+
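+ /**
+ * Converts a "groupId:artifactId" key into a repository path, e.g. "org.apache.archiva:artifact-one"
+ * becomes "org/apache/archiva/artifact-one".
+ */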
+ private String getArtifactPath( String key )
+ {
+ return StringUtils.replace( StringUtils.replace( key, ".", "/" ), ":", "/" );
+ }
+
+ public RssFeedGenerator getGenerator()
+ {
+ return generator;
+ }
+
+ public void setGenerator( RssFeedGenerator generator )
+ {
+ this.generator = generator;
+ }
+
+}
--- /dev/null
+package org.apache.archiva.rss.processor;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.List;
+
+import org.apache.maven.archiva.model.ArchivaArtifact;
+
+/**
+ * Process data that will be fed into the RssFeedGenerator.
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version
+ */
+public interface RssFeedProcessor
+{
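+ /**
+ * Processes the given artifacts and generates the corresponding RSS feeds.
+ */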
+ void process( List<ArchivaArtifact> data );
+}
public void testGenerateFeed()
throws Exception
- {
- File outputFile = new File( getBasedir(), "/target/test-classes/rss-feeds/generated-rss2.0-feed.xml" );
-
+ {
+ generator.setRssDirectory( getBasedir() + "/target/test-classes/rss-feeds/" );
+
List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
- RssFeedEntry entry = new RssFeedEntry();
+ RssFeedEntry entry = new RssFeedEntry( "Item 1", "http://rss-2.0-test-feed.com" );
- entry.setTitle( "Item 1" );
- entry.setLink( "http://rss-2.0-test-feed.com" );
entry.setDescription( "RSS 2.0 feed item 1." );
entry.setGuid( "http://rss-2.0-test-feed.com/item1" );
entries.add( entry );
- entry = new RssFeedEntry();
- entry.setTitle( "Item 2" );
- entry.setLink( "http://rss-2.0-test-feed.com" );
+ entry = new RssFeedEntry( "Item 2", "http://rss-2.0-test-feed.com" );
entry.setDescription( "RSS 2.0 feed item 2." );
entry.setGuid( "http://rss-2.0-test-feed.com/item2" );
entries.add( entry );
- entry = new RssFeedEntry();
- entry.setTitle( "Item 3" );
- entry.setLink( "http://rss-2.0-test-feed.com" );
+ entry = new RssFeedEntry( "Item 3", "http://rss-2.0-test-feed.com" );
entry.setDescription( "RSS 2.0 feed item 3." );
entry.setGuid( "http://rss-2.0-test-feed.com/item3" );
entries.add( entry );
generator.generateFeed( "Test Feed", "http://localhost:8080/archiva", "The test feed from Archiva.", entries,
- outputFile );
+ "generated-rss2.0-feed.xml" );
+ File outputFile = new File( getBasedir(), "/target/test-classes/rss-feeds/generated-rss2.0-feed.xml" );
String generatedContent = FileUtils.readFileToString( outputFile );
XMLAssert.assertXpathEvaluatesTo( "Test Feed", "//channel/title", generatedContent );
"<channel><item><title>Item 1</title></item><item><title>Item 2</title></item>"
+ "<item><title>Item 3</title></item></channel>";
XMLAssert.assertXpathsEqual( "//channel/item/title", expectedItem1, "//channel/item/title", generatedContent );
+
+ outputFile.deleteOnExit();
}
}
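The test above only covers a freshly generated feed; the append path (an existing file read back through SyndFeedInput) could be exercised with a follow-up test along these lines. This is a sketch only, reusing the generator field and imports of RssFeedGeneratorTest, with an illustrative file name and assertions:

public void testAppendToExistingFeed()
    throws Exception
{
    generator.setRssDirectory( getBasedir() + "/target/test-classes/rss-feeds/" );

    // first pass: creates the feed file
    List<RssFeedEntry> entries = new ArrayList<RssFeedEntry>();
    RssFeedEntry entry = new RssFeedEntry( "Item 1", "http://rss-2.0-test-feed.com" );
    entry.setDescription( "RSS 2.0 feed item 1." );
    entries.add( entry );
    generator.generateFeed( "Test Feed", "http://localhost:8080/archiva", "The test feed from Archiva.", entries,
                            "appended-rss2.0-feed.xml" );

    // second pass: the existing file is read and the new entry appended
    entries = new ArrayList<RssFeedEntry>();
    entry = new RssFeedEntry( "Item 2", "http://rss-2.0-test-feed.com" );
    entry.setDescription( "RSS 2.0 feed item 2." );
    entries.add( entry );
    generator.generateFeed( "Test Feed", "http://localhost:8080/archiva", "The test feed from Archiva.", entries,
                            "appended-rss2.0-feed.xml" );

    File outputFile = new File( getBasedir(), "/target/test-classes/rss-feeds/appended-rss2.0-feed.xml" );
    String generatedContent = FileUtils.readFileToString( outputFile );

    // both the original and the appended item should be present in the feed
    assertTrue( generatedContent.indexOf( "Item 1" ) != -1 );
    assertTrue( generatedContent.indexOf( "Item 2" ) != -1 );

    outputFile.deleteOnExit();
}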
--- /dev/null
+package org.apache.archiva.rss.processor;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.archiva.rss.RssFeedGenerator;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.codehaus.plexus.PlexusTestCase;
+
+/**
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version
+ */
+public class NewArtifactsRssFeedProcessorTest
+ extends PlexusTestCase
+{
+ private RssFeedProcessor newArtifactsProcessor;
+
+ private String rssDirectory;
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ newArtifactsProcessor = (RssFeedProcessor) lookup( RssFeedProcessor.class, "new-artifacts" );
+ rssDirectory = getBasedir() + "/target/test-classes/rss-feeds/";
+
+ RssFeedGenerator generator = ( ( NewArtifactsRssFeedProcessor ) newArtifactsProcessor ).getGenerator();
+ generator.setRssDirectory( rssDirectory );
+ ( (NewArtifactsRssFeedProcessor) newArtifactsProcessor ).setGenerator( generator );
+ }
+
+ public void testProcess()
+ throws Exception
+ {
+ List<ArchivaArtifact> newArtifacts = new ArrayList<ArchivaArtifact>();
+
+ ArchivaArtifact artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-one", "1.0", "", "jar" );
+ artifact.getModel().setRepositoryId( "test-repo" );
+ newArtifacts.add( artifact );
+
+ artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-one", "1.1", "", "jar" );
+ artifact.getModel().setRepositoryId( "test-repo" );
+ newArtifacts.add( artifact );
+
+ artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-one", "2.0", "", "jar" );
+ artifact.getModel().setRepositoryId( "test-repo" );
+ newArtifacts.add( artifact );
+
+ artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.1", "", "jar" );
+ artifact.getModel().setRepositoryId( "test-repo" );
+ newArtifacts.add( artifact );
+
+ artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.2", "", "jar" );
+ artifact.getModel().setRepositoryId( "test-repo" );
+ newArtifacts.add( artifact );
+
+ artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-two", "1.0.3-SNAPSHOT", "", "jar" );
+ artifact.getModel().setRepositoryId( "test-repo" );
+ newArtifacts.add( artifact );
+
+ artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-three", "2.0-SNAPSHOT", "", "jar" );
+ artifact.getModel().setRepositoryId( "test-repo" );
+ newArtifacts.add( artifact );
+
+ artifact = new ArchivaArtifact( "org.apache.archiva", "artifact-four", "1.1-beta-2", "", "jar" );
+ artifact.getModel().setRepositoryId( "test-repo" );
+ newArtifacts.add( artifact );
+
+ newArtifactsProcessor.process( newArtifacts );
+
+ File outputFile = new File( rssDirectory, "new_artifacts_test-repo.xml" );
+ assertTrue( outputFile.exists() );
+
+ outputFile = new File( rssDirectory, "new_versions_test-repo_org.apache.archiva:artifact-one.xml" );
+ assertTrue( outputFile.exists() );
+
+ outputFile = new File( rssDirectory, "new_versions_test-repo_org.apache.archiva:artifact-two.xml" );
+ assertTrue( outputFile.exists() );
+
+ outputFile = new File( rssDirectory, "new_versions_test-repo_org.apache.archiva:artifact-three.xml" );
+ assertTrue( outputFile.exists() );
+
+ outputFile = new File( rssDirectory, "new_versions_test-repo_org.apache.archiva:artifact-four.xml" );
+ assertTrue( outputFile.exists() );
+ }
+}