artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
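+ // 123456789 ms is early January 1970, so artifact-two below is gathered well before 'whenGathered' and should be excluded by the date filter (hence the expected count drops from 5 to 4).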
+ Date olderWhenGathered = new Date( 123456789 );
+
artifact = createArtifact( "org.apache.archiva", "artifact-two", "1.1-SNAPSHOT" );
- artifact.getModel().setWhenGathered( whenGathered );
+ artifact.getModel().setWhenGathered( olderWhenGathered );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "org.apache.archiva", "artifact-three", "2.0-beta-1" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
- assertConstraint( "Artifacts By Repository and When Gathered", 5,
- new ArtifactsByRepositoryConstraint( "test-repo" ) );
+ assertConstraint( "Artifacts By Repository and When Gathered", 4,
+ new ArtifactsByRepositoryConstraint( "test-repo", whenGathered, "repositoryId" ) );
}
private void assertConstraint( String msg, int count, ArtifactsByRepositoryConstraint constraint )
public static String DEFAULT_LANGUAGE = "en-us";
- private String DEFAULT_LINK = "http://localhost:8080/archiva/rss/";
+ //private String DEFAULT_LINK = "http://localhost:8080/archiva/rss/";
- public SyndFeed generateFeed( String title, String description, List<RssFeedEntry> dataEntries,
- String queryString )
+ public SyndFeed generateFeed( String title, String description, List<RssFeedEntry> dataEntries )
{
SyndFeed feed = new SyndFeedImpl();
feed.setTitle( title );
- feed.setLink( DEFAULT_LINK + queryString );
+ //feed.setLink( DEFAULT_LINK + queryString );
feed.setDescription( description );
feed.setLanguage( DEFAULT_LANGUAGE );
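+ // The last entry's timestamp is used as the feed's published date (entries are assumed to be in chronological order).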
feed.setPublishedDate( dataEntries.get( dataEntries.size() - 1 ).getPublishedDate() );
import java.util.Map;
import org.apache.archiva.rss.RssFeedEntry;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.model.ArchivaArtifact;
import com.sun.syndication.feed.synd.SyndFeed;
public abstract class AbstractArtifactsRssFeedProcessor
implements RssFeedProcessor
{
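+ // Database errors are not handled here; ArchivaDatabaseException propagates to the caller (e.g. the RSS feed servlet).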
- public abstract SyndFeed process( Map<String, String> reqParams );
+ public abstract SyndFeed process( Map<String, String> reqParams ) throws ArchivaDatabaseException;
protected List<RssFeedEntry> processData( List<ArchivaArtifact> artifacts, boolean isRepoLevel )
{
public class NewArtifactsRssFeedProcessor
extends AbstractArtifactsRssFeedProcessor
{
- public static int numberOfDaysBeforeNow = 100;
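+ // Window (in days) used when selecting "new" artifacts; adjustable via setNumberOfDaysBeforeNow().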
+ private int numberOfDaysBeforeNow = 30;
- private String title = "New Artifacts in Repository ";
+ private static final String title = "New Artifacts in Repository ";
- private String desc = "These are the new artifacts found in the repository ";
+ private static final String desc = "These are the new artifacts found in the repository ";
/**
* @plexus.requirement
* Process the newly discovered artifacts in the repository. Generate feeds for new artifacts in the repository and
* new versions of artifact.
*/
- public SyndFeed process( Map<String, String> reqParams )
+ public SyndFeed process( Map<String, String> reqParams ) throws ArchivaDatabaseException
{
log.debug( "Process new artifacts into rss feeds." );
return null;
}
- private SyndFeed processNewArtifactsInRepo( String repoId )
+ private SyndFeed processNewArtifactsInRepo( String repoId ) throws ArchivaDatabaseException
{
- try
- {
- Calendar greaterThanThisDate = Calendar.getInstance( DateUtils.UTC_TIME_ZONE );
- greaterThanThisDate.add( Calendar.DATE, -numberOfDaysBeforeNow );
-
- Constraint artifactsByRepo = new ArtifactsByRepositoryConstraint( repoId, greaterThanThisDate.getTime(), "whenGathered" );
- List<ArchivaArtifact> artifacts = artifactDAO.queryArtifacts( artifactsByRepo );
+
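+ // Cut-off: only artifacts gathered within the last numberOfDaysBeforeNow days (UTC) are treated as new.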
+ Calendar greaterThanThisDate = Calendar.getInstance( DateUtils.UTC_TIME_ZONE );
+ greaterThanThisDate.add( Calendar.DATE, -( getNumberOfDaysBeforeNow() ) );
+
+ Constraint artifactsByRepo = new ArtifactsByRepositoryConstraint( repoId, greaterThanThisDate.getTime(), "whenGathered" );
+ List<ArchivaArtifact> artifacts = artifactDAO.queryArtifacts( artifactsByRepo );
- List<RssFeedEntry> entries = processData( artifacts, true );
+ List<RssFeedEntry> entries = processData( artifacts, true );
- return generator.generateFeed( getTitle() + "\'" + repoId + "\'", "New artifacts found in repository " +
- "\'" + repoId + "\'" + " during repository scan.", entries, "rss_feeds?repoId=" + repoId );
- }
- catch ( ArchivaDatabaseException ae )
- {
- log.error( ae.getMessage() );
- }
-
- return null;
+ return generator.generateFeed( getTitle() + "\'" + repoId + "\'", "New artifacts found in repository " +
+ "\'" + repoId + "\'" + " during repository scan.", entries );
}
public String getTitle()
{
this.artifactDAO = artifactDAO;
}
+
+ public int getNumberOfDaysBeforeNow()
+ {
+ return numberOfDaysBeforeNow;
+ }
+
+ public void setNumberOfDaysBeforeNow( int numberOfDaysBeforeNow )
+ {
+ this.numberOfDaysBeforeNow = numberOfDaysBeforeNow;
+ }
+
}
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.Constraint;
-import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.ArtifactVersionsConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.slf4j.Logger;
public class NewVersionsOfArtifactRssFeedProcessor
extends AbstractArtifactsRssFeedProcessor
{
- private String title = "New Versions of Artifact ";
+ private static final String title = "New Versions of Artifact ";
- private String desc = "These are the new versions of artifact ";
+ private static final String desc = "These are the new versions of artifact ";
/**
* @plexus.requirement
/**
* Process all versions of the artifact which had a rss feed request.
*/
- public SyndFeed process( Map<String, String> reqParams )
+ public SyndFeed process( Map<String, String> reqParams ) throws ArchivaDatabaseException
{
String repoId = reqParams.get( RssFeedProcessor.KEY_REPO_ID );
String groupId = reqParams.get( RssFeedProcessor.KEY_GROUP_ID );
}
private SyndFeed processNewVersionsOfArtifact( String repoId, String groupId, String artifactId )
+ throws ArchivaDatabaseException
{
- try
- {
- Constraint artifactVersions = new ArtifactVersionsConstraint( repoId, groupId, artifactId, "whenGathered" );
- List<ArchivaArtifact> artifacts = artifactDAO.queryArtifacts( artifactVersions );
-
- List<RssFeedEntry> entries = processData( artifacts, false );
- String key = groupId + ":" + artifactId;
- return generator.generateFeed( getTitle() + "\'" + key + "\'", "New versions of artifact " + "\'" + key +
- "\' found in repository " + "\'" + repoId + "\'" + " during repository scan.", entries,
- "rss_feeds?groupId=" + groupId + "&artifactId=" + artifactId );
- }
- catch ( ObjectNotFoundException oe )
- {
- oe.printStackTrace();
- log.error( oe.getMessage() );
- }
- catch ( ArchivaDatabaseException ae )
- {
- ae.printStackTrace();
- log.error( ae.getMessage() );
- }
-
- return null;
+
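+ // All versions of the requested artifact in this repository; "whenGathered" is presumably the sort column used by the constraint.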
+ Constraint artifactVersions = new ArtifactVersionsConstraint( repoId, groupId, artifactId, "whenGathered" );
+ List<ArchivaArtifact> artifacts = artifactDAO.queryArtifacts( artifactVersions );
+
+ List<RssFeedEntry> entries = processData( artifacts, false );
+ String key = groupId + ":" + artifactId;
+
+ return generator.generateFeed( getTitle() + "\'" + key + "\'", "New versions of artifact " + "\'" + key +
+ "\' found in repository " + "\'" + repoId + "\'" + " during repository scan.", entries );
}
public String getTitle()
import java.util.Map;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
+
import com.sun.syndication.feed.synd.SyndFeed;
/**
public static final String KEY_ARTIFACT_ID = "artifactId";
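+ // May fail with ArchivaDatabaseException when the underlying artifact queries fail; callers are expected to handle it.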
- SyndFeed process( Map<String, String> reqParams );
+ SyndFeed process( Map<String, String> reqParams ) throws ArchivaDatabaseException;
}
entries.add( entry );
SyndFeed feed =
- generator.generateFeed( "Test Feed", "The test feed from Archiva.", entries, "generated-rss2.0-feed.xml" );
+ generator.generateFeed( "Test Feed", "The test feed from Archiva.", entries );
assertEquals( "Test Feed", feed.getTitle() );
- assertEquals( "http://localhost:8080/archiva/rss/generated-rss2.0-feed.xml", feed.getLink() );
+ //assertEquals( "http://localhost:8080/archiva/rss/generated-rss2.0-feed.xml", feed.getLink() );
assertEquals( "The test feed from Archiva.", feed.getDescription() );
assertEquals( "en-us", feed.getLanguage() );
assertEquals( entries.get( 2 ).getPublishedDate(), feed.getPublishedDate() );
SyndFeed feed = newArtifactsProcessor.process( reqParams );
assertTrue( feed.getTitle().equals( "New Artifacts in Repository 'test-repo'" ) );
- assertTrue( feed.getLink().equals( "http://localhost:8080/archiva/rss/rss_feeds?repoId=test-repo" ) );
+ //assertTrue( feed.getLink().equals( "http://localhost:8080/archiva/rss/rss_feeds?repoId=test-repo" ) );
assertTrue( feed.getDescription().equals(
"New artifacts found in repository 'test-repo' during repository scan." ) );
assertTrue( feed.getLanguage().equals( "en-us" ) );
SyndFeed feed = newVersionsProcessor.process( reqParams );
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two'", feed.getTitle() );
- assertEquals( "http://localhost:8080/archiva/rss/rss_feeds?groupId=org.apache.archiva&artifactId=artifact-two",
- feed.getLink() );
+ //assertEquals( "http://localhost:8080/archiva/rss/rss_feeds?groupId=org.apache.archiva&artifactId=artifact-two",
+ // feed.getLink() );
assertEquals(
"New versions of artifact 'org.apache.archiva:artifact-two' found in repository 'test-repo' during repository scan.",
feed.getDescription() );
import org.apache.commons.codec.Decoder;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.security.AccessDeniedException;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.apache.maven.archiva.security.ArchivaSecurityException;
SyndFeedOutput output = new SyndFeedOutput();
output.output( feed, res.getWriter() );
}
+ catch ( ArchivaDatabaseException e )
+ {
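+ // Database failures from the feed processors are reported as an internal server error.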
+ log.debug( COULD_NOT_GENERATE_FEED_ERROR, e );
+ res.sendError( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, COULD_NOT_GENERATE_FEED_ERROR );
+ }
catch ( UserNotFoundException unfe )
{
- log.error( COULD_NOT_AUTHENTICATE_USER, unfe );
+ log.debug( COULD_NOT_AUTHENTICATE_USER, unfe );
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
}
catch ( AccountLockedException acce )
- {
- log.error( COULD_NOT_AUTHENTICATE_USER, acce );
+ {
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
}
catch ( AuthenticationException authe )
- {
- authe.printStackTrace();
- log.error( COULD_NOT_AUTHENTICATE_USER, authe );
+ {
+ log.debug( COULD_NOT_AUTHENTICATE_USER, authe );
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
}
catch ( FeedException ex )
{
- log.error( COULD_NOT_GENERATE_FEED_ERROR, ex );
+ log.debug( COULD_NOT_GENERATE_FEED_ERROR, ex );
res.sendError( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, COULD_NOT_GENERATE_FEED_ERROR );
}
catch ( MustChangePasswordException e )
- {
- log.error( COULD_NOT_AUTHENTICATE_USER, e );
+ {
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
}
catch ( UnauthorizedException e )
{
- log.error( e.getMessage() );
+ log.debug( e.getMessage() );
if ( repoId != null )
{
res.setHeader("WWW-Authenticate", "Basic realm=\"Repository Archiva Managed " + repoId + " Repository" );
}
catch ( DecoderException ie )
{
- log.error( "Error decoding username and password.", ie.getMessage() );
+ log.warn( "Error decoding username and password.", ie.getMessage() );
}
if ( usernamePassword == null || usernamePassword.trim().equals( "" ) )
}
catch ( AuthorizationException e )
{
- log.error( "Fatal Authorization Subsystem Error." );
+ log.debug( "Fatal Authorization Subsystem Error.", e );
}
catch ( UnauthorizedException e )
{
- log.error( e.getMessage() );
+ log.debug( e.getMessage() );
}
}