import java.text.SimpleDateFormat;
import java.util.List;
+import java.util.Map;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
*/
private long totalSize = 0;
+ private Map<String, Long> consumerCounts;
+
+ private Map<String, Long> consumerTimings;
+
public void triggerStart()
{
startTimestamp = System.currentTimeMillis();
for ( String id : knownConsumers )
{
buf.append( "\n " ).append( id );
+ if ( consumerTimings.containsKey( id ) )
+ {
+ long time = consumerTimings.get( id );
+ buf.append( " (Total: " ).append( time ).append( "ms" );
+ if ( consumerCounts.containsKey( id ) )
+ {
+ long total = consumerCounts.get( id );
+ buf.append( "; Avg.: " + ( time / total ) + "; Count: " + total );
+ }
+ buf.append( ")" );
+ }
}
}
else
for ( String id : invalidConsumers )
{
buf.append( "\n " ).append( id );
+ if ( consumerTimings.containsKey( id ) )
+ {
+ long time = consumerTimings.get( id );
+ buf.append( " (Total: " ).append( time ).append( "ms" );
+ if ( consumerCounts.containsKey( id ) )
+ {
+ long total = consumerCounts.get( id );
+ buf.append( "; Avg.: " + ( time / total ) + "ms; Count: " + total );
+ }
+ buf.append( ")" );
+ }
}
}
else
{
return totalSize;
}
+
+ public void setConsumerCounts( Map<String, Long> consumerCounts )
+ {
+ this.consumerCounts = consumerCounts;
+ }
+
+ public void setConsumerTimings( Map<String, Long> consumerTimings )
+ {
+ this.consumerTimings = consumerTimings;
+ }
}
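
With both maps populated, each consumer id in the scan summary now carries a timing breakdown. A hypothetical sample line (assuming a consumer with id "update-db-artifact" that processed 100 files in 1500ms total):

    update-db-artifact (Total: 1500ms; Avg.: 15ms; Count: 100)
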
import java.io.File;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import org.apache.archiva.repository.scanner.functors.ConsumerProcessFileClosure;
import org.apache.archiva.repository.scanner.functors.TriggerBeginScanClosure;
private ConsumerWantsFilePredicate consumerWantsFile;
+ private Map<String, Long> consumerTimings;
+
+ private Map<String, Long> consumerCounts;
+
public RepositoryScannerInstance( ManagedRepositoryConfiguration repository,
List<KnownRepositoryContentConsumer> knownConsumerList,
List<InvalidRepositoryContentConsumer> invalidConsumerList )
this.knownConsumers = knownConsumerList;
this.invalidConsumers = invalidConsumerList;
+ consumerTimings = new HashMap<String, Long>();
+ consumerCounts = new HashMap<String, Long>();
+
this.consumerProcessFile = new ConsumerProcessFileClosure();
+ consumerProcessFile.setExecuteOnEntireRepo( true );
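+ // share the maps with the closure so per-consumer totals accumulate in place over the walk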
+ consumerProcessFile.setConsumerTimings( consumerTimings );
+ consumerProcessFile.setConsumerCounts( consumerCounts );
+
this.consumerWantsFile = new ConsumerWantsFilePredicate();
stats = new RepositoryScanStatistics();
return stats;
}
+
+ public Map<String, Long> getConsumerTimings()
+ {
+ return consumerTimings;
+ }
+
+ public Map<String, Long> getConsumerCounts()
+ {
+ return consumerCounts;
+ }
+
public void directoryWalkStarting( File basedir )
{
log.info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getLocation() );
// consume files regardless - the predicate will check the timestamp
BaseFile basefile = new BaseFile( repository.getLocation(), file );
-
+
// Timestamp finished points to the last successful scan, not this current one.
if ( file.lastModified() >= changesSince )
{
- stats.increaseNewFileCount();
+ stats.increaseNewFileCount();
}
-
+
consumerProcessFile.setBasefile( basefile );
- consumerProcessFile.setExecuteOnEntireRepo( true );
consumerWantsFile.setBasefile( basefile );
-
+
Closure processIfWanted = IfClosure.getInstance( consumerWantsFile, consumerProcessFile );
CollectionUtils.forAllDo( this.knownConsumers, processIfWanted );
-
+
if ( consumerWantsFile.getWantedFileCount() <= 0 )
{
// Nothing known processed this file. It is invalid!
CollectionUtils.forAllDo( knownConsumers, scanCompletedClosure );
CollectionUtils.forAllDo( invalidConsumers, scanCompletedClosure );
+ stats.setConsumerTimings( consumerTimings );
+ stats.setConsumerCounts( consumerCounts );
+
log.info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getLocation() );
stats.triggerFinished();
}
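
A minimal sketch of how a caller might read the new accessors once the walk has completed (hypothetical helper: the log field, the repository wiring, and a java.util.Map import are assumed):

    private void logConsumerTimings( RepositoryScannerInstance scan )
    {
        for ( Map.Entry<String, Long> entry : scan.getConsumerTimings().entrySet() )
        {
            Long count = scan.getConsumerCounts().get( entry.getKey() );
            log.info( entry.getKey() + ": " + entry.getValue() + "ms across "
                + count + " invocation(s)" );
        }
    }
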
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.HashMap;
+import java.util.Map;
+
/**
* ConsumerProcessFileClosure
*
private boolean executeOnEntireRepo;
+ private Map<String, Long> consumerTimings;
+
+ private Map<String, Long> consumerCounts;
+
public void execute( Object input )
{
if ( input instanceof RepositoryContentConsumer )
{
RepositoryContentConsumer consumer = (RepositoryContentConsumer) input;
+ String id = consumer.getId();
try
{
- log.debug( "Sending to consumer: " + consumer.getId() );
+ log.debug( "Sending to consumer: " + id );
+ long startTime = System.currentTimeMillis();
consumer.processFile( basefile.getRelativePath(), executeOnEntireRepo );
+ long endTime = System.currentTimeMillis();
+
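+ // accumulate wall-clock time spent in this consumer, keyed by id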
+ if ( consumerTimings != null )
+ {
+ Long value = consumerTimings.get( id );
+ consumerTimings.put( id, ( value != null ? value : 0 ) + endTime - startTime );
+ }
+
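+ // and bump the consumer's invocation count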
+ if ( consumerCounts != null )
+ {
+ Long value = consumerCounts.get( id );
+ consumerCounts.put( id, ( value != null ? value : 0 ) + 1 );
+ }
}
catch ( Exception e )
{
/* Intentionally Catch all exceptions.
* So that the discoverer processing can continue.
*/
- log.error( "Consumer [" + consumer.getId() + "] had an error when processing file ["
+ log.error( "Consumer [" + id + "] had an error when processing file ["
+ basefile.getAbsolutePath() + "]: " + e.getMessage(), e );
}
}
this.executeOnEntireRepo = executeOnEntireRepo;
}
+
+ public void setConsumerTimings( Map<String, Long> consumerTimings )
+ {
+ this.consumerTimings = consumerTimings;
+ }
+
+ public void setConsumerCounts( Map<String, Long> consumerCounts )
+ {
+ this.consumerCounts = consumerCounts;
+ }
+
public Logger getLogger()
{
return log;
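
The null-guarded put above is the whole accumulation strategy: a missing key starts at zero, and every later observation adds to the running total. A standalone illustration of the same pattern (hypothetical ids and durations; plain HashMap as in the closure):

    import java.util.HashMap;
    import java.util.Map;

    public class TimingAccumulationExample
    {
        public static void main( String[] args )
        {
            Map<String, Long> timings = new HashMap<String, Long>();

            record( timings, "index-content", 42 ); // first observation starts from zero
            record( timings, "index-content", 8 );  // later observations add to the total

            System.out.println( timings.get( "index-content" ) ); // prints 50
        }

        // same null-safe pattern as ConsumerProcessFileClosure.execute()
        private static void record( Map<String, Long> timings, String id, long elapsed )
        {
            Long previous = timings.get( id );
            timings.put( id, ( previous != null ? previous : 0 ) + elapsed );
        }
    }
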
<c:set var="queue" value="${queueEntry.value.queueSnapshot}"/>
<tr>
<td>${queueEntry.key}</td>
- <td>${fn:length(queue)}</td>
+ <td align="right">${fn:length(queue)}</td>
</tr>
</c:forEach>
</table>
<c:forEach var="scan" items="${scanner.inProgressScans}">
<tr>
<td>${scan.repository.name} (${scan.repository.id})</td>
- <td>${scan.stats.totalFileCount}</td>
- <td>${scan.stats.newFileCount}</td>
+ <td align="right">${scan.stats.totalFileCount}</td>
+ <td align="right">${scan.stats.newFileCount}</td>
+ </tr>
+ <tr>
+ <td colspan="3">
+ <table>
+ <tr>
+ <th>Name</th>
+ <th>Total</th>
+ <th>Average</th>
+ <th>Invocations</th>
+ </tr>
+ <c:forEach var="entry" items="${scan.consumerTimings}">
+ <tr>
+ <c:set var="total" value="${scan.consumerCounts[entry.key]}"/>
+ <td>${entry.key}</td>
+ <td align="right">${entry.value}ms</td>
+ <td align="right"><fmt:formatNumber value="${entry.value / total}" pattern="#"/>ms</td>
+ <td align="right">${total}</td>
+ </tr>
+ </c:forEach>
+ </table>
+ </td>
</tr>
</c:forEach>
</table>
<c:forEach var="cacheEntry" items="${caches}">
<tr>
<td>${cacheEntry.key}</td>
- <td>${cacheEntry.value.statistics.size}</td>
- <td>${cacheEntry.value.statistics.cacheHits}</td>
- <td>${cacheEntry.value.statistics.cacheMiss}</td>
- <td><fmt:formatNumber value="${cacheEntry.value.statistics.cacheHitRate}" pattern="#%"/></td>
+ <td align="right">${cacheEntry.value.statistics.size}</td>
+ <td align="right">${cacheEntry.value.statistics.cacheHits}</td>
+ <td align="right">${cacheEntry.value.statistics.cacheMiss}</td>
+ <td align="right"><fmt:formatNumber value="${cacheEntry.value.statistics.cacheHitRate}" pattern="#%"/></td>
<td><a href="javascript:alert('Not yet implemented')">Flush</a></td>
</tr>
</c:forEach>