[MRM-1442] track time spent in each consumer during a scan, to help diagnose poor...
author Brett Porter <brett@apache.org>
Mon, 6 Dec 2010 15:56:35 +0000 (15:56 +0000)
committer Brett Porter <brett@apache.org>
Mon, 6 Dec 2010 15:56:35 +0000 (15:56 +0000)
git-svn-id: https://svn.apache.org/repos/asf/archiva/branches/archiva-1.3.x@1042689 13f79535-47bb-0310-9956-ffa450edef68

archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/RepositoryScanStatistics.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/RepositoryScannerInstance.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/maven/archiva/repository/scanner/functors/ConsumerProcessFileClosure.java
archiva-modules/archiva-web/archiva-webapp/src/main/webapp/WEB-INF/jsp/admin/systemStatus.jsp
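
Taken together, the changes below wrap each consumer.processFile(...) call in a start/end timestamp, accumulate the elapsed milliseconds and an invocation count per consumer id, and surface those figures in the scan report and on the system status page. A minimal standalone sketch of that accumulation pattern follows; the class and method names are illustrative only and are not part of the commit.

    import java.util.HashMap;
    import java.util.Map;

    /**
     * Illustrative sketch only -- not part of the commit. Each consumer
     * invocation is timed, and the elapsed milliseconds plus an invocation
     * count are accumulated per consumer id.
     */
    public class ConsumerTimingSketch
    {
        private final Map<String, Long> consumerTimings = new HashMap<String, Long>();

        private final Map<String, Long> consumerCounts = new HashMap<String, Long>();

        public void timeInvocation( String consumerId, Runnable processFile )
        {
            long startTime = System.currentTimeMillis();
            processFile.run();
            long endTime = System.currentTimeMillis();

            Long time = consumerTimings.get( consumerId );
            consumerTimings.put( consumerId, ( time != null ? time : 0 ) + endTime - startTime );

            Long count = consumerCounts.get( consumerId );
            consumerCounts.put( consumerId, ( count != null ? count : 0 ) + 1 );
        }

        public Map<String, Long> getConsumerTimings()
        {
            return consumerTimings;
        }

        public Map<String, Long> getConsumerCounts()
        {
            return consumerCounts;
        }
    }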

index 5d4fd3f0658a5d1edb6f509282c41ec860209542..4f18ff2837d3ae4d9ad0a78cf04aef8277d8fe62 100644 (file)
@@ -25,6 +25,7 @@ import org.apache.maven.archiva.model.RepositoryContentStatistics;
 
 import java.text.SimpleDateFormat;
 import java.util.List;
+import java.util.Map;
 
 /**
  * RepositoryScanStatistics - extension to the RepositoryContentStatistics model.
@@ -42,6 +43,10 @@ public class RepositoryScanStatistics
     
     private SimpleDateFormat df = new SimpleDateFormat();
 
+    private Map<String, Long> consumerCounts;
+
+    private Map<String, Long> consumerTimings;
+
     public void triggerStart()
     {
         startTimestamp = System.currentTimeMillis();
@@ -94,6 +99,17 @@ public class RepositoryScanStatistics
             for ( String id : knownConsumers )
             {
                 buf.append( "\n                      " ).append( id );
+                if ( consumerTimings.containsKey( id ) )
+                {
+                    long time = consumerTimings.get( id );
+                    buf.append( " (Total: " ).append( time ).append( "ms" );
+                    if ( consumerCounts.containsKey( id ) )
+                    {
+                        long total = consumerCounts.get( id );
+                        buf.append( "; Avg.: " + ( time / total ) + "ms; Count: " + total );
+                    }
+                    buf.append( ")" );
+                }
             }
         }
         else
@@ -108,6 +124,17 @@ public class RepositoryScanStatistics
             for ( String id : invalidConsumers )
             {
                 buf.append( "\n                      " ).append( id );
+                if ( consumerTimings.containsKey( id ) )
+                {
+                    long time = consumerTimings.get( id );
+                    buf.append( " (Total: " ).append( time ).append( "ms" );
+                    if ( consumerCounts.containsKey( id ) )
+                    {
+                        long total = consumerCounts.get( id );
+                        buf.append( "; Avg.: " + ( time / total ) + "ms; Count: " + total );
+                    }
+                    buf.append( ")" );
+                }
             }
         }
         else
@@ -142,4 +169,14 @@ public class RepositoryScanStatistics
 
         return buf.toString();
     }
+
+    public void setConsumerCounts( Map<String, Long> consumerCounts )
+    {
+        this.consumerCounts = consumerCounts;
+    }
+
+    public void setConsumerTimings( Map<String, Long> consumerTimings )
+    {
+        this.consumerTimings = consumerTimings;
+    }
 }
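
With hypothetical numbers, the per-consumer suffix built above renders as in the snippet below. Note that time / total is long division, so the average is truncated to whole milliseconds.

    public class ReportLineExample
    {
        public static void main( String[] args )
        {
            // Hypothetical figures, only to illustrate the suffix built above.
            long time = 1234;  // total milliseconds spent in the consumer
            long total = 40;   // number of files the consumer processed

            // Long division truncates, so the average is in whole milliseconds.
            String suffix = " (Total: " + time + "ms; Avg.: " + ( time / total ) + "ms; Count: " + total + ")";
            System.out.println( suffix ); // prints " (Total: 1234ms; Avg.: 30ms; Count: 40)"
        }
    }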
index cd25dc396671b9b5e857bad07f754bda512d7a7e..f9de67fa6314b83769b297c5de66f7c9c4a27eb2 100644 (file)
@@ -21,7 +21,9 @@ package org.apache.maven.archiva.repository.scanner;
 
 import java.io.File;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.archiva.repository.scanner.functors.TriggerScanCompletedClosure;
 import org.apache.commons.collections.Closure;
@@ -32,6 +34,7 @@ import org.apache.maven.archiva.common.utils.BaseFile;
 import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
 import org.apache.maven.archiva.consumers.InvalidRepositoryContentConsumer;
 import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
+import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
 import org.apache.maven.archiva.repository.scanner.functors.ConsumerProcessFileClosure;
 import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate;
 import org.apache.maven.archiva.repository.scanner.functors.TriggerBeginScanClosure;
@@ -68,6 +71,10 @@ public class RepositoryScannerInstance
 
     private ConsumerWantsFilePredicate consumerWantsFile;
 
+    private Map<String, Long> consumerTimings;
+
+    private Map<String, Long> consumerCounts;
+
     public RepositoryScannerInstance( ManagedRepositoryConfiguration repository,
                                       List<KnownRepositoryContentConsumer> knownConsumerList,
                                       List<InvalidRepositoryContentConsumer> invalidConsumerList )
@@ -76,7 +83,14 @@ public class RepositoryScannerInstance
         this.knownConsumers = knownConsumerList;
         this.invalidConsumers = invalidConsumerList;
 
+        consumerTimings = new HashMap<String,Long>();
+        consumerCounts = new HashMap<String,Long>();
+
         this.consumerProcessFile = new ConsumerProcessFileClosure();
+        consumerProcessFile.setExecuteOnEntireRepo( true );
+        consumerProcessFile.setConsumerTimings( consumerTimings );
+        consumerProcessFile.setConsumerCounts( consumerCounts );
+
         this.consumerWantsFile = new ConsumerWantsFilePredicate();
 
         stats = new RepositoryScanStatistics();
@@ -109,6 +123,16 @@ public class RepositoryScannerInstance
         return stats;
     }
 
+    public Map<String, Long> getConsumerTimings()
+    {
+        return consumerTimings;
+    }
+
+    public Map<String, Long> getConsumerCounts()
+    {
+        return consumerCounts;
+    }
+
     public void directoryWalkStarting( File basedir )
     {
         log.info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getLocation() );
@@ -123,20 +147,19 @@ public class RepositoryScannerInstance
 
         // consume files regardless - the predicate will check the timestamp
         BaseFile basefile = new BaseFile( repository.getLocation(), file );
-        
+
         // Timestamp finished points to the last successful scan, not this current one.
         if ( file.lastModified() >= changesSince )
         {
-            stats.increaseNewFileCount();             
+            stats.increaseNewFileCount();
         }
-        
+
         consumerProcessFile.setBasefile( basefile );
-        consumerProcessFile.setExecuteOnEntireRepo( true );
         consumerWantsFile.setBasefile( basefile );
-        
+
         Closure processIfWanted = IfClosure.getInstance( consumerWantsFile, consumerProcessFile );
         CollectionUtils.forAllDo( this.knownConsumers, processIfWanted );
-        
+
         if ( consumerWantsFile.getWantedFileCount() <= 0 )
         {
             // Nothing known processed this file.  It is invalid!
@@ -150,6 +173,9 @@ public class RepositoryScannerInstance
         
         CollectionUtils.forAllDo( knownConsumers, scanCompletedClosure );
         CollectionUtils.forAllDo( invalidConsumers, scanCompletedClosure );
+
+        stats.setConsumerTimings( consumerTimings );
+        stats.setConsumerCounts( consumerCounts );
         
         log.info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getLocation() );
         stats.triggerFinished();
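
The new getConsumerTimings() / getConsumerCounts() accessors are what the system status page below reads via ${scan.consumerTimings} and ${scan.consumerCounts}. A short sketch of equivalent programmatic access; the ScanTimingPrinter class is hypothetical, only the accessors come from the commit.

    import java.util.Map;

    import org.apache.maven.archiva.repository.scanner.RepositoryScannerInstance;

    // Hypothetical helper: prints the same figures the systemStatus.jsp table shows.
    public class ScanTimingPrinter
    {
        public static void print( RepositoryScannerInstance scan )
        {
            Map<String, Long> timings = scan.getConsumerTimings();
            Map<String, Long> counts = scan.getConsumerCounts();

            for ( Map.Entry<String, Long> entry : timings.entrySet() )
            {
                String id = entry.getKey();
                long time = entry.getValue();
                Long count = counts.get( id );
                System.out.println( id + ": " + time + "ms over " + count + " invocations" );
            }
        }
    }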
index e1a49392606fcda352fe85df1567d3d8ec6827f1..ec39d952cb56e50c732afa6e8a36429698998dea 100644 (file)
@@ -25,6 +25,9 @@ import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * ConsumerProcessFileClosure 
  *
@@ -39,24 +42,43 @@ public class ConsumerProcessFileClosure
 
     private boolean executeOnEntireRepo;
 
+    private Map<String,Long> consumerTimings;
+    
+    private Map<String,Long> consumerCounts;
+
     public void execute( Object input )
     {
         if ( input instanceof RepositoryContentConsumer )
         {
             RepositoryContentConsumer consumer = (RepositoryContentConsumer) input;
 
+            String id = consumer.getId();
             try
             {
-                log.debug( "Sending to consumer: " + consumer.getId() );
+                log.debug( "Sending to consumer: " + id );
 
+                long startTime = System.currentTimeMillis();
                 consumer.processFile( basefile.getRelativePath(), executeOnEntireRepo );
+                long endTime = System.currentTimeMillis();
+
+                if ( consumerTimings != null )
+                {
+                    Long value = consumerTimings.get( id );
+                    consumerTimings.put( id, ( value != null ? value : 0 ) + endTime - startTime );
+                }
+
+                if ( consumerCounts != null )
+                {
+                    Long value = consumerCounts.get( id );
+                    consumerCounts.put( id, ( value != null ? value : 0 ) + 1 );
+                }
             }
             catch ( Exception e )
             {
                 /* Intentionally Catch all exceptions.
                  * So that the discoverer processing can continue.
                  */
-                log.error( "Consumer [" + consumer.getId() + "] had an error when processing file ["
+                log.error( "Consumer [" + id + "] had an error when processing file ["
                     + basefile.getAbsolutePath() + "]: " + e.getMessage(), e );
             }
         }
@@ -82,6 +104,16 @@ public class ConsumerProcessFileClosure
         this.executeOnEntireRepo = executeOnEntireRepo;
     }
 
+    public void setConsumerTimings( Map<String, Long> consumerTimings )
+    {
+        this.consumerTimings = consumerTimings;
+    }
+
+    public void setConsumerCounts( Map<String, Long> consumerCounts )
+    {
+        this.consumerCounts = consumerCounts;
+    }
+
     public Logger getLogger()
     {
         return log;
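
As an aside, on Java 8 or later the null-guarded read-modify-write in execute() collapses to Map#merge; the codebase in this diff predates that, so the sketch below is purely illustrative and not a suggested change to the commit.

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only: the same accumulation expressed with Map#merge (Java 8+).
    public class MergeAccumulationSketch
    {
        private final Map<String, Long> consumerTimings = new HashMap<String, Long>();

        private final Map<String, Long> consumerCounts = new HashMap<String, Long>();

        public void record( String id, long elapsedMillis )
        {
            consumerTimings.merge( id, elapsedMillis, Long::sum );
            consumerCounts.merge( id, 1L, Long::sum );
        }
    }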
index c9a9fd8cb3e416b3a2b38d87da24e903266cfbce..e5fd5041d649391ce6ec0f9a4be9b9ab7aefb28e 100644 (file)
@@ -49,7 +49,7 @@
       <c:set var="queue" value="${queueEntry.value.queueSnapshot}"/>
       <tr>
         <td>${queueEntry.key}</td>
-        <td>${fn:length(queue)}</td>
+        <td align="right">${fn:length(queue)}</td>
       </tr>
     </c:forEach>
   </table>
     <c:forEach var="scan" items="${scanner.inProgressScans}">
       <tr>
         <td>${scan.repository.name} (${scan.repository.id})</td>
-        <td>${scan.stats.totalFileCount}</td>
-        <td>${scan.stats.newFileCount}</td>
+        <td align="right">${scan.stats.totalFileCount}</td>
+        <td align="right">${scan.stats.newFileCount}</td>
+      </tr>
+      <tr>
+        <td colspan="3">
+          <table>
+            <tr>
+              <th>Name</th>
+              <th>Total</th>
+              <th>Average</th>
+              <th>Invocations</th>
+            </tr>
+            <c:forEach var="entry" items="${scan.consumerTimings}">
+              <tr>
+                <c:set var="total" value="${scan.consumerCounts[entry.key]}"/>
+                <td>${entry.key}</td>
+                <td align="right">${entry.value}ms</td>
+                <td align="right"><fmt:formatNumber value="${entry.value / total}" pattern="#"/>ms</td>
+                <td align="right">${total}</td>
+              </tr>
+            </c:forEach>
+          </table>
+        </td>
       </tr>
     </c:forEach>
   </table>
     <c:forEach var="cacheEntry" items="${caches}">
       <tr>
         <td>${cacheEntry.key}</td>
-        <td>${cacheEntry.value.statistics.size}</td>
-        <td>${cacheEntry.value.statistics.cacheHits}</td>
-        <td>${cacheEntry.value.statistics.cacheMiss}</td>
-        <td><fmt:formatNumber value="${cacheEntry.value.statistics.cacheHitRate}" pattern="#%"/></td>
+        <td align="right">${cacheEntry.value.statistics.size}</td>
+        <td align="right">${cacheEntry.value.statistics.cacheHits}</td>
+        <td align="right">${cacheEntry.value.statistics.cacheMiss}</td>
+        <td align="right"><fmt:formatNumber value="${cacheEntry.value.statistics.cacheHitRate}" pattern="#%"/></td>
         <td><a href="javascript:alert('Not yet implemented')">Flush</a></td>
       </tr>
     </c:forEach>