Posted to commits@sling.apache.org by ro...@apache.org on 2017/11/07 10:27:01 UTC

[sling-tooling-jenkins] 21/27: SLING-4494 - Performance: update performance test framework to allow comparison of results

This is an automated email from the ASF dual-hosted git repository.

rombert pushed a commit to annotated tag org.apache.sling.performance.base-0.0.2
in repository https://gitbox.apache.org/repos/asf/sling-tooling-jenkins.git

commit 7a5cbfc76de8817fe439e37aab0f587c50c628b5
Author: Radu Cotescu <ra...@apache.org>
AuthorDate: Thu Mar 12 16:39:36 2015 +0000

    SLING-4494 - Performance: update performance test framework to allow comparison of results
    
    * performance testing features that allow not only logging test run times but also comparing
    them and failing when a certain threshold ratio is exceeded
    
    (applied patch sent by Vlad Băilescu - closes #69)
    
    git-svn-id: https://svn.apache.org/repos/asf/sling/trunk/performance/base@1666250 13f79535-47bb-0310-9956-ffa450edef68
---
 .../performance/FrameworkPerformanceMethod.java    |  17 +-
 .../sling/performance/PerformanceRecord.java       |  61 +++++
 .../sling/performance/PerformanceRunner.java       |  51 +++-
 .../org/apache/sling/performance/ReportLogger.java | 296 +++++++++++++++++++--
 .../performance/annotation/PerformanceTest.java    |   5 +-
 5 files changed, 392 insertions(+), 38 deletions(-)

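For orientation, a minimal usage sketch of the feature (the class and method
names below are hypothetical, not part of this commit): a class run with
PerformanceRunner names one of its zero-argument performance test methods as
the reference via the Parameters annotation, and every other @PerformanceTest
method may declare a threshold ratio against that reference.

    import org.apache.sling.performance.PerformanceRunner;
    import org.apache.sling.performance.annotation.PerformanceTest;
    import org.junit.runner.RunWith;

    @RunWith(PerformanceRunner.class)
    @PerformanceRunner.Parameters(reportLevel = PerformanceRunner.ReportLevel.MethodLevel,
            referenceMethod = "baseline")
    public class ResourceResolutionPerfTest {

        // reference method: its statistics become the denominator for every other method
        @PerformanceTest(warmupinvocations = 10, runinvocations = 50)
        public void baseline() {
            // exercise the known-good code path here
        }

        // compared against baseline(); fails if its median exceeds 1.5x the baseline median
        @PerformanceTest(warmupinvocations = 10, runinvocations = 50, threshold = 1.5)
        public void candidate() {
            // exercise the code path under test here
        }
    }
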
diff --git a/src/main/java/org/apache/sling/performance/FrameworkPerformanceMethod.java b/src/main/java/org/apache/sling/performance/FrameworkPerformanceMethod.java
index 1e002f4..75e69af 100644
--- a/src/main/java/org/apache/sling/performance/FrameworkPerformanceMethod.java
+++ b/src/main/java/org/apache/sling/performance/FrameworkPerformanceMethod.java
@@ -38,15 +38,18 @@ class FrameworkPerformanceMethod extends FrameworkMethod {
     private Object target;
     private PerformanceSuiteState performanceSuiteState;
     private PerformanceRunner.ReportLevel reportLevel = PerformanceRunner.ReportLevel.ClassLevel;
+    private String referenceMethod = null;
     private String testCaseName = "";
     private String className;
 
     public FrameworkPerformanceMethod(Method method, Object target,
-            PerformanceSuiteState performanceSuiteState, PerformanceRunner.ReportLevel reportLevel) {
+                                      PerformanceSuiteState performanceSuiteState, PerformanceRunner.ReportLevel reportLevel,
+                                      String referenceMethod) {
         super(method);
         this.target = target;
         this.performanceSuiteState = performanceSuiteState;
         this.reportLevel = reportLevel;
+        this.referenceMethod = referenceMethod;
         if (target instanceof IdentifiableTestCase) {
             this.testCaseName = ((IdentifiableTestCase) target).testCaseName();
         }
@@ -109,6 +112,7 @@ class FrameworkPerformanceMethod extends FrameworkMethod {
         int runtime = performanceAnnotation.runtime();
         int warmupinvocations = performanceAnnotation.warmupinvocations();
         int runinvocations = performanceAnnotation.runinvocations();
+        double threshold = performanceAnnotation.threshold();
 
         DescriptiveStatistics statistics = new DescriptiveStatistics();
 
@@ -181,8 +185,13 @@ class FrameworkPerformanceMethod extends FrameworkMethod {
         }
 
         if (statistics.getN() > 0) {
-            ReportLogger.writeReport(this.performanceSuiteState.testSuiteName, testCaseName, className, getMethod().getName(),
-                    statistics, ReportLogger.ReportType.TXT, reportLevel);
+            if (referenceMethod == null) {
+                ReportLogger.writeReport(this.performanceSuiteState.testSuiteName, testCaseName, className, getMethod().getName(),
+                        statistics, ReportLogger.ReportType.TXT, reportLevel);
+            } else {
+                ReportLogger reportLogger = ReportLogger.getOrCreate(this.performanceSuiteState.testSuiteName, testCaseName, getMethod().getDeclaringClass().getName(), referenceMethod);
+                reportLogger.recordStatistics(getMethod().getName(), statistics, threshold);
+            }
         }
 
         // In case of a PerformanceSuite we need to run the methods annotated
@@ -345,6 +354,6 @@ class FrameworkPerformanceMethod extends FrameworkMethod {
         if (testCaseName == null || "".equals(testCaseName.trim())) { return super.getName(); }
         return String.format("%s  [%s.%s]", testCaseName, target.getClass().getSimpleName(),
                 getMethod().getName());
-}
+    }
 
 }
diff --git a/src/main/java/org/apache/sling/performance/PerformanceRecord.java b/src/main/java/org/apache/sling/performance/PerformanceRecord.java
new file mode 100644
index 0000000..c62640f
--- /dev/null
+++ b/src/main/java/org/apache/sling/performance/PerformanceRecord.java
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ ******************************************************************************/
+package org.apache.sling.performance;
+
+import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
+
+/**
+ * Wrapper for recorded performance statistics and defined threshold
+ */
+public class PerformanceRecord {
+
+    private final DescriptiveStatistics statistics;
+
+    private final Number threshold;
+
+    public PerformanceRecord(final DescriptiveStatistics statistics, final Number threshold) {
+        this.statistics = statistics;
+        this.threshold = threshold;
+    }
+
+    public DescriptiveStatistics getStatistics() {
+        return this.statistics;
+    }
+
+    public Number getThreshold() {
+        return this.threshold;
+    }
+
+    /**
+     * Checks internal statistics against <code>reference</code>. Current implementation looks at 50 percentile.
+     *
+     * @param reference Reference statistics
+     * @return An error string if threshold is exceeded, <code>null</code> if not
+     */
+    public String checkThreshold(DescriptiveStatistics reference) {
+        if (threshold == null || threshold.doubleValue() <= 0) {
+            return null;
+        }
+        double ratio = this.statistics.getPercentile(50) / reference.getPercentile(50);
+        if (ratio > threshold.doubleValue()) {
+            return String.format("Threshold exceeded! Expected <%6.2f, actual %6.2f", threshold.doubleValue(), ratio);
+        }
+        return null;
+    }
+}
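
To make the check concrete, a standalone sketch with made-up timings (not code
from this commit): a reference median of 100 ms, a measured median of 180 ms
and a threshold of 1.5 give a ratio of 1.8, so checkThreshold returns an error
string; a threshold of 0 (the default) disables the comparison entirely.

    import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
    import org.apache.sling.performance.PerformanceRecord;

    public class ThresholdCheckExample {
        public static void main(String[] args) {
            DescriptiveStatistics reference = new DescriptiveStatistics();
            DescriptiveStatistics measured = new DescriptiveStatistics();
            for (double v : new double[] {95, 100, 105}) {
                reference.addValue(v);  // reference median = 100
            }
            for (double v : new double[] {170, 180, 190}) {
                measured.addValue(v);   // measured median = 180
            }
            // ratio 180 / 100 = 1.8 > 1.5 -> "Threshold exceeded! ..." is returned
            PerformanceRecord record = new PerformanceRecord(measured, 1.5);
            System.out.println(record.checkThreshold(reference));
            // threshold <= 0 skips the check and returns null
            System.out.println(new PerformanceRecord(measured, 0).checkThreshold(reference));
        }
    }
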
diff --git a/src/main/java/org/apache/sling/performance/PerformanceRunner.java b/src/main/java/org/apache/sling/performance/PerformanceRunner.java
index bb646fe..21edf01 100644
--- a/src/main/java/org/apache/sling/performance/PerformanceRunner.java
+++ b/src/main/java/org/apache/sling/performance/PerformanceRunner.java
@@ -23,10 +23,13 @@ import org.apache.sling.performance.annotation.PerformanceTestFactory;
 import org.apache.sling.performance.annotation.PerformanceTestSuite;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.runner.notification.Failure;
+import org.junit.runner.notification.RunNotifier;
 import org.junit.runners.BlockJUnit4ClassRunner;
 import org.junit.runners.model.FrameworkMethod;
 import org.junit.runners.model.InitializationError;
-import org.junit.runners.model.TestClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.lang.annotation.Annotation;
 import java.lang.annotation.ElementType;
@@ -34,7 +37,6 @@ import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 import java.lang.reflect.Method;
-import java.lang.reflect.Modifier;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -44,15 +46,18 @@ import java.util.List;
 
 /**
  * The custom JUnit runner that collects the performance tests
- * 
+ *
  */
 
 
 
 public class PerformanceRunner extends BlockJUnit4ClassRunner {
+    private static final Logger logger = LoggerFactory.getLogger(ReportLogger.class);
+
     protected LinkedList<FrameworkMethod> tests = new LinkedList<FrameworkMethod>();
     private List<PerformanceSuiteState> suitesState = new ArrayList<PerformanceSuiteState>();
     public ReportLevel reportLevel = ReportLevel.ClassLevel;
+    public String referenceMethod = null;
 
     public static enum ReportLevel{
         ClassLevel,
@@ -63,6 +68,8 @@ public class PerformanceRunner extends BlockJUnit4ClassRunner {
     @Target(ElementType.TYPE)
     public @interface Parameters {
         public ReportLevel reportLevel() default ReportLevel.ClassLevel;
+        /** This is the name of the reference method used to compute statistics */
+        public String referenceMethod() default "";
     }
 
     public PerformanceRunner(Class<?> clazz) throws InitializationError {
@@ -72,6 +79,21 @@ public class PerformanceRunner extends BlockJUnit4ClassRunner {
         // by default set to class level for legacy tests compatibility
         if (clazz.getAnnotation(Parameters.class) != null){
             reportLevel = clazz.getAnnotation(Parameters.class).reportLevel();
+            referenceMethod = clazz.getAnnotation(Parameters.class).referenceMethod();
+            if ("".equals(referenceMethod)) {
+                referenceMethod = null;
+            } else {
+                boolean found = false;
+                for (Method method : clazz.getMethods()) {
+                    if (method.getName().equals(referenceMethod) && method.getParameterTypes().length == 0) {
+                        found = true;
+                        break;
+                    }
+                }
+                if (!found) {
+                    referenceMethod = null;
+                }
+            }
         }
 
         try {
@@ -81,6 +103,23 @@ public class PerformanceRunner extends BlockJUnit4ClassRunner {
         }
     }
 
+    @Override
+    public void run(RunNotifier notifier) {
+        super.run(notifier);
+        try {
+            ReportLogger.writeAllResults();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+        try {
+            for (Failure failure : ReportLogger.checkAllThresholds()) {
+                notifier.fireTestFailure(failure);
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
     /**
      * Compute the tests that will be run
      *
@@ -219,7 +258,7 @@ public class PerformanceRunner extends BlockJUnit4ClassRunner {
 
                 for (Method method : testMethods) {
                     FrameworkPerformanceMethod performaceTestMethod =
-                            new FrameworkPerformanceMethod(method, testObject, current, reportLevel);
+                            new FrameworkPerformanceMethod(method, testObject, current, reportLevel, referenceMethod);
                     tests.add(performaceTestMethod);
                 }
 
@@ -237,7 +276,7 @@ public class PerformanceRunner extends BlockJUnit4ClassRunner {
         for (FrameworkMethod method : getTestClass().getAnnotatedMethods(PerformanceTest.class)) {
             Object targetObject = getTestClass().getJavaClass().newInstance();
             FrameworkPerformanceMethod performanceTestMethod = new FrameworkPerformanceMethod(
-                    method.getMethod(), targetObject, current, reportLevel);
+                    method.getMethod(), targetObject, current, reportLevel, referenceMethod);
             tests.add(performanceTestMethod);
         }
 
@@ -245,7 +284,7 @@ public class PerformanceRunner extends BlockJUnit4ClassRunner {
     }
 
 
-        /**
+    /**
      * Retrieve specific method from test class
      *
      * @param testClass
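
Because the overridden run(RunNotifier) fires the comparison failures only
after super.run() completes, a threshold breach shows up as an additional
failure attributed to the offending method in the normal JUnit result. A small
sketch (LaunchPerfTests and ResourceResolutionPerfTest are hypothetical, the
latter taken from the sketch above):

    import org.junit.runner.JUnitCore;
    import org.junit.runner.Result;
    import org.junit.runner.notification.Failure;

    public class LaunchPerfTests {
        public static void main(String[] args) {
            Result result = JUnitCore.runClasses(ResourceResolutionPerfTest.class);
            for (Failure failure : result.getFailures()) {
                // threshold breaches arrive as synthetic failures carrying a
                // "Threshold exceeded! ..." message for the offending method
                System.out.println(failure.getTestHeader() + ": " + failure.getMessage());
            }
        }
    }
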
diff --git a/src/main/java/org/apache/sling/performance/ReportLogger.java b/src/main/java/org/apache/sling/performance/ReportLogger.java
index 64dccd9..d93109b 100644
--- a/src/main/java/org/apache/sling/performance/ReportLogger.java
+++ b/src/main/java/org/apache/sling/performance/ReportLogger.java
@@ -5,10 +5,18 @@ import java.io.IOException;
 import java.io.PrintWriter;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
 
+import org.apache.commons.collections.map.MultiKeyMap;
 import org.apache.commons.io.output.FileWriterWithEncoding;
 import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
+import org.junit.runner.Description;
+import org.junit.runner.notification.Failure;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -16,13 +24,77 @@ public class ReportLogger {
 
     private static boolean reportFolderLogged = false;
     private static final Logger logger = LoggerFactory.getLogger(ReportLogger.class);
-    
+
     public static final String REPORTS_DIR = "performance-reports";
 
+    /** Multi map of all ReportLogger instances created by getOrCreate(..) */
+    private static final MultiKeyMap reportLoggers = new MultiKeyMap();
+
     public enum ReportType {
         TXT
     }
 
+    /** Name of test suite */
+    private final String testSuiteName;
+
+    /** Name of test case */
+    private final String testCaseName;
+
+    /** Class name */
+    private final String className;
+
+    /** Name of test method to which all other tests will be compared */
+    private final String referenceMethod;
+
+    /** Recorded stats for ran tests */
+    private final Map<String, PerformanceRecord> records = new LinkedHashMap<String, PerformanceRecord>();
+
+    /**
+     * Do not allow instances to be created directly, use the getOrCreate(..) static method
+     */
+    private ReportLogger() {
+        this.testSuiteName = null;
+        this.testCaseName = null;
+        this.className = null;
+        this.referenceMethod = null;
+    }
+
+    /**
+     * Create a new ReportLogger, will be called by getOrCreate(..)
+     *
+     * @param testSuiteName
+     * @param testCaseName
+     * @param className
+     * @param referenceMethod
+     */
+    private ReportLogger(final String testSuiteName, final String testCaseName, final String className,
+                         final String referenceMethod) {
+        this.testSuiteName = testSuiteName;
+        this.testCaseName = testCaseName;
+        this.className = className;
+        this.referenceMethod = referenceMethod;
+    }
+
+    /**
+     * Factory method for ReportRecorder. Will return an existing ReportLogger for given parameters or create a new
+     * instance and register it internally.
+     *
+     * @param testSuiteName
+     * @param testCaseName
+     * @param className
+     * @param referenceMethod
+     * @return
+     */
+    public static ReportLogger getOrCreate(final String testSuiteName, final String testCaseName,
+                                           final String className, final String referenceMethod) {
+        Object reportLogger = reportLoggers.get(testSuiteName, testCaseName, className, referenceMethod);
+        if (reportLogger == null) {
+            reportLogger = new ReportLogger(testSuiteName, testCaseName, className, referenceMethod);
+            reportLoggers.put(testSuiteName, testCaseName, className, referenceMethod, reportLogger);
+        }
+        return (ReportLogger)reportLogger;
+    }
+
     /**
      * Method the writes the performance report after a test is run
      * @param testSuiteName
@@ -35,7 +107,7 @@ public class ReportLogger {
      * @throws Exception
      */
     public static void writeReport(String testSuiteName, String testCaseName, String className, String methodName,
-            DescriptiveStatistics statistics, ReportType reportType, PerformanceRunner.ReportLevel reportLevel) throws Exception {
+                                   DescriptiveStatistics statistics, ReportType reportType, PerformanceRunner.ReportLevel reportLevel) throws Exception {
         switch (reportType) {
             case TXT:
                 writeReportTxt(testSuiteName, testCaseName, className, methodName, statistics, reportLevel);
@@ -57,11 +129,64 @@ public class ReportLogger {
      * @throws Exception
      */
     public static void writeReportTxt(String testSuiteName, String testCaseName, String className, String methodName,
-            DescriptiveStatistics statistics, PerformanceRunner.ReportLevel reportLevel) throws Exception {
+                                      DescriptiveStatistics statistics, PerformanceRunner.ReportLevel reportLevel) throws Exception {
+        writeReportTxt(testSuiteName,
+                testCaseName,
+                className,
+                methodName,
+                statistics.getMin(),
+                statistics.getPercentile(10),
+                statistics.getPercentile(50),
+                statistics.getPercentile(90),
+                statistics.getMax(),
+                reportLevel);
+    }
 
+    /**
+     * Method that writes the performance report
+     *
+     * @param testSuiteName
+     * @param testCaseName
+     * @param className
+     * @param methodName
+     * @param min
+     * @param percentile10
+     * @param percentile50
+     * @param percentile90
+     * @param max
+     * @param reportLevel
+     * @throws Exception
+     */
+    public static void writeReportTxt(String testSuiteName, String testCaseName, String className, String methodName,
+                                      double min, double percentile10, double percentile50, double percentile90, double max,
+                                      PerformanceRunner.ReportLevel reportLevel) throws Exception {
+        writeReportTxt(testSuiteName, testCaseName, className, methodName,
+                min, percentile10, percentile50, percentile90, max,
+                reportLevel, false);
+    }
+
+    /**
+     * Method that writes the performance report
+     *
+     * @param testSuiteName
+     * @param testCaseName
+     * @param className
+     * @param methodName
+     * @param min
+     * @param percentile10
+     * @param percentile50
+     * @param percentile90
+     * @param max
+     * @param reportLevel
+     * @param showDecimals
+     * @throws Exception
+     */
+    public static void writeReportTxt(String testSuiteName, String testCaseName, String className, String methodName,
+                                      double min, double percentile10, double percentile50, double percentile90, double max,
+                                      PerformanceRunner.ReportLevel reportLevel, boolean showDecimals) throws Exception {
         File reportDir = new File("target/" + REPORTS_DIR);
         if (!reportDir.exists() && !reportDir.mkdir()) {
-                throw new IOException("Unable to create " + REPORTS_DIR + " directory");
+            throw new IOException("Unable to create " + REPORTS_DIR + " directory");
         }
 
         // need this in the case a user wants to set the suite name from the
@@ -76,10 +201,11 @@ public class ReportLogger {
 
         if (reportLevel.equals(PerformanceRunner.ReportLevel.ClassLevel)) {
             String resultFileName = className;
-            writeReportClassLevel(resultFileName, testSuiteName, statistics);
+            writeReportClassLevel(resultFileName, testSuiteName, min, percentile10, percentile50, percentile90, max);
         } else if (reportLevel.equals(PerformanceRunner.ReportLevel.MethodLevel)) {
             String resultFileName = className + "." + methodName;
-            writeReportMethodLevel(resultFileName, testSuiteName, testCaseName, className, methodName, statistics);
+            writeReportMethodLevel(resultFileName, testSuiteName, testCaseName, className, methodName,
+                    min, percentile10, percentile50, percentile90, max, showDecimals);
         }
     }
 
@@ -88,10 +214,15 @@ public class ReportLogger {
      *
      * @param resultFileName the name of the result file (without extension)
      * @param testSuiteName the name of the test suite name
-     * @param statistics the statistics object used to compute different medians
+     * @param min
+     * @param percentile10
+     * @param percentile50
+     * @param percentile90
+     * @param max
+
      */
     private static void writeReportClassLevel(String resultFileName, String testSuiteName,
-            DescriptiveStatistics statistics) throws IOException {
+                                              double min, double percentile10, double percentile50, double percentile90, double max) throws IOException {
 
         File report = getReportFile(resultFileName, ".txt");
         boolean needsPrefix = !report.exists();
@@ -105,16 +236,16 @@ public class ReportLogger {
             writer.format(
                     "%-52.52s  %6.0f  %6.0f  %6.0f  %6.0f  %6.0f%n",
                     testSuiteName,
-                    statistics.getMin(),
-                    statistics.getPercentile(10.0),
-                    statistics.getPercentile(50.0),
-                    statistics.getPercentile(90.0),
-                    statistics.getMax());
+                    min,
+                    percentile10,
+                    percentile50,
+                    percentile90,
+                    max);
         } finally {
             writer.close();
         }
     }
-    
+
     /**
      * Write report for method level tests
      *
@@ -123,10 +254,17 @@ public class ReportLogger {
      * @param testCaseName
      * @param className
      * @param methodName
-     * @param statistics the statistics object used to compute different medians
+     * @param min
+     * @param percentile10
+     * @param percentile50
+     * @param percentile90
+     * @param max
+
      */
-    private static void writeReportMethodLevel(String resultFileName, String testSuiteName, String testCaseName, String className,
-            String methodName, DescriptiveStatistics statistics) throws IOException {
+    private static void writeReportMethodLevel(String resultFileName, String testSuiteName,
+                                               String testCaseName, String className, String methodName,
+                                               double min, double percentile10, double percentile50, double percentile90, double max,
+                                               boolean showDecimals) throws IOException {
         File report = getReportFile(resultFileName, ".txt");
 
         boolean needsPrefix = !report.exists();
@@ -143,20 +281,22 @@ public class ReportLogger {
             }
 
             writer.format(
-                    "%-40.40s|%-120.120s|%-80.80s|%-40.40s|%-20.20s|%7.0f|%9.0f|%9.0f|%9.0f|%9.0f%n",
+                    showDecimals ?
+                            "%-40.40s|%-120.120s|%-80.80s|%-40.40s|%-20.20s|%7.2f|%9.2f|%9.2f|%9.2f|%9.2f%n":
+                            "%-40.40s|%-120.120s|%-80.80s|%-40.40s|%-20.20s|%7.0f|%9.0f|%9.0f|%9.0f|%9.0f%n",
                     testSuiteName,
                     (testCaseName.length() < 120) ? (testCaseName) : (testCaseName.substring(0, 115) + "[...]"),
                     className,
                     methodName,
                     getDate(),
-                    statistics.getMin(),
-                    statistics.getPercentile(10.0),
-                    statistics.getPercentile(50.0),
-                    statistics.getPercentile(90.0),
-                    statistics.getMax());
-            } finally {
-                writer.close();
-            }
+                    min,
+                    percentile10,
+                    percentile50,
+                    percentile90,
+                    max);
+        } finally {
+            writer.close();
+        }
     }
 
 
@@ -180,4 +320,108 @@ public class ReportLogger {
         return new File(folder, filename);
     }
 
+    /**
+     * Write results from all registered loggers
+     *
+     * @throws Exception
+     */
+    public static void writeAllResults() throws Exception {
+        for (Object reportLogger : reportLoggers.values()) {
+            ((ReportLogger)reportLogger).writeResults();
+        }
+    }
+
+    /**
+     * Check all thresholds for all records in all registered loggers
+     *
+     * @return
+     */
+    public static List<Failure> checkAllThresholds() throws ClassNotFoundException {
+        List<Failure> failures = new ArrayList<Failure>();
+        for (Object reportLogger : reportLoggers.values()) {
+            failures.addAll(((ReportLogger) reportLogger).checkThresholds());
+        }
+        return failures;
+    }
+
+    /**
+     * Record statistics for given method
+     *
+     * @param methodName
+     * @param statistics
+     */
+    public void recordStatistics(final String methodName, final DescriptiveStatistics statistics, final double threshold) {
+        records.put(methodName, new PerformanceRecord(statistics, threshold));
+    }
+
+    /**
+     * Write all records to file in TXT format
+     *
+     * @throws Exception
+     */
+    public void writeResults() throws Exception {
+        PerformanceRecord referenceRecord = records.get(referenceMethod);
+        for (String methodName : records.keySet()) {
+            DescriptiveStatistics statistics = records.get(methodName).getStatistics();
+            double min = statistics.getMin();
+            double percentile10 = statistics.getPercentile(10);
+            double percentile50 = statistics.getPercentile(50);
+            double percentile90 = statistics.getPercentile(90);
+            double max = statistics.getMax();
+            boolean showDecimals = false;
+            if (referenceRecord != null && !referenceMethod.equals(methodName)) {
+                DescriptiveStatistics referenceStatistics = referenceRecord.getStatistics();
+                double ref = referenceStatistics.getMin();
+                min = ref == 0 ? Double.POSITIVE_INFINITY : min/ref;
+
+                ref = referenceStatistics.getPercentile(10);
+                percentile10 = ref == 0 ? Double.POSITIVE_INFINITY : percentile10/ref;
+
+                ref = referenceStatistics.getPercentile(50);
+                percentile50 = ref == 0 ? Double.POSITIVE_INFINITY : percentile50/ref;
+
+                ref = referenceStatistics.getPercentile(90);
+                percentile90 = ref == 0 ? Double.POSITIVE_INFINITY : percentile90/ref;
+
+                ref = referenceStatistics.getMax();
+                max = ref == 0 ? Double.POSITIVE_INFINITY : max /referenceStatistics.getMax();
+
+                showDecimals = true;
+            }
+            ReportLogger.writeReportTxt(testSuiteName,
+                    testCaseName,
+                    Class.forName(className).getSimpleName(),
+                    methodName,
+                    min,
+                    percentile10,
+                    percentile50,
+                    percentile90,
+                    max,
+                    PerformanceRunner.ReportLevel.MethodLevel,
+                    showDecimals);
+        }
+    }
+
+    /**
+     * Test if any of the <link>PerformanceRecord</link> exceeds their threshold against the reference
+     *
+     * @return
+     */
+    public List<Failure> checkThresholds() throws ClassNotFoundException {
+        PerformanceRecord referenceRecord = records.get(referenceMethod);
+        if (referenceRecord == null) {
+            return Collections.EMPTY_LIST;
+        }
+        DescriptiveStatistics referenceStatistics = referenceRecord.getStatistics();
+        List<Failure> failures = new ArrayList<Failure>();
+        for (String methodName : records.keySet()) {
+            PerformanceRecord performanceRecord = records.get(methodName);
+            String result = performanceRecord.checkThreshold(referenceStatistics);
+            if (result != null) {
+                failures.add(new Failure(Description.createTestDescription(Class.forName(className), methodName),
+                        new Exception(result)));
+            }
+        }
+        return failures;
+    }
 }
\ No newline at end of file
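
Putting the new ReportLogger pieces together, a sketch with hypothetical names
(in practice FrameworkPerformanceMethod and PerformanceRunner make these
calls): loggers are keyed by (test suite, test case, class, reference method);
writeAllResults() writes the non-reference rows as ratios against the
reference, with two decimals; checkAllThresholds() turns each exceeded
threshold into a JUnit Failure.

    import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
    import org.apache.sling.performance.ReportLogger;
    import org.junit.runner.notification.Failure;

    public class ReportLoggerExample {
        public static void main(String[] args) throws Exception {
            DescriptiveStatistics baseline = new DescriptiveStatistics();
            DescriptiveStatistics candidate = new DescriptiveStatistics();
            for (int i = 0; i < 10; i++) {
                baseline.addValue(100);   // reference: ~100 ms per invocation
                candidate.addValue(180);  // candidate: ~180 ms per invocation
            }

            // one logger per (suite, test case, class, reference method) combination
            ReportLogger logger = ReportLogger.getOrCreate("mySuite", "myCase",
                    ReportLoggerExample.class.getName(), "baseline");
            logger.recordStatistics("baseline", baseline, 0);      // reference, never compared
            logger.recordStatistics("candidate", candidate, 1.5);  // fails above 1.5x reference

            // writes the method-level TXT report (assumes a Maven-style target/ directory);
            // the candidate row is printed as ratios, e.g. 1.80 for the median column
            ReportLogger.writeAllResults();

            // one Failure per method whose median ratio exceeds its threshold
            for (Failure failure : ReportLogger.checkAllThresholds()) {
                System.out.println(failure);
            }
        }
    }
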
diff --git a/src/main/java/org/apache/sling/performance/annotation/PerformanceTest.java b/src/main/java/org/apache/sling/performance/annotation/PerformanceTest.java
index 3810234..1fec58c 100644
--- a/src/main/java/org/apache/sling/performance/annotation/PerformanceTest.java
+++ b/src/main/java/org/apache/sling/performance/annotation/PerformanceTest.java
@@ -42,6 +42,7 @@ public @interface PerformanceTest {
 	// set the number of invocations to run
 	// in the warm up phase
 	int warmupinvocations() default 0;
-	
-	
+
+	// set the performance threshold
+	double threshold() default 0;
 }

-- 
To stop receiving notification emails like this one, please contact
"commits@sling.apache.org" <co...@sling.apache.org>.