package com.carrotsearch.junitbenchmarks;

import org.junit.Rule;
import org.junit.rules.MethodRule;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.Statement;

/**
 * A benchmark rule (causes tests to be repeated and measured). The rule should be
 * declared as a public field of the test class, annotated with {@link Rule}. Example:
 *
 * <pre>
 * {@literal @}{@link Rule}
 * public {@link MethodRule} runBenchmarks = new BenchmarkRule();
 * </pre>
 */
public final class BenchmarkRule implements MethodRule
{
    private final IResultsConsumer [] consumers;
    private BenchmarkStatement currentStatement;

    /**
     * Creates a benchmark rule with the default sink for benchmark results (the default
     * sink is taken from global properties).
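     *
     * <pre>
     * // Sketch: the default consumers are typically selected with JVM system properties;
     * // the property name and values below are assumed, see {@link BenchmarkOptionsSystemProperties}
     * // for the exact keys, e.g.:
     * //   -Djub.consumers=CONSOLE,H2
     * </pre>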
     */
    public BenchmarkRule()
    {
        this(BenchmarkOptionsSystemProperties.getDefaultConsumers());
    }

    /**
     * Creates a benchmark rule with a given sink for benchmark results.
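     *
     * <pre>
     * // Sketch: route results to a custom consumer. WriterConsumer is assumed here to be
     * // one of the bundled {@link IResultsConsumer} implementations (console output).
     * {@literal @}Rule
     * public MethodRule runBenchmarks = new BenchmarkRule(new WriterConsumer());
     * </pre>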
     */
    public BenchmarkRule(IResultsConsumer... consumers)
    {
        if (consumers == null || consumers.length == 0)
            throw new IllegalArgumentException("There needs to be at least one consumer.");
        this.consumers = consumers;
    }

    /**
     * Apply benchmarking to the given method and target.
     */
    public Statement apply(Statement base, FrameworkMethod method, Object target)
    {
        currentStatement = new BenchmarkStatement(base, method, target, consumers);
        return currentStatement;
    }

    /**
     * Resets the start time of the current run; use this to strip off setup overhead.
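     *
     * <pre>
     * // Sketch (method names are illustrative): do expensive preparation inside the test
     * // method, then discard its cost from the measured time.
     * {@literal @}Test
     * public void queryBenchmark()
     * {
     *     prepareLargeDataSet(); // hypothetical setup work
     *     runBenchmarks.reset(); // timing effectively restarts here
     *     runQuery();            // hypothetical code under measurement
     * }
     * </pre>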
     */
    public void reset()
    {
        currentStatement.reset();
    }
}