package org.infinispan.stress;
import static java.lang.Math.sqrt;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import org.infinispan.Cache;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.eviction.EvictionStrategy;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.test.SingleCacheManagerTest;
import org.infinispan.test.fwk.TestCacheManagerFactory;
import org.infinispan.util.logging.Log;
import org.infinispan.util.logging.LogFactory;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import com.github.benmanes.caffeine.cache.Caffeine;
/**
* Stress test different maps for container implementations
*
* @author Manik Surtani
* @author Dan Berindei <dberinde@redhat.com>
* @since 4.0
*/
@Test(testName = "stress.MapStressTest", groups = "profiling")
public class MapStressTest extends SingleCacheManagerTest {
private static Log log = LogFactory.getLog(MapStressTest.class);
static final float MAP_LOAD_FACTOR = 0.75f;
// The generated key list contains numKeys * LOOP_FACTOR entries, so popular keys repeat.
static final int LOOP_FACTOR = 10;
// Test running time in milliseconds; the "time" system property is in seconds (default 30).
static final long RUNNING_TIME = Integer.getInteger("time", 30) * 1000;
// Container capacity, configurable via the "size" system property.
final int CAPACITY = Integer.getInteger("size", 100000);
// Fixed seed so every run generates the same Gaussian key-access pattern.
private static final Random RANDOM = new Random(12345);
// Released by the test driver to start all worker threads at the same moment.
private volatile CountDownLatch latch;
private List<String> keys = new ArrayList<String>();

public MapStressTest() {
   log.tracef("MapStressTest configuration: capacity %d, test running time %d seconds\n",
         CAPACITY, RUNNING_TIME / 1000);
}
/**
 * Regenerates the shared key list with {@code numKeys * LOOP_FACTOR} Gaussian-distributed
 * keys, so keys near the middle of the range occur more often.
 */
private void generateKeyList(int numKeys) {
   // Release the previous list first - without this we keep getting OutOfMemoryErrors.
   keys = null;
   int totalKeys = numKeys * LOOP_FACTOR;
   List<String> generated = new ArrayList<String>(totalKeys);
   for (int count = 0; count < totalKeys; count++) {
      generated.add("key" + nextIntGaussian(numKeys));
   }
   keys = generated;
}
/**
 * Samples an int in roughly [0, numKeys] with a Gaussian distribution centered at
 * numKeys / 2, by rejection-sampling the standard normal until it lands in +/-3 sigma.
 */
private int nextIntGaussian(int numKeys) {
   double gaussian;
   do {
      gaussian = RANDOM.nextGaussian();
   } while (gaussian < -3 || gaussian > 3);
   // Shift [-3, 3] to [0, 6] and scale to the key range.
   return (int) Math.abs((gaussian + 3) * numKeys / 6);
}
/**
 * Builds a synchronized, access-ordered LinkedHashMap that behaves as a bounded LRU:
 * once the size exceeds {@code capacity}, the least-recently-accessed entry is evicted.
 */
private Map<String, Integer> synchronizedLinkedHashMap(final int capacity, float loadFactor) {
   LinkedHashMap<String, Integer> lruMap = new LinkedHashMap<String, Integer>(capacity, loadFactor, true) {
      @Override
      protected boolean removeEldestEntry(Entry<String, Integer> eldest) {
         // Evict as soon as we grow past the configured capacity.
         return size() > capacity;
      }
   };
   return Collections.synchronizedMap(lruMap);
}
/**
 * Defines and starts an Infinispan cache with LRU eviction capped at {@code capacity}.
 * The expiration reaper wakes every 5s and entries idle for more than 120s expire.
 * The cache name embeds the capacity so different capacities get distinct caches.
 */
private Cache<String, Integer> configureAndBuildCache(int capacity) {
   ConfigurationBuilder config = new ConfigurationBuilder();
   config
         .eviction().maxEntries(capacity).strategy(EvictionStrategy.LRU)
         .expiration().wakeUpInterval(5000L).maxIdle(120000L);
   String cacheName = "cache" + capacity;
   cacheManager.defineConfiguration(cacheName, config.build());
   return cacheManager.getCache(cacheName);
}
/** Parameters: capacity, numKeys, concurrency, readerThreads, writerThreads, removerThreads. */
@DataProvider(name = "readWriteRemove")
public Object[][] independentReadWriteRemoveParams() {
   return new Object[][]{
         {CAPACITY, 3 * CAPACITY, 32, 90, 9, 3},
         {CAPACITY, 3 * CAPACITY, 32, 9, 1, 1},
   };
}
/** Parameters: capacity, numKeys, concurrency, threads. */
@DataProvider(name = "readOnly")
public Object[][] readOnly() {
   return new Object[][]{
         {CAPACITY, CAPACITY * 3 / 2, 32, 100},
         {CAPACITY, CAPACITY * 3 / 2, 32, 10},
   };
}
/** Parameters: capacity, numKeys, concurrency, threads, readToWriteRatio. */
@DataProvider(name = "readWriteRatio")
public Object[][] readWriteRatioParams() {
   return new Object[][]{
         {CAPACITY, 3 * CAPACITY, 32, 100, 9},
         {CAPACITY, 3 * CAPACITY, 32, 10, 9},
   };
}
/** Parameters: capacity, numKeys, concurrency, threads. */
@DataProvider(name = "writeOnMiss")
public Object[][] writeOnMissParams() {
   return new Object[][]{
         {CAPACITY, 3 * CAPACITY, 32, 100},
         {CAPACITY, 3 * CAPACITY, 32, 10},
   };
}
/**
 * Builds the set of containers under test, keyed by a short display name.
 * A TreeMap keeps them in a deterministic (alphabetical) report order.
 */
private Map<String, Map<String, Integer>> createMaps(int capacity, int numKeys, int concurrency) {
   Map<String, Map<String, Integer>> containers = new TreeMap<String, Map<String, Integer>>();
   com.github.benmanes.caffeine.cache.Cache<String, Integer> caffeine =
         Caffeine.newBuilder().maximumSize(capacity).build();
   containers.put("Caffeine", caffeine.asMap());
   // CHM doesn't have eviction, so we size it to the capacity to allow for dynamic resize
   containers.put("CHM", new ConcurrentHashMap<String, Integer>(capacity, MAP_LOAD_FACTOR, concurrency));
   containers.put("SLHM", synchronizedLinkedHashMap(capacity, MAP_LOAD_FACTOR));
   containers.put("CACHE", configureAndBuildCache(capacity));
   return containers;
}
@Test(dataProvider = "readWriteRemove")
public void testReadWriteRemove(int capacity, int numKeys, int concurrency, int readerThreads, int writerThreads, int removerThreads) throws Exception {
   System.out.printf("Testing independent read/write/remove performance with capacity %d, keys %d, concurrency level %d, readers %d, writers %d, removers %d\n",
         capacity, numKeys, concurrency, readerThreads, writerThreads, removerThreads);
   generateKeyList(numKeys);
   for (Entry<String, Map<String, Integer>> entry : createMaps(capacity, numKeys, concurrency).entrySet()) {
      mapTestReadWriteRemove(entry.getKey(), entry.getValue(), numKeys, readerThreads, writerThreads, removerThreads);
      // Drop the reference so the container can be garbage-collected before the next run.
      entry.setValue(null);
   }
}
/**
 * Warms the container up for one second, runs the timed read/write/remove workload,
 * and prints a single report line: rates, hit ratio, final size and key spread.
 */
private void mapTestReadWriteRemove(String name, Map<String, Integer> map, int numKeys, int readerThreads, int writerThreads, int removerThreads) throws Exception {
   // warm up for 1 second
   runMapTestReadWriteRemove(map, readerThreads, writerThreads, removerThreads, 1000);
   // real test
   TotalStats perf = runMapTestReadWriteRemove(map, readerThreads, writerThreads, removerThreads, RUNNING_TIME);
   // Arguments are evaluated left-to-right, so size() is read before computeStdDev
   // walks the key set, exactly as the separate printf calls did.
   System.out.printf("Container %-12s Ops/s %10.2f Gets/s %10.2f Puts/s %10.2f Removes/s %10.2f HitRatio %10.2f Size %10d StdDev %10.2f\n",
         name, perf.getTotalOpsPerSec(), perf.getOpsPerSec("GET"), perf.getOpsPerSec("PUT"),
         perf.getOpsPerSec("REMOVE"), perf.getTotalHitRatio() * 100, map.size(), computeStdDev(map, numKeys));
}
/**
 * Runs independent reader/writer/remover threads against {@code map} for
 * {@code runningTimeout} ms and returns the aggregated statistics.
 */
private TotalStats runMapTestReadWriteRemove(final Map<String, Integer> map, int numReaders, int numWriters,
      int numRemovers, final long runningTimeout) throws Exception {
   latch = new CountDownLatch(1);
   final TotalStats perf = new TotalStats();
   List<Thread> workers = new ArrayList<Thread>(numReaders + numWriters + numRemovers);
   for (int i = 0; i < numReaders; i++) {
      workers.add(new WorkerThread(runningTimeout, perf, readOperation(map)));
   }
   for (int i = 0; i < numWriters; i++) {
      workers.add(new WorkerThread(runningTimeout, perf, writeOperation(map)));
   }
   for (int i = 0; i < numRemovers; i++) {
      workers.add(new WorkerThread(runningTimeout, perf, removeOperation(map)));
   }
   for (Thread worker : workers) {
      worker.start();
   }
   // All workers block on the latch; releasing it starts them simultaneously.
   latch.countDown();
   for (Thread worker : workers) {
      worker.join();
   }
   return perf;
}
@Test(dataProvider = "readOnly")
public void testReadOnly(int capacity, int numKeys, int concurrency, int threads) throws Exception {
   System.out.printf("Testing read only performance with capacity %d, keys %d, concurrency level %d, threads %d\n",
         capacity, numKeys, concurrency, threads);
   generateKeyList(numKeys);
   for (Entry<String, Map<String, Integer>> entry : createMaps(capacity, numKeys, concurrency).entrySet()) {
      mapTestReadOnly(entry.getKey(), entry.getValue(), numKeys, threads);
      // Drop the reference so the container can be garbage-collected before the next run.
      entry.setValue(null);
   }
}
/**
 * Warms the map up with a mixed read/write load (so the container gets populated),
 * then measures a pure read workload and prints one report line.
 */
private void mapTestReadOnly(String name, Map<String, Integer> map, int numKeys, int threads) throws Exception {
   // warm up for 1 second - by doing reads and writes
   runMapTestMixedReadWrite(map, threads, 9, 1000);
   // real test
   TotalStats perf = runMapTestReadOnly(map, threads, RUNNING_TIME);
   System.out.printf("Container %-12s ", name);
   System.out.printf("Ops/s %10.2f ", perf.getTotalOpsPerSec());
   // Report the GET rate explicitly (matches mapTestReadWriteRemove) instead of
   // repeating the total; the values coincide only because this run is read-only.
   System.out.printf("Gets/s %10.2f ", perf.getOpsPerSec("GET"));
   System.out.printf("HitRatio %10.2f ", perf.getTotalHitRatio() * 100);
   System.out.printf("Size %10d ", map.size());
   double stdDev = computeStdDev(map, numKeys);
   System.out.printf("stdDev %10.2f\n", stdDev);
}
/**
 * Runs {@code numThreads} reader threads against {@code map} for
 * {@code runningTimeout} ms and returns the aggregated statistics.
 */
private TotalStats runMapTestReadOnly(final Map<String, Integer> map, int numThreads,
      final long runningTimeout) throws Exception {
   latch = new CountDownLatch(1);
   final TotalStats perf = new TotalStats();
   List<Thread> workers = new ArrayList<Thread>(numThreads);
   for (int i = 0; i < numThreads; i++) {
      workers.add(new WorkerThread(runningTimeout, perf, readOperation(map)));
   }
   for (Thread worker : workers) {
      worker.start();
   }
   // All workers block on the latch; releasing it starts them simultaneously.
   latch.countDown();
   for (Thread worker : workers) {
      worker.join();
   }
   return perf;
}
@Test(dataProvider = "readWriteRatio")
public void testMixedReadWrite(int capacity, int numKeys, int concurrency, int threads, int readToWriteRatio) throws Exception {
   System.out.printf("Testing mixed read/write performance with capacity %d, keys %d, concurrency level %d, threads %d, read:write ratio %d:1\n",
         capacity, numKeys, concurrency, threads, readToWriteRatio);
   generateKeyList(numKeys);
   for (Entry<String, Map<String, Integer>> entry : createMaps(capacity, numKeys, concurrency).entrySet()) {
      mapTestMixedReadWrite(entry.getKey(), entry.getValue(), numKeys, threads, readToWriteRatio);
      // Drop the reference so the container can be garbage-collected before the next run.
      entry.setValue(null);
   }
}
/**
 * Warms the container up for one second, runs the timed mixed read/write workload,
 * and prints a single report line. Per-operation rates are derived from the total
 * using the configured read:write ratio.
 */
private void mapTestMixedReadWrite(String name, Map<String, Integer> map, int numKeys, int threads, int readToWriteRatio) throws Exception {
   // warm up for 1 second
   runMapTestMixedReadWrite(map, threads, readToWriteRatio, 1000);
   // real test
   TotalStats perf = runMapTestMixedReadWrite(map, threads, readToWriteRatio, RUNNING_TIME);
   double totalOpsPerSec = perf.getTotalOpsPerSec();
   // Out of every (ratio + 1) operations, ratio are reads and one is a write.
   double getsPerSec = totalOpsPerSec * readToWriteRatio / (readToWriteRatio + 1);
   double putsPerSec = totalOpsPerSec / (readToWriteRatio + 1);
   System.out.printf("Container %-12s Ops/s %10.2f Gets/s %10.2f Puts/s %10.2f HitRatio %10.2f Size %10d stdDev %10.2f\n",
         name, totalOpsPerSec, getsPerSec, putsPerSec,
         perf.getTotalHitRatio() * 100, map.size(), computeStdDev(map, numKeys));
}
/**
 * Runs {@code numThreads} threads that interleave reads and writes at the given
 * ratio against {@code map} for {@code runningTimeout} ms.
 */
private TotalStats runMapTestMixedReadWrite(final Map<String, Integer> map, int numThreads,
      int readToWriteRatio, final long runningTimeout) throws Exception {
   latch = new CountDownLatch(1);
   final TotalStats perf = new TotalStats();
   List<Thread> workers = new ArrayList<Thread>(numThreads);
   for (int i = 0; i < numThreads; i++) {
      workers.add(new WorkerThread(runningTimeout, perf, readWriteOperation(map, readToWriteRatio)));
   }
   for (Thread worker : workers) {
      worker.start();
   }
   // All workers block on the latch; releasing it starts them simultaneously.
   latch.countDown();
   for (Thread worker : workers) {
      worker.join();
   }
   return perf;
}
@Test(dataProvider = "writeOnMiss")
public void testWriteOnMiss(int capacity, int numKeys, int concurrency, int threads) throws Exception {
   System.out.printf("Testing write on miss performance with capacity %d, keys %d, concurrency level %d, threads %d\n",
         capacity, numKeys, concurrency, threads);
   generateKeyList(numKeys);
   for (Entry<String, Map<String, Integer>> entry : createMaps(capacity, numKeys, concurrency).entrySet()) {
      mapTestWriteOnMiss(entry.getKey(), entry.getValue(), numKeys, threads);
      // Drop the reference so the container can be garbage-collected before the next run.
      entry.setValue(null);
   }
}
/**
 * Warms the container up for one second, runs the timed get-then-put-on-miss
 * workload, and prints a single report line.
 */
private void mapTestWriteOnMiss(String name, Map<String, Integer> map, int numKeys, int threads) throws Exception {
   // warm up for 1 second
   runMapTestWriteOnMiss(map, threads, 1000);
   // real test
   TotalStats perf = runMapTestWriteOnMiss(map, threads, RUNNING_TIME);
   System.out.printf("Container %-12s Ops/s %10.2f HitRatio %10.2f Size %10d stdDev %10.2f\n",
         name, perf.getTotalOpsPerSec(), perf.getTotalHitRatio() * 100,
         map.size(), computeStdDev(map, numKeys));
}
/**
 * Runs {@code numThreads} threads performing get-then-put-on-miss against
 * {@code map} for {@code runningTimeout} ms.
 */
private TotalStats runMapTestWriteOnMiss(final Map<String, Integer> map, int numThreads,
      final long runningTimeout) throws Exception {
   latch = new CountDownLatch(1);
   final TotalStats perf = new TotalStats();
   List<Thread> workers = new ArrayList<Thread>(numThreads);
   for (int i = 0; i < numThreads; i++) {
      workers.add(new WorkerThread(runningTimeout, perf, writeOnMissOperation(map)));
   }
   for (Thread worker : workers) {
      worker.start();
   }
   // All workers block on the latch; releasing it starts them simultaneously.
   latch.countDown();
   for (Thread worker : workers) {
      worker.join();
   }
   return perf;
}
private double computeStdDev(Map<String, Integer> map, int numKeys) {
// The keys closest to the mean are suposed to be accessed more often
// So we score each map by the standard deviation of the keys in the map
// at the end of the test
double variance = 0;
for (String key : map.keySet()) {
double value = Integer.parseInt(key.substring(3));
variance += (value - numKeys / 2) * (value - numKeys / 2);
}
return sqrt(variance / map.size());
}
/**
 * Blocks the calling worker thread until the driver releases the start latch.
 * An interrupt aborts the test via RuntimeException.
 */
private void waitForStart() {
   try {
      latch.await();
   } catch (InterruptedException e) {
      // Restore the interrupt status so code further up the stack can observe it.
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
   }
}
/** Builds a GET operation; a hit means the key was present. */
private Operation<String, Integer> readOperation(Map<String, Integer> map) {
   return new Operation<String, Integer>(map, "GET") {
      @Override
      public boolean call(String key, long run) {
         Integer value = map.get(key);
         return value != null;
      }
   };
}
/** Builds a PUT operation; a hit means the key was already present. */
private Operation<String, Integer> writeOperation(Map<String, Integer> map) {
   return new Operation<String, Integer>(map, "PUT") {
      @Override
      public boolean call(String key, long run) {
         Integer previous = map.put(key, (int) run);
         return previous != null;
      }
   };
}
/** Builds a REMOVE operation; a hit means the key was present before removal. */
private Operation<String, Integer> removeOperation(Map<String, Integer> map) {
   return new Operation<String, Integer>(map, "REMOVE") {
      @Override
      public boolean call(String key, long run) {
         Integer removed = map.remove(key);
         return removed != null;
      }
   };
}
/**
 * Builds an operation that writes once out of every (readToWriteRatio + 1) calls,
 * based on the caller's iteration counter, and reads otherwise.
 */
private Operation<String, Integer> readWriteOperation(final Map<String, Integer> map, final int readToWriteRatio) {
   return new Operation<String, Integer>(map, "READ/WRITE:" + readToWriteRatio + "/1") {
      @Override
      public boolean call(String key, long run) {
         boolean isWrite = run % (readToWriteRatio + 1) == 0;
         if (isWrite) {
            return map.put(key, (int) run) != null;
         }
         return map.get(key) != null;
      }
   };
}
/**
 * Builds an operation that reads the key and, on a miss, writes it back -
 * a classic read-through / cache-aside access pattern.
 */
private Operation<String, Integer> writeOnMissOperation(final Map<String, Integer> map) {
   return new Operation<String, Integer>(map, "PUTMISSING") {
      @Override
      public boolean call(String key, long run) {
         Integer existing = map.get(key);
         if (existing != null) {
            return true;
         }
         map.put(key, (int) run);
         return false;
      }
   };
}
/**
 * Creates the cache manager that hosts the per-capacity caches defined in
 * {@link #configureAndBuildCache(int)}.
 */
@Override
protected EmbeddedCacheManager createCacheManager() throws Exception {
   return TestCacheManagerFactory.createCacheManager();
}
/**
 * A worker that waits on the shared start latch, then repeatedly applies one
 * {@link Operation} to Gaussian-distributed keys for {@code runningTimeout} ms,
 * finally merging its counters into the shared {@link TotalStats}.
 */
private class WorkerThread extends Thread {
   private final long runningTimeout;
   private final TotalStats perf;
   private final Operation<String, Integer> op;

   public WorkerThread(long runningTimeout, TotalStats perf, Operation<String, Integer> op) {
      this.runningTimeout = runningTimeout;
      this.perf = perf;
      this.op = op;
   }

   @Override
   public void run() {
      waitForStart();
      // Take the start timestamp AFTER the latch is released: otherwise the time
      // spent blocked on the latch eats into the running window and inflates the
      // reported per-thread running time, skewing the ops/s numbers.
      long startMillis = System.currentTimeMillis();
      long endMillis = startMillis + runningTimeout;
      int keyIndex = RANDOM.nextInt(keys.size());
      long runs = 0;
      long missCount = 0;
      // Only check the clock once every 0x4000 iterations to keep timing overhead low.
      while ((runs & 0x3FFF) != 0 || System.currentTimeMillis() < endMillis) {
         boolean hit = op.call(keys.get(keyIndex), runs);
         if (!hit) missCount++;
         keyIndex++;
         runs++;
         if (keyIndex >= keys.size()) {
            keyIndex = 0;
         }
      }
      perf.addStats(op.getName(), runs, System.currentTimeMillis() - startMillis, missCount);
   }
}
/**
 * A single keyed map operation (get/put/remove/...) executed repeatedly by a
 * worker thread. The name doubles as the aggregation key for statistics.
 */
private static abstract class Operation<K, V> {
   protected final Map<K, V> map;
   protected final String name;

   public Operation(Map<K, V> map, String name) {
      this.map = map;
      this.name = name;
   }

   /**
    * Performs one operation against the map.
    *
    * @param key the key to operate on
    * @param run the calling worker's current iteration count
    * @return Return true for a hit, false for a miss.
    */
   public abstract boolean call(K key, long run);

   public String getName() {
      return name;
   }
}
/**
 * Thread-safe accumulator of per-operation statistics. Each worker merges its
 * counters in via {@link #addStats}; the reporting methods derive rates and hit
 * ratios from the merged snapshots.
 */
private static class TotalStats {
   private ConcurrentHashMap<String, OpStats> statsMap = new ConcurrentHashMap<String, OpStats>();

   public void addStats(String opName, long opCount, long runningTime, long missCount) {
      OpStats s = new OpStats(opName, opCount, runningTime, missCount);
      OpStats old = statsMap.putIfAbsent(opName, s);
      boolean replaced = old == null;
      // Optimistic merge loop: re-read the current snapshot, build a combined one,
      // and retry until the compare-and-swap style replace() succeeds.
      while (!replaced) {
         old = statsMap.get(opName);
         s = new OpStats(old, opCount, runningTime, missCount);
         replaced = statsMap.replace(opName, old, s);
      }
   }

   public double getOpsPerSec(String opName) {
      OpStats s = statsMap.get(opName);
      if (s == null) return 0;
      // runningTime is summed over threads, so opCount / runningTime is the average
      // per-thread rate; scaling by threadCount estimates the aggregate rate.
      return s.opCount * 1000. / s.runningTime * s.threadCount;
   }

   public double getTotalOpsPerSec() {
      long totalOpCount = 0;
      long totalRunningTime = 0;
      long totalThreadCount = 0;
      for (Map.Entry<String, OpStats> e : statsMap.entrySet()) {
         OpStats s = e.getValue();
         totalOpCount += s.opCount;
         totalRunningTime += s.runningTime;
         totalThreadCount += s.threadCount;
      }
      // Same per-thread-average estimate as getOpsPerSec, across all operation types.
      return totalOpCount * 1000. / totalRunningTime * totalThreadCount;
   }

   public double getHitRatio(String opName) {
      OpStats s = statsMap.get(opName);
      if (s == null) return 0;
      return 1 - 1. * s.missCount / s.opCount;
   }

   public double getTotalHitRatio() {
      long totalOpCount = 0;
      long totalMissCount = 0;
      for (Map.Entry<String, OpStats> e : statsMap.entrySet()) {
         OpStats s = e.getValue();
         totalOpCount += s.opCount;
         totalMissCount += s.missCount;
      }
      return 1 - 1. * totalMissCount / totalOpCount;
   }
}
/**
 * Immutable snapshot of the accumulated statistics for one operation type.
 * Merging a thread's results produces a NEW instance (second constructor), which
 * is what makes the optimistic replace() loop in TotalStats.addStats safe.
 */
private static class OpStats {
   public final String opName;
   public final int threadCount;   // number of threads merged into this snapshot
   public final long opCount;      // total operations across those threads
   public final long runningTime;  // summed per-thread running time, in milliseconds
   public final long missCount;    // total misses across those threads

   // Snapshot for a single thread's results.
   private OpStats(String opName, long opCount, long runningTime, long missCount) {
      this.opName = opName;
      this.threadCount = 1;
      this.opCount = opCount;
      this.runningTime = runningTime;
      this.missCount = missCount;
   }

   // Snapshot combining an existing snapshot with one more thread's results.
   private OpStats(OpStats base, long opCount, long runningTime, long missCount) {
      this.opName = base.opName;
      this.threadCount = base.threadCount + 1;
      this.opCount = base.opCount + opCount;
      this.runningTime = base.runningTime + runningTime;
      this.missCount = base.missCount + missCount;
   }
}
}