Commit 749d0823 authored by Thomas Mueller

LIRS cache: concurrent and concurrent with long key

Parent: f3864e40
...@@ -104,6 +104,7 @@ import org.h2.test.server.TestNestedLoop; ...@@ -104,6 +104,7 @@ import org.h2.test.server.TestNestedLoop;
import org.h2.test.server.TestWeb; import org.h2.test.server.TestWeb;
import org.h2.test.server.TestInit; import org.h2.test.server.TestInit;
import org.h2.test.store.TestCacheLIRS; import org.h2.test.store.TestCacheLIRS;
import org.h2.test.store.TestCacheLongKeyLIRS;
import org.h2.test.store.TestConcurrent; import org.h2.test.store.TestConcurrent;
import org.h2.test.store.TestDataUtils; import org.h2.test.store.TestDataUtils;
import org.h2.test.store.TestMVStore; import org.h2.test.store.TestMVStore;
...@@ -668,6 +669,7 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1` ...@@ -668,6 +669,7 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
private void testUnit() { private void testUnit() {
// mv store // mv store
new TestCacheLIRS().runTest(this); new TestCacheLIRS().runTest(this);
new TestCacheLongKeyLIRS().runTest(this);
new TestConcurrent().runTest(this); new TestConcurrent().runTest(this);
new TestDataUtils().runTest(this); new TestDataUtils().runTest(this);
new TestMVRTree().runTest(this); new TestMVRTree().runTest(this);
......
...@@ -8,8 +8,8 @@ package org.h2.test.store; ...@@ -8,8 +8,8 @@ package org.h2.test.store;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import org.h2.dev.store.btree.DataType; import org.h2.dev.store.btree.DataType;
import org.h2.dev.store.btree.MapFactory;
import org.h2.dev.store.btree.DataUtils; import org.h2.dev.store.btree.DataUtils;
import org.h2.dev.store.btree.MapFactory;
import org.h2.util.StringUtils; import org.h2.util.StringUtils;
/** /**
......
...@@ -8,7 +8,7 @@ package org.h2.test.store; ...@@ -8,7 +8,7 @@ package org.h2.test.store;
import java.util.Random; import java.util.Random;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import org.h2.dev.store.btree.CacheLongKeyLIRS; import org.h2.dev.store.cache.CacheLongKeyLIRS;
import org.h2.test.TestBase; import org.h2.test.TestBase;
import org.h2.util.Task; import org.h2.util.Task;
......
...@@ -10,7 +10,7 @@ import java.util.HashSet; ...@@ -10,7 +10,7 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Random; import java.util.Random;
import org.h2.dev.store.btree.CacheLIRS; import org.h2.dev.store.cache.CacheLIRS;
import org.h2.test.TestBase; import org.h2.test.TestBase;
import org.h2.util.New; import org.h2.util.New;
...@@ -29,6 +29,10 @@ public class TestCacheLIRS extends TestBase { ...@@ -29,6 +29,10 @@ public class TestCacheLIRS extends TestBase {
} }
public void test() throws Exception { public void test() throws Exception {
testCache();
}
private void testCache() {
testEdgeCases(); testEdgeCases();
testSize(); testSize();
testClear(); testClear();
...@@ -42,7 +46,7 @@ public class TestCacheLIRS extends TestBase { ...@@ -42,7 +46,7 @@ public class TestCacheLIRS extends TestBase {
} }
private void testEdgeCases() { private void testEdgeCases() {
CacheLIRS<Integer, Integer> test = CacheLIRS.newInstance(1, 1); CacheLIRS<Integer, Integer> test = createCache(1);
test.put(1, 10, 100); test.put(1, 10, 100);
assertEquals(10, test.get(1).intValue()); assertEquals(10, test.get(1).intValue());
try { try {
...@@ -82,19 +86,19 @@ public class TestCacheLIRS extends TestBase { ...@@ -82,19 +86,19 @@ public class TestCacheLIRS extends TestBase {
verifyMapSize(769, 2048); verifyMapSize(769, 2048);
CacheLIRS<Integer, Integer> test; CacheLIRS<Integer, Integer> test;
test = CacheLIRS.newInstance(3, 10); test = createCache(3, 10);
test.put(0, 0, 9); test.put(0, 0, 9);
test.put(1, 10, 9); test.put(1, 10, 9);
test.put(2, 20, 9); test.put(2, 20, 9);
test.put(3, 30, 9); test.put(3, 30, 9);
test.put(4, 40, 9); test.put(4, 40, 9);
test = CacheLIRS.newInstance(1, 1); test = createCache(1, 1);
test.put(1, 10); test.put(1, 10);
test.put(0, 0); test.put(0, 0);
test.get(0); test.get(0);
test = CacheLIRS.newInstance(1000, 1); test = createCache(1000);
for (int j = 0; j < 2000; j++) { for (int j = 0; j < 2000; j++) {
test.put(j, j); test.put(j, j);
} }
...@@ -106,18 +110,18 @@ public class TestCacheLIRS extends TestBase { ...@@ -106,18 +110,18 @@ public class TestCacheLIRS extends TestBase {
assertEquals(968, test.sizeNonResident()); assertEquals(968, test.sizeNonResident());
} }
private void verifyMapSize(int elements, int mapSize) { private void verifyMapSize(int elements, int expectedMapSize) {
CacheLIRS<Integer, Integer> test; CacheLIRS<Integer, Integer> test;
test = CacheLIRS.newInstance(elements - 1, 1); test = createCache(elements - 1);
assertTrue(mapSize > test.sizeMapArray()); assertTrue(test.sizeMapArray() < expectedMapSize);
test = CacheLIRS.newInstance(elements, 1); test = createCache(elements);
assertEquals(mapSize, test.sizeMapArray()); assertEquals(expectedMapSize, test.sizeMapArray());
test = CacheLIRS.newInstance(elements * 100, 100); test = createCache(elements * 100, 100);
assertEquals(mapSize, test.sizeMapArray()); assertEquals(expectedMapSize, test.sizeMapArray());
} }
private void testGetPutPeekRemove() { private void testGetPutPeekRemove() {
CacheLIRS<Integer, Integer> test = CacheLIRS.newInstance(4, 1); CacheLIRS<Integer, Integer> test = createCache(4);
test.put(1, 10); test.put(1, 10);
test.put(2, 20); test.put(2, 20);
test.put(3, 30); test.put(3, 30);
...@@ -234,7 +238,7 @@ public class TestCacheLIRS extends TestBase { ...@@ -234,7 +238,7 @@ public class TestCacheLIRS extends TestBase {
} }
private void testPruneStack() { private void testPruneStack() {
CacheLIRS<Integer, Integer> test = CacheLIRS.newInstance(5, 1); CacheLIRS<Integer, Integer> test = createCache(5);
for (int i = 0; i < 7; i++) { for (int i = 0; i < 7; i++) {
test.put(i, i * 10); test.put(i, i * 10);
} }
...@@ -253,7 +257,7 @@ public class TestCacheLIRS extends TestBase { ...@@ -253,7 +257,7 @@ public class TestCacheLIRS extends TestBase {
} }
private void testClear() { private void testClear() {
CacheLIRS<Integer, Integer> test = CacheLIRS.newInstance(40, 10); CacheLIRS<Integer, Integer> test = createCache(40, 10);
for (int i = 0; i < 5; i++) { for (int i = 0; i < 5; i++) {
test.put(i, 10 * i, 9); test.put(i, 10 * i, 9);
} }
...@@ -302,7 +306,7 @@ public class TestCacheLIRS extends TestBase { ...@@ -302,7 +306,7 @@ public class TestCacheLIRS extends TestBase {
} }
private void testLimitHot() { private void testLimitHot() {
CacheLIRS<Integer, Integer> test = CacheLIRS.newInstance(100, 1); CacheLIRS<Integer, Integer> test = createCache(100);
for (int i = 0; i < 300; i++) { for (int i = 0; i < 300; i++) {
test.put(i, 10 * i); test.put(i, 10 * i);
} }
...@@ -312,7 +316,7 @@ public class TestCacheLIRS extends TestBase { ...@@ -312,7 +316,7 @@ public class TestCacheLIRS extends TestBase {
} }
private void testLimitNonResident() { private void testLimitNonResident() {
CacheLIRS<Integer, Integer> test = CacheLIRS.newInstance(4, 1); CacheLIRS<Integer, Integer> test = createCache(4);
for (int i = 0; i < 20; i++) { for (int i = 0; i < 20; i++) {
test.put(i, 10 * i); test.put(i, 10 * i);
} }
...@@ -347,7 +351,7 @@ public class TestCacheLIRS extends TestBase { ...@@ -347,7 +351,7 @@ public class TestCacheLIRS extends TestBase {
} }
CacheLIRS<BadHash, Integer> test = CacheLIRS.newInstance(size * 2, 1); CacheLIRS<BadHash, Integer> test = createCache(size * 2);
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
test.put(new BadHash(i), i); test.put(new BadHash(i), i);
} }
...@@ -386,7 +390,7 @@ public class TestCacheLIRS extends TestBase { ...@@ -386,7 +390,7 @@ public class TestCacheLIRS extends TestBase {
boolean log = false; boolean log = false;
int size = 20; int size = 20;
// cache size 11 (10 hot, 1 cold) // cache size 11 (10 hot, 1 cold)
CacheLIRS<Integer, Integer> test = CacheLIRS.newInstance(size / 2 + 1, 1); CacheLIRS<Integer, Integer> test = createCache(size / 2 + 1);
// init the cache with some dummy entries // init the cache with some dummy entries
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
test.put(-i, -i * 10); test.put(-i, -i * 10);
...@@ -440,7 +444,7 @@ public class TestCacheLIRS extends TestBase { ...@@ -440,7 +444,7 @@ public class TestCacheLIRS extends TestBase {
int size = 10; int size = 10;
Random r = new Random(1); Random r = new Random(1);
for (int j = 0; j < 100; j++) { for (int j = 0; j < 100; j++) {
CacheLIRS<Integer, Integer> test = CacheLIRS.newInstance(size / 2, 1); CacheLIRS<Integer, Integer> test = createCache(size / 2);
HashMap<Integer, Integer> good = New.hashMap(); HashMap<Integer, Integer> good = New.hashMap();
for (int i = 0; i < 10000; i++) { for (int i = 0; i < 10000; i++) {
int key = r.nextInt(size); int key = r.nextInt(size);
...@@ -524,4 +528,12 @@ public class TestCacheLIRS extends TestBase { ...@@ -524,4 +528,12 @@ public class TestCacheLIRS extends TestBase {
} }
} }
private static <K, V> CacheLIRS<K, V> createCache(int maxElements) {
return createCache(maxElements, 1);
}
private static <K, V> CacheLIRS<K, V> createCache(int maxSize, int averageSize) {
return CacheLIRS.newInstance(maxSize, averageSize, 1, 0);
}
} }
...@@ -10,7 +10,7 @@ import java.util.HashSet; ...@@ -10,7 +10,7 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Random; import java.util.Random;
import org.h2.dev.store.btree.CacheLongKeyLIRS; import org.h2.dev.store.cache.CacheLongKeyLIRS;
import org.h2.test.TestBase; import org.h2.test.TestBase;
import org.h2.util.New; import org.h2.util.New;
...@@ -29,19 +29,23 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -29,19 +29,23 @@ public class TestCacheLongKeyLIRS extends TestBase {
} }
public void test() throws Exception { public void test() throws Exception {
testCache();
}
private void testCache() {
testEdgeCases(); testEdgeCases();
testSize(); testSize();
testClear(); testClear();
// testGetPutPeekRemove(); testGetPutPeekRemove();
testPruneStack(); testPruneStack();
testLimitHot(); testLimitHot();
testLimitNonResident(); testLimitNonResident();
// testScanResistance(); testScanResistance();
testRandomOperations(); testRandomOperations();
} }
private void testEdgeCases() { private void testEdgeCases() {
CacheLongKeyLIRS<Integer> test = CacheLongKeyLIRS.newInstance(1); CacheLongKeyLIRS<Integer> test = createCache(1);
test.put(1, 10, 100); test.put(1, 10, 100);
assertEquals(10, test.get(1).intValue()); assertEquals(10, test.get(1).intValue());
try { try {
...@@ -65,41 +69,52 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -65,41 +69,52 @@ public class TestCacheLongKeyLIRS extends TestBase {
} }
private void testSize() { private void testSize() {
int todo; verifyMapSize(7, 16);
// verifyMapSize(7, 16); verifyMapSize(13, 32);
// verifyMapSize(13, 32); verifyMapSize(25, 64);
// verifyMapSize(25, 64); verifyMapSize(49, 128);
// verifyMapSize(49, 128); verifyMapSize(97, 256);
// verifyMapSize(97, 256); verifyMapSize(193, 512);
// verifyMapSize(193, 512); verifyMapSize(385, 1024);
// verifyMapSize(385, 1024); verifyMapSize(769, 2048);
// verifyMapSize(769, 2048);
// CacheLongKeyLIRS<Integer> test;
// CacheConcurrentLongKeyLIRS<Integer> test; test = createCache(3, 10);
// test = CacheConcurrentLongKeyLIRS.newInstance(1000, 1); test.put(0, 0, 9);
// for (int j = 0; j < 2000; j++) { test.put(1, 10, 9);
// test.put(j, j); test.put(2, 20, 9);
// } test.put(3, 30, 9);
// // for a cache of size 1000, test.put(4, 40, 9);
// // there are 62 cold entries (about 6.25%).
// assertEquals(62, test.size() - test.sizeHot()); test = createCache(1, 1);
// // at most as many non-resident elements test.put(1, 10);
// // as there are entries in the stack test.put(0, 0);
// assertEquals(968, test.sizeNonResident()); test.get(0);
test = createCache(1000);
for (int j = 0; j < 2000; j++) {
test.put(j, j);
}
// for a cache of size 1000,
// there are 62 cold entries (about 6.25%).
assertEquals(62, test.size() - test.sizeHot());
// at most as many non-resident elements
// as there are entries in the stack
assertEquals(968, test.sizeNonResident());
} }
private void verifyMapSize(int elements, int mapSize) { private void verifyMapSize(int elements, int expectedMapSize) {
CacheLongKeyLIRS<Integer> test; CacheLongKeyLIRS<Integer> test;
test = CacheLongKeyLIRS.newInstance(elements - 1); test = createCache(elements - 1);
assertTrue(mapSize > test.sizeMapArray()); assertTrue(test.sizeMapArray() < expectedMapSize);
test = CacheLongKeyLIRS.newInstance(elements); test = createCache(elements);
assertEquals(mapSize, test.sizeMapArray()); assertEquals(expectedMapSize, test.sizeMapArray());
test = CacheLongKeyLIRS.newInstance(elements * 100, 100, 16, 10); test = createCache(elements * 100, 100);
assertEquals(mapSize, test.sizeMapArray()); assertEquals(expectedMapSize, test.sizeMapArray());
} }
private void testGetPutPeekRemove() { private void testGetPutPeekRemove() {
CacheLongKeyLIRS<Integer> test = CacheLongKeyLIRS.newInstance(4); CacheLongKeyLIRS<Integer> test = createCache(4);
test.put(1, 10); test.put(1, 10);
test.put(2, 20); test.put(2, 20);
test.put(3, 30); test.put(3, 30);
...@@ -216,7 +231,7 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -216,7 +231,7 @@ public class TestCacheLongKeyLIRS extends TestBase {
} }
private void testPruneStack() { private void testPruneStack() {
CacheLongKeyLIRS<Integer> test = CacheLongKeyLIRS.newInstance(5); CacheLongKeyLIRS<Integer> test = createCache(5);
for (int i = 0; i < 7; i++) { for (int i = 0; i < 7; i++) {
test.put(i, i * 10); test.put(i, i * 10);
} }
...@@ -235,7 +250,7 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -235,7 +250,7 @@ public class TestCacheLongKeyLIRS extends TestBase {
} }
private void testClear() { private void testClear() {
CacheLongKeyLIRS<Integer> test = CacheLongKeyLIRS.newInstance(40, 10, 16, 1); CacheLongKeyLIRS<Integer> test = createCache(40, 10);
for (int i = 0; i < 5; i++) { for (int i = 0; i < 5; i++) {
test.put(i, 10 * i, 9); test.put(i, 10 * i, 9);
} }
...@@ -252,10 +267,10 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -252,10 +267,10 @@ public class TestCacheLongKeyLIRS extends TestBase {
} }
assertEquals(40, test.getMaxMemory()); assertEquals(40, test.getMaxMemory());
assertEquals(10, test.getAverageMemory()); assertEquals(10, test.getAverageMemory());
// assertEquals(36, test.getUsedMemory()); assertEquals(36, test.getUsedMemory());
// assertEquals(4, test.size()); assertEquals(4, test.size());
// assertEquals(3, test.sizeHot()); assertEquals(3, test.sizeHot());
// assertEquals(1, test.sizeNonResident()); assertEquals(1, test.sizeNonResident());
assertFalse(test.isEmpty()); assertFalse(test.isEmpty());
// changing the limit is not supposed to modify the map // changing the limit is not supposed to modify the map
...@@ -284,17 +299,17 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -284,17 +299,17 @@ public class TestCacheLongKeyLIRS extends TestBase {
} }
private void testLimitHot() { private void testLimitHot() {
CacheLongKeyLIRS<Integer> test = CacheLongKeyLIRS.newInstance(100); CacheLongKeyLIRS<Integer> test = createCache(100);
for (int i = 0; i < 300; i++) { for (int i = 0; i < 300; i++) {
test.put(i, 10 * i); test.put(i, 10 * i);
} }
// assertEquals(100, test.size()); assertEquals(100, test.size());
// assertEquals(99, test.sizeNonResident()); assertEquals(99, test.sizeNonResident());
// assertEquals(93, test.sizeHot()); assertEquals(93, test.sizeHot());
} }
private void testLimitNonResident() { private void testLimitNonResident() {
CacheLongKeyLIRS<Integer> test = CacheLongKeyLIRS.newInstance(4); CacheLongKeyLIRS<Integer> test = createCache(4);
for (int i = 0; i < 20; i++) { for (int i = 0; i < 20; i++) {
test.put(i, 10 * i); test.put(i, 10 * i);
} }
...@@ -305,7 +320,7 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -305,7 +320,7 @@ public class TestCacheLongKeyLIRS extends TestBase {
boolean log = false; boolean log = false;
int size = 20; int size = 20;
// cache size 11 (10 hot, 1 cold) // cache size 11 (10 hot, 1 cold)
CacheLongKeyLIRS<Integer> test = CacheLongKeyLIRS.newInstance(size / 2 + 1); CacheLongKeyLIRS<Integer> test = createCache(size / 2 + 1);
// init the cache with some dummy entries // init the cache with some dummy entries
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
test.put(-i, -i * 10); test.put(-i, -i * 10);
...@@ -359,7 +374,7 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -359,7 +374,7 @@ public class TestCacheLongKeyLIRS extends TestBase {
int size = 10; int size = 10;
Random r = new Random(1); Random r = new Random(1);
for (int j = 0; j < 100; j++) { for (int j = 0; j < 100; j++) {
CacheLongKeyLIRS<Integer> test = CacheLongKeyLIRS.newInstance(size / 2); CacheLongKeyLIRS<Integer> test = createCache(size / 2);
HashMap<Integer, Integer> good = New.hashMap(); HashMap<Integer, Integer> good = New.hashMap();
for (int i = 0; i < 10000; i++) { for (int i = 0; i < 10000; i++) {
int key = r.nextInt(size); int key = r.nextInt(size);
...@@ -400,7 +415,7 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -400,7 +415,7 @@ public class TestCacheLongKeyLIRS extends TestBase {
} }
} }
private static <V> String toString(CacheLongKeyLIRS<V> cache) { private static <K, V> String toString(CacheLongKeyLIRS<V> cache) {
StringBuilder buff = new StringBuilder(); StringBuilder buff = new StringBuilder();
buff.append("mem: " + cache.getUsedMemory()); buff.append("mem: " + cache.getUsedMemory());
buff.append(" stack:"); buff.append(" stack:");
...@@ -418,10 +433,10 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -418,10 +433,10 @@ public class TestCacheLongKeyLIRS extends TestBase {
return buff.toString(); return buff.toString();
} }
private <V> void verify(CacheLongKeyLIRS<V> cache, String expected) { private <K, V> void verify(CacheLongKeyLIRS<V> cache, String expected) {
if (expected != null) { if (expected != null) {
String got = toString(cache); String got = toString(cache);
// assertEquals(expected, got); assertEquals(expected, got);
} }
int mem = 0; int mem = 0;
for (long k : cache.keySet()) { for (long k : cache.keySet()) {
...@@ -437,10 +452,18 @@ public class TestCacheLongKeyLIRS extends TestBase { ...@@ -437,10 +452,18 @@ public class TestCacheLongKeyLIRS extends TestBase {
hot.removeAll(nonResident); hot.removeAll(nonResident);
assertEquals(hot.size(), cache.sizeHot()); assertEquals(hot.size(), cache.sizeHot());
assertEquals(hot.size() + cold.size(), cache.size()); assertEquals(hot.size() + cold.size(), cache.size());
// if (stack.size() > 0) { if (stack.size() > 0) {
// long lastStack = stack.get(stack.size() - 1); long lastStack = stack.get(stack.size() - 1);
// assertTrue(hot.contains(lastStack)); assertTrue(hot.contains(lastStack));
// } }
}
private static <V> CacheLongKeyLIRS<V> createCache(int maxElements) {
return createCache(maxElements, 1);
}
private static <V> CacheLongKeyLIRS<V> createCache(int maxSize, int averageSize) {
return CacheLongKeyLIRS.newInstance(maxSize, averageSize, 1, 0);
} }
} }
/*
* Copyright 2004-2011 H2 Group. Multiple-Licensed under the H2 License,
* Version 1.0, and under the Eclipse Public License, Version 1.0
* (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.dev.store.btree;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* A scan resistant cache. It is meant to cache objects that are relatively
* costly to acquire, for example file content.
* <p>
* This implementation is not multi-threading safe. Null keys or null values are
* not allowed. The map fill factor is at most 75%.
* <p>
* Each entry is assigned a distinct memory size, and the cache will try to use
* at most the specified amount of memory. The memory unit is not relevant,
* however it is suggested to use bytes as the unit.
* <p>
* This class implements the LIRS replacement algorithm invented by Xiaodong
* Zhang and Song Jiang as described in
* http://www.cse.ohio-state.edu/~zhang/lirs-sigmetrics-02.html with a few
* smaller changes: An additional queue for non-resident entries is used, to
* prevent unbound memory usage. The maximum size of this queue is at most the
* size of the rest of the stack. About 6.25% of the mapped entries are cold.
*
* @author Thomas Mueller
* @param <K> the key type
* @param <V> the value type
*/
public class CacheLIRS<K, V> extends AbstractMap<K, V> implements Map<K, V> {
/**
 * The maximum memory this cache should use.
 */
private long maxMemory;
/**
 * The average memory used by one entry.
 */
private int averageMemory;
/**
 * The currently used memory.
 */
private long usedMemory;
/**
 * The number of (hot, cold, and non-resident) entries in the map.
 */
private int mapSize;
/**
 * The LIRS stack size.
 */
private int stackSize;
/**
 * The size of the LIRS queue for resident cold entries.
 */
private int queueSize;
/**
 * The size of the LIRS queue for non-resident cold entries.
 */
private int queue2Size;
/**
 * The map array. The size is always a power of 2.
 */
private Entry<K, V>[] entries;
/**
 * The bit mask that is applied to the key hash code to get the index in the
 * map array. The mask is the length of the array minus one.
 */
private int mask;
/**
 * The stack of recently referenced elements. This includes all hot entries,
 * the recently referenced cold entries, and all non-resident cold entries.
 * This is a circular doubly linked list; this field is the sentinel head
 * (it links to itself when the list is empty, see clear()).
 */
private Entry<K, V> stack;
/**
 * The queue of resident cold entries.
 * This is a circular doubly linked list; this field is the sentinel head.
 */
private Entry<K, V> queue;
/**
 * The queue of non-resident cold entries (entries with a null value).
 * This is a circular doubly linked list; this field is the sentinel head.
 */
private Entry<K, V> queue2;
/**
 * Create a new cache. Argument validation is delegated to
 * {@link #setMaxMemory(long)} and {@link #setAverageMemory(int)};
 * the internal data structures are then built by {@link #clear()}.
 *
 * @param maxMemory the maximum memory to use (1 or larger)
 * @param averageMemory the average memory usage of an object (1 or larger)
 */
private CacheLIRS(long maxMemory, int averageMemory) {
    setMaxMemory(maxMemory);
    setAverageMemory(averageMemory);
    clear();
}
/**
 * Create a new cache with the given memory size. To just limit the number
 * of entries, use the required number as the maximum memory, and an average
 * size of 1.
 *
 * @param <K> the key type
 * @param <V> the value type
 * @param maxMemory the maximum memory to use (1 or larger)
 * @param averageMemory the average memory (1 or larger)
 * @return the cache
 */
public static <K, V> CacheLIRS<K, V> newInstance(int maxMemory, int averageMemory) {
    return new CacheLIRS<K, V>(maxMemory, averageMemory);
}
/**
 * Clear the cache. This method will clear all entries (including
 * non-resident keys) and resize the internal array.
 */
public void clear() {
    // calculate the size of the map array,
    // assuming a fill factor of at most 75%
    long maxLen = (long) (maxMemory / averageMemory / 0.75);
    // the size needs to be a power of 2
    long l = 8;
    while (l < maxLen) {
        l += l;
    }
    // cap the array length at 2^30: casting Math.min(1L << 31, l) to
    // int would overflow to Integer.MIN_VALUE and produce a negative
    // array length for very large maxMemory / averageMemory ratios
    int len = (int) Math.min(1L << 30, l);
    // the bit mask has all bits set
    mask = len - 1;
    // initialize the sentinel heads of the stack and the two queues
    // (each is a circular doubly linked list)
    stack = new Entry<K, V>();
    stack.stackPrev = stack.stackNext = stack;
    queue = new Entry<K, V>();
    queue.queuePrev = queue.queueNext = queue;
    queue2 = new Entry<K, V>();
    queue2.queuePrev = queue2.queueNext = queue2;
    // first set to null - avoiding out of memory
    entries = null;
    @SuppressWarnings("unchecked")
    Entry<K, V>[] e = new Entry[len];
    entries = e;
    mapSize = 0;
    usedMemory = 0;
    stackSize = queueSize = queue2Size = 0;
}
/**
 * Look up the value for the given key without adjusting the internal
 * state of the cache (no LIRS bookkeeping is performed).
 *
 * @param key the key (may not be null)
 * @return the value, or null if there is no resident entry
 */
public V peek(K key) {
    Entry<K, V> entry = find(key);
    if (entry == null) {
        return null;
    }
    return entry.value;
}
/**
 * Get the memory used for the given key.
 *
 * @param key the key (may not be null)
 * @return the memory, or 0 if there is no resident entry
 */
public int getMemory(K key) {
    Entry<K, V> entry = find(key);
    if (entry == null) {
        return 0;
    }
    return entry.memory;
}
/**
 * Get the value for the given key if the entry is cached. This method
 * adjusts the internal state of the cache, to ensure commonly used entries
 * stay in the cache.
 *
 * @param key the key (may not be null)
 * @return the value, or null if there is no resident entry
 */
public V get(Object key) {
    Entry<K, V> e = find(key);
    if (e == null || e.value == null) {
        // either the entry was not found, or it was a non-resident entry
        return null;
    } else if (e.isHot()) {
        if (e != stack.stackNext) {
            // move a hot entry to the top of the stack
            // unless it is already there
            boolean wasEnd = e == stack.stackPrev;
            removeFromStack(e);
            if (wasEnd) {
                // if moving the last entry, the last entry
                // could not be cold, which is not allowed
                pruneStack();
            }
            addToStack(e);
        }
    } else {
        // a resident cold entry was accessed
        removeFromQueue(e);
        if (e.stackNext != null) {
            // resident cold entries become hot
            // if they are on the stack
            removeFromStack(e);
            // which means a hot entry needs to become cold
            convertOldestHotToCold();
        } else {
            // cold entries that are not on the stack
            // move to the front of the queue
            addToQueue(queue, e);
        }
        // in any case, the cold entry is moved to the top of the stack
        addToStack(e);
    }
    return e.value;
}
/**
 * Add an entry to the cache using the average memory size.
 * Delegates to {@link #put(Object, Object, int)} with the configured
 * average memory charged for the entry.
 *
 * @param key the key (may not be null)
 * @param value the value (may not be null)
 * @return the old value, or null if there is no resident entry
 */
public V put(K key, V value) {
    return put(key, value, averageMemory);
}
/**
 * Add an entry to the cache. The entry may or may not exist in the cache
 * yet. This method will usually mark unknown entries as cold and known
 * entries as hot.
 *
 * @param key the key (may not be null)
 * @param value the value (may not be null)
 * @param memory the memory used for the given entry
 * @return the old value, or null if there is no resident entry
 */
public V put(K key, V value, int memory) {
    if (value == null) {
        throw new NullPointerException();
    }
    V old;
    Entry<K, V> e = find(key);
    if (e == null) {
        old = null;
    } else {
        // an existing entry (resident or not) is fully removed first,
        // then re-inserted below as a fresh entry
        old = e.value;
        remove(key);
    }
    e = new Entry<K, V>();
    e.key = key;
    e.value = value;
    e.memory = memory;
    // insert at the head of the map bucket chain
    int index = getIndex(key);
    e.mapNext = entries[index];
    entries[index] = e;
    usedMemory += memory;
    if (usedMemory > maxMemory && mapSize > 0) {
        // an old entry needs to be removed
        evict(e);
    }
    mapSize++;
    // added entries are always added to the stack
    addToStack(e);
    return old;
}
/**
 * Compute the map array index for the given key, spreading the hash
 * code so that keys whose hash codes only differ in the high bits
 * still map to different slots.
 *
 * @param key the key (may not be null)
 * @return the index into the entries array
 */
private int getIndex(Object key) {
    int h = key.hashCode();
    // Doug Lea's supplemental secondaryHash function (inlined)
    // to protect against hash codes that don't differ in low order bits
    h ^= (h >>> 20) ^ (h >>> 12);
    h ^= (h >>> 7) ^ (h >>> 4);
    return h & mask;
}
/**
 * Remove an entry. Both resident and non-resident entries can be removed.
 *
 * @param key the key (may not be null)
 * @return the old value, or null if there is no resident entry
 */
public V remove(Object key) {
    int index = getIndex(key);
    Entry<K, V> e = entries[index];
    if (e == null) {
        return null;
    }
    V old;
    if (e.key.equals(key)) {
        // the entry is the first in the bucket chain
        old = e.value;
        entries[index] = e.mapNext;
    } else {
        // walk the bucket chain, keeping track of the predecessor
        // so it can be re-linked around the removed entry
        Entry<K, V> last;
        do {
            last = e;
            e = e.mapNext;
            if (e == null) {
                return null;
            }
        } while (!e.key.equals(key));
        old = e.value;
        last.mapNext = e.mapNext;
    }
    mapSize--;
    usedMemory -= e.memory;
    // only entries that are on the stack have a non-null stackNext
    if (e.stackNext != null) {
        removeFromStack(e);
    }
    if (e.isHot()) {
        // when removing a hot entry, the newest cold entry gets hot,
        // so the number of hot entries does not change
        e = queue.queueNext;
        if (e != queue) {
            removeFromQueue(e);
            if (e.stackNext == null) {
                addToStackBottom(e);
            }
        }
    } else {
        removeFromQueue(e);
    }
    pruneStack();
    return old;
}
/**
 * Evict cold entries (resident and non-resident) until the memory limit is
 * reached. The new entry is added as a cold entry, except if it is the only
 * entry.
 *
 * @param newEntry a new entry
 */
private void evict(Entry<K, V> newEntry) {
    // ensure there are not too many hot entries:
    // left shift of 5 is multiplication by 32, that means if there are less
    // than 1/32 (3.125%) cold entries, a new hot entry needs to become cold
    while ((queueSize << 5) < mapSize) {
        convertOldestHotToCold();
    }
    if (stackSize > 0) {
        // the new cold entry is at the top of the queue
        addToQueue(queue, newEntry);
    }
    // the oldest resident cold entries become non-resident
    // but at least one cold entry (the new one) must stay
    while (usedMemory > maxMemory && queueSize > 1) {
        Entry<K, V> e = queue.queuePrev;
        usedMemory -= e.memory;
        removeFromQueue(e);
        // a null value marks the entry as non-resident
        e.value = null;
        e.memory = 0;
        addToQueue(queue2, e);
        // the size of the non-resident-cold entries needs to be limited:
        // at most the size of the rest of the stack
        // (2 * queue2Size <= stackSize)
        while (queue2Size + queue2Size > stackSize) {
            e = queue2.queuePrev;
            remove(e.key);
        }
    }
}
/**
 * Convert the oldest hot entry (the bottom of the stack) into a cold
 * entry by moving it to the resident-cold queue, then prune the stack.
 */
private void convertOldestHotToCold() {
    // the last entry of the stack is known to be hot
    Entry<K, V> last = stack.stackPrev;
    // remove from stack - which is done anyway in the stack pruning, but we
    // can do it here as well
    removeFromStack(last);
    // adding an entry to the queue will make it cold
    addToQueue(queue, last);
    pruneStack();
}
/**
 * Ensure the last entry of the stack is hot, by unlinking cold
 * entries from the bottom of the stack (LIRS "stack pruning").
 * Cold entries remain in their queue; they are only removed from
 * the stack here.
 */
private void pruneStack() {
    Entry<K, V> bottom = stack.stackPrev;
    while (bottom != stack && !bottom.isHot()) {
        removeFromStack(bottom);
        bottom = stack.stackPrev;
    }
}
/**
 * Try to find an entry in the map by walking the bucket chain.
 *
 * @param key the key
 * @return the entry (might be a non-resident one), or null if not mapped
 */
private Entry<K, V> find(Object key) {
    Entry<K, V> e;
    for (e = entries[getIndex(key)]; e != null; e = e.mapNext) {
        if (e.key.equals(key)) {
            break;
        }
    }
    return e;
}
/**
 * Insert the entry at the top of the stack (directly after the sentinel
 * head) and increment the stack size. The new entry's links must be set
 * before the neighbors are updated.
 */
private void addToStack(Entry<K, V> e) {
    e.stackPrev = stack;
    e.stackNext = stack.stackNext;
    e.stackNext.stackPrev = e;
    stack.stackNext = e;
    stackSize++;
}
/**
 * Insert the entry at the bottom of the stack (directly before the
 * sentinel head) and increment the stack size.
 */
private void addToStackBottom(Entry<K, V> e) {
    e.stackNext = stack;
    e.stackPrev = stack.stackPrev;
    e.stackPrev.stackNext = e;
    stack.stackPrev = e;
    stackSize++;
}
/**
 * Unlink the entry from the stack, clear its stack links (a null
 * stackNext marks an entry as "not on the stack"), and decrement the
 * stack size.
 */
private void removeFromStack(Entry<K, V> e) {
    e.stackPrev.stackNext = e.stackNext;
    e.stackNext.stackPrev = e.stackPrev;
    e.stackPrev = e.stackNext = null;
    stackSize--;
}
/**
 * Insert the entry at the front of the given queue (directly after the
 * sentinel head q) and update the matching size counter: a non-null
 * value means a resident cold entry (queueSize), a null value means a
 * non-resident entry (queue2Size).
 */
private void addToQueue(Entry<K, V> q, Entry<K, V> e) {
    e.queuePrev = q;
    e.queueNext = q.queueNext;
    e.queueNext.queuePrev = e;
    q.queueNext = e;
    if (e.value != null) {
        queueSize++;
    } else {
        queue2Size++;
    }
}
/**
 * Unlink the entry from its queue, clear its queue links, and update
 * the matching size counter (resident entries count against queueSize,
 * non-resident ones, with a null value, against queue2Size).
 */
private void removeFromQueue(Entry<K, V> e) {
    e.queuePrev.queueNext = e.queueNext;
    e.queueNext.queuePrev = e.queuePrev;
    e.queuePrev = e.queueNext = null;
    if (e.value != null) {
        queueSize--;
    } else {
        queue2Size--;
    }
}
/**
 * Get the list of keys. This method allows to read the internal state of
 * the cache.
 *
 * @param cold if true, only keys for the cold entries are returned
 * @param nonResident true for non-resident entries
 * @return the key list
 */
public List<K> keys(boolean cold, boolean nonResident) {
    ArrayList<K> result = new ArrayList<K>();
    if (!cold) {
        // walk the LIRS stack
        for (Entry<K, V> e = stack.stackNext; e != stack; e = e.stackNext) {
            result.add(e.key);
        }
        return result;
    }
    // walk either the non-resident or the resident-cold queue
    Entry<K, V> head = nonResident ? queue2 : queue;
    for (Entry<K, V> e = head.queueNext; e != head; e = e.queueNext) {
        result.add(e.key);
    }
    return result;
}
/**
 * Get the number of resident entries, that is all mapped entries minus
 * the non-resident ones held in queue2.
 *
 * @return the number of entries
 */
public int size() {
    return this.mapSize - this.queue2Size;
}
/**
 * Check whether there is a resident entry for the given key. This method
 * does not adjust the internal state of the cache.
 *
 * @param key the key (may not be null)
 * @return true if there is a resident entry
 */
public boolean containsKey(Object key) {
    Entry<K, V> entry = find(key);
    if (entry == null) {
        return false;
    }
    // a null value marks a non-resident entry
    return entry.value != null;
}
/**
 * Get the set of keys for resident entries.
 * NOTE(review): the stack can also hold non-resident cold entries, which
 * would be included here as well — verify against the documented
 * "resident only" contract.
 *
 * @return the set of keys
 */
public Set<K> keySet() {
    HashSet<K> keys = new HashSet<K>();
    // entries on the LIRS stack
    for (Entry<K, V> e = stack.stackNext; e != stack; e = e.stackNext) {
        keys.add(e.key);
    }
    // resident cold entries that are only on the queue
    for (Entry<K, V> e = queue.queueNext; e != queue; e = e.queueNext) {
        keys.add(e.key);
    }
    return keys;
}
/**
 * Get the entry set for all resident entries. The returned set is a
 * snapshot backed by a temporary map, not a live view of the cache.
 *
 * @return the entry set
 */
public Set<Map.Entry<K, V>> entrySet() {
    HashMap<K, V> snapshot = new HashMap<K, V>();
    for (K key : keySet()) {
        snapshot.put(key, find(key).value);
    }
    return snapshot.entrySet();
}
/**
 * Get the number of hot entries in the cache. Hot entries are all mapped
 * entries minus the cold (queued) and non-resident ones.
 *
 * @return the number of hot entries
 */
public int sizeHot() {
    int cold = queueSize + queue2Size;
    return mapSize - cold;
}
/**
 * Get the number of non-resident entries in the cache. These are entries
 * whose value has been evicted (set to null) but whose key is still
 * tracked in the non-resident queue.
 *
 * @return the number of non-resident entries
 */
public int sizeNonResident() {
    return queue2Size;
}
/**
 * Get the length of the internal map array. This exposes internal state,
 * mainly useful for testing and for sizing calculations.
 *
 * @return the size of the array
 */
public int sizeMapArray() {
    return entries.length;
}
/**
 * Get the currently used memory, as tracked by the per-entry memory
 * estimates (not an exact measurement).
 *
 * @return the used memory
 */
public long getUsedMemory() {
    return usedMemory;
}
/**
 * Set the maximum memory this cache should use. This will not immediately
 * cause entries to get removed however; it will only change the limit. To
 * resize the internal array, call the clear method.
 *
 * @param maxMemory the maximum size (1 or larger)
 */
public void setMaxMemory(long maxMemory) {
    // reject zero and negative limits
    if (maxMemory < 1) {
        throw new IllegalArgumentException("Max memory must be larger than 0");
    }
    this.maxMemory = maxMemory;
}
/**
 * Get the maximum memory to use, as set by {@link #setMaxMemory(long)}.
 *
 * @return the maximum memory
 */
public long getMaxMemory() {
    return maxMemory;
}
/**
 * Set the average memory used per entry. It is used to calculate the length
 * of the internal array.
 *
 * @param averageMemory the average memory used (1 or larger)
 */
public void setAverageMemory(int averageMemory) {
    // reject zero and negative estimates
    if (averageMemory < 1) {
        throw new IllegalArgumentException("Average memory must be larger than 0");
    }
    this.averageMemory = averageMemory;
}
/**
 * Get the average memory used per entry, as set by
 * {@link #setAverageMemory(int)}.
 *
 * @return the average memory
 */
public int getAverageMemory() {
    return averageMemory;
}
/**
 * A cache entry. Each entry is either hot (low inter-reference recency;
 * LIR), cold (high inter-reference recency; HIR), or non-resident-cold. Hot
 * entries are in the stack only. Cold entries are in the queue, and may be
 * in the stack. Non-resident-cold entries have their value set to null and
 * are in the stack and in the non-resident queue.
 *
 * @param <K> the key type
 * @param <V> the value type
 */
static class Entry<K, V> {
    /**
     * The key.
     */
    K key;
    /**
     * The value. Set to null for non-resident-cold entries.
     */
    V value;
    /**
     * The estimated memory used by this entry.
     */
    int memory;
    /**
     * The next entry in the stack.
     */
    Entry<K, V> stackNext;
    /**
     * The previous entry in the stack.
     */
    Entry<K, V> stackPrev;
    /**
     * The next entry in the queue (either the resident queue or the
     * non-resident queue).
     */
    Entry<K, V> queueNext;
    /**
     * The previous entry in the queue.
     */
    Entry<K, V> queuePrev;
    /**
     * The next entry in the map (presumably the next entry in the same
     * hash bucket chain — confirm against the map implementation).
     */
    Entry<K, V> mapNext;
    /**
     * Whether this entry is hot. Cold entries are in one of the two queues,
     * so an entry is considered hot exactly when it is not linked into a
     * queue (queueNext is null).
     *
     * @return whether the entry is hot
     */
    boolean isHot() {
        return queueNext == null;
    }
}
}
...@@ -15,9 +15,10 @@ import java.util.Collections; ...@@ -15,9 +15,10 @@ import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.Map; import org.h2.compress.CompressLZF;
import org.h2.compress.Compressor; import org.h2.compress.Compressor;
import org.h2.dev.store.FilePathCache; import org.h2.dev.store.FilePathCache;
import org.h2.dev.store.cache.CacheLongKeyLIRS;
import org.h2.store.fs.FilePath; import org.h2.store.fs.FilePath;
import org.h2.store.fs.FileUtils; import org.h2.store.fs.FileUtils;
import org.h2.util.New; import org.h2.util.New;
...@@ -90,7 +91,8 @@ public class MVStore { ...@@ -90,7 +91,8 @@ public class MVStore {
private int blockSize = 4 * 1024; private int blockSize = 4 * 1024;
private long rootChunkStart; private long rootChunkStart;
private Map<Long, Page> cache = CacheLIRS.newInstance(readCacheSize, 2048); private CacheLongKeyLIRS<Page> cache = CacheLongKeyLIRS.newInstance(
readCacheSize, 2048, 16, readCacheSize / 100);
private int lastChunkId; private int lastChunkId;
private HashMap<Integer, Chunk> chunks = New.hashMap(); private HashMap<Integer, Chunk> chunks = New.hashMap();
...@@ -124,7 +126,9 @@ public class MVStore { ...@@ -124,7 +126,9 @@ public class MVStore {
private MVStore(String fileName, MapFactory mapFactory) { private MVStore(String fileName, MapFactory mapFactory) {
this.fileName = fileName; this.fileName = fileName;
this.mapFactory = mapFactory; this.mapFactory = mapFactory;
this.compressor = mapFactory.buildCompressor(); this.compressor = mapFactory == null ?
new CompressLZF() :
mapFactory.buildCompressor();
} }
/** /**
......
/* /*
* Copyright 2012 H2 Group (http://h2database.com). * Copyright 2004-2012 H2 Group. Multiple-Licensed under the H2 License,
* All Rights Reserved. * Version 1.0, and under the Eclipse Public License, Version 1.0
* * (http://h2database.com/html/license.html).
* Licensed under the Apache License, Version 2.0 (the "License"); * Initial Developer: H2 Group
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/ */
package org.h2.dev.store.btree; package org.h2.dev.store.cache;
import java.util.AbstractMap; import java.util.AbstractMap;
import java.util.ArrayList; import java.util.ArrayList;
...@@ -23,7 +13,6 @@ import java.util.HashSet; ...@@ -23,7 +13,6 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ConcurrentMap;
/** /**
* A scan resistant cache. It is meant to cache objects that are relatively * A scan resistant cache. It is meant to cache objects that are relatively
...@@ -54,7 +43,7 @@ import java.util.concurrent.ConcurrentMap; ...@@ -54,7 +43,7 @@ import java.util.concurrent.ConcurrentMap;
* @param <K> the key type * @param <K> the key type
* @param <V> the value type * @param <V> the value type
*/ */
public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements ConcurrentMap<K, V> { public class CacheLIRS<K, V> extends AbstractMap<K, V> implements Map<K, V> {
/** /**
* The maximum memory this cache should use. * The maximum memory this cache should use.
...@@ -73,7 +62,7 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -73,7 +62,7 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
private int segmentMask; private int segmentMask;
private final int stackMoveDistance; private final int stackMoveDistance;
private CacheConcurrentLIRS(long maxMemory, int averageMemory, int segmentCount, int stackMoveDistance) { private CacheLIRS(long maxMemory, int averageMemory, int segmentCount, int stackMoveDistance) {
setMaxMemory(maxMemory); setMaxMemory(maxMemory);
setAverageMemory(averageMemory); setAverageMemory(averageMemory);
if (Integer.bitCount(segmentCount) != 1) { if (Integer.bitCount(segmentCount) != 1) {
...@@ -89,8 +78,9 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -89,8 +78,9 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
segmentMask = segmentCount - 1; segmentMask = segmentCount - 1;
segments = new Segment[segmentCount]; segments = new Segment[segmentCount];
for (int i = 0; i < segmentCount; i++) { for (int i = 0; i < segmentCount; i++) {
long max = Math.max(1, maxMemory / segmentCount);
segments[i] = new Segment<K, V>( segments[i] = new Segment<K, V>(
1 + maxMemory / segmentCount, averageMemory, stackMoveDistance); max, averageMemory, stackMoveDistance);
} }
segmentShift = Integer.numberOfTrailingZeros(segments[0].sizeMapArray()); segmentShift = Integer.numberOfTrailingZeros(segments[0].sizeMapArray());
} }
...@@ -150,26 +140,6 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -150,26 +140,6 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
return put(key, value, averageMemory); return put(key, value, averageMemory);
} }
public V putIfAbsent(K key, V value) {
int hash = getHash(key);
return getSegment(hash).putIfAbsent(key, hash, value);
}
public boolean remove(Object key, Object value) {
int hash = getHash(key);
return getSegment(hash).remove(key, hash, value);
}
public boolean replace(K key, V oldValue, V newValue) {
int hash = getHash(key);
return getSegment(hash).replace(key, hash, oldValue, newValue);
}
public V replace(K key, V value) {
int hash = getHash(key);
return getSegment(hash).replace(key, hash, value);
}
/** /**
* Remove an entry. Both resident and non-resident entries can be removed. * Remove an entry. Both resident and non-resident entries can be removed.
* *
...@@ -233,38 +203,39 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -233,38 +203,39 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
} }
/** /**
* Set the average memory used per entry. It is used to calculate the length * Set the maximum memory this cache should use. This will not immediately
* of the internal array. * cause entries to get removed however; it will only change the limit. To
* resize the internal array, call the clear method.
* *
* @param averageMemory the average memory used (1 or larger) * @param maxMemory the maximum size (1 or larger)
*/ */
public void setAverageMemory(int averageMemory) { public void setMaxMemory(long maxMemory) {
if (averageMemory <= 0) { if (maxMemory <= 0) {
throw new IllegalArgumentException("Average memory must be larger than 0"); throw new IllegalArgumentException("Max memory must be larger than 0");
} }
this.averageMemory = averageMemory; this.maxMemory = maxMemory;
if (segments != null) { if (segments != null) {
long max = 1 + maxMemory / segments.length;
for (Segment<K, V> s : segments) { for (Segment<K, V> s : segments) {
s.setAverageMemory(averageMemory); s.setMaxMemory(max);
} }
} }
} }
/** /**
* Set the maximum memory this cache should use. This will not immediately * Set the average memory used per entry. It is used to calculate the length
* cause entries to get removed however; it will only change the limit. To * of the internal array.
* resize the internal array, call the clear method.
* *
* @param maxMemory the maximum size (1 or larger) * @param averageMemory the average memory used (1 or larger)
*/ */
public void setMaxMemory(long maxMemory) { public void setAverageMemory(int averageMemory) {
if (maxMemory <= 0) { if (averageMemory <= 0) {
throw new IllegalArgumentException("Max memory must be larger than 0"); throw new IllegalArgumentException("Average memory must be larger than 0");
} }
this.maxMemory = maxMemory; this.averageMemory = averageMemory;
if (segments != null) { if (segments != null) {
for (Segment<K, V> s : segments) { for (Segment<K, V> s : segments) {
s.setMaxMemory(1 + maxMemory / segments.length); s.setAverageMemory(averageMemory);
} }
} }
} }
...@@ -295,8 +266,8 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -295,8 +266,8 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
* @param maxEntries the maximum number of entries * @param maxEntries the maximum number of entries
* @return the cache * @return the cache
*/ */
public static <K, V> CacheConcurrentLIRS<K, V> newInstance(int maxEntries) { public static <K, V> CacheLIRS<K, V> newInstance(int maxEntries) {
return new CacheConcurrentLIRS<K, V>(maxEntries, 1, 16, maxEntries / 100); return new CacheLIRS<K, V>(maxEntries, 1, 16, maxEntries / 100);
} }
/** /**
...@@ -309,9 +280,9 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -309,9 +280,9 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
* of the stack before the current item is moved * of the stack before the current item is moved
* @return the cache * @return the cache
*/ */
public static <K, V> CacheConcurrentLIRS<K, V> newInstance(int maxMemory, public static <K, V> CacheLIRS<K, V> newInstance(int maxMemory, int averageMemory,
int averageMemory, int segmentCount, int stackMoveDistance) { int segmentCount, int stackMoveDistance) {
return new CacheConcurrentLIRS<K, V>(maxMemory, averageMemory, segmentCount, stackMoveDistance); return new CacheLIRS<K, V>(maxMemory, averageMemory, segmentCount, stackMoveDistance);
} }
/** /**
...@@ -645,50 +616,6 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -645,50 +616,6 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
return old; return old;
} }
synchronized V putIfAbsent(K key, int hash, V value) {
Entry<K, V> e = find(key, hash);
if (e != null && e.value != null) {
return e.value;
}
return put(key, hash, value, averageMemory);
}
synchronized boolean remove(Object key, int hash, Object value) {
Entry<K, V> e = find(key, hash);
if (e != null) {
V x = e.value;
if (x != null && x.equals(value)) {
remove(key, hash);
return true;
}
}
return false;
}
synchronized boolean replace(K key, int hash, V oldValue, V newValue) {
Entry<K, V> e = find(key, hash);
if (e != null) {
V x = e.value;
if (x != null && x.equals(oldValue)) {
put(key, hash, newValue, averageMemory);
return true;
}
}
return false;
}
synchronized V replace(K key, int hash, V value) {
Entry<K, V> e = find(key, hash);
if (e != null) {
V x = e.value;
if (x != null) {
put(key, hash, value, averageMemory);
return x;
}
}
return null;
}
synchronized V remove(Object key, int hash) { synchronized V remove(Object key, int hash) {
int index = hash & mask; int index = hash & mask;
Entry<K, V> e = entries[index]; Entry<K, V> e = entries[index];
...@@ -891,15 +818,6 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -891,15 +818,6 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
return set; return set;
} }
synchronized Set<Map.Entry<K, V>> entrySet() {
HashMap<K, V> map = new HashMap<K, V>();
for (K k : keySet()) {
int hash = getHash(k);
map.put(k, find(k, hash).value);
}
return map.entrySet();
}
int sizeHot() { int sizeHot() {
return mapSize - queueSize - queue2Size; return mapSize - queueSize - queue2Size;
} }
...@@ -923,10 +841,6 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc ...@@ -923,10 +841,6 @@ public class CacheConcurrentLIRS<K, V> extends AbstractMap<K, V> implements Conc
this.maxMemory = maxMemory; this.maxMemory = maxMemory;
} }
long getMaxMemory() {
return maxMemory;
}
void setAverageMemory(int averageMemory) { void setAverageMemory(int averageMemory) {
if (averageMemory <= 0) { if (averageMemory <= 0) {
throw new IllegalArgumentException("Average memory must be larger than 0"); throw new IllegalArgumentException("Average memory must be larger than 0");
......
/* /*
* Copyright 2004-2011 H2 Group. Multiple-Licensed under the H2 License, * Copyright 2004-2012 H2 Group. Multiple-Licensed under the H2 License,
* Version 1.0, and under the Eclipse Public License, Version 1.0 * Version 1.0, and under the Eclipse Public License, Version 1.0
* (http://h2database.com/html/license.html). * (http://h2database.com/html/license.html).
* Initial Developer: H2 Group * Initial Developer: H2 Group
*/ */
package org.h2.dev.store.btree; package org.h2.dev.store.cache;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
...@@ -31,9 +31,11 @@ import java.util.Set; ...@@ -31,9 +31,11 @@ import java.util.Set;
* prevent unbound memory usage. The maximum size of this queue is at most the * prevent unbound memory usage. The maximum size of this queue is at most the
* size of the rest of the stack. About 6.25% of the mapped entries are cold. * size of the rest of the stack. About 6.25% of the mapped entries are cold.
* <p> * <p>
* Internally, the cache is split into 16 segments, and each segment is an * Internally, the cache is split into a number of segments, and each segment is
* individual LIRS cache. Accessed entries are only moved to the top of the * an individual LIRS cache.
* stack if at least 20 other entries have been moved to the front. Write access * <p>
* Accessed entries are only moved to the top of the stack if at least a number
* of other entries have been moved to the front (1% by default). Write access
* and moving entries to the top of the stack is synchronized per segment. * and moving entries to the top of the stack is synchronized per segment.
* *
* @author Thomas Mueller * @author Thomas Mueller
...@@ -74,8 +76,9 @@ public class CacheLongKeyLIRS<V> { ...@@ -74,8 +76,9 @@ public class CacheLongKeyLIRS<V> {
segmentMask = segmentCount - 1; segmentMask = segmentCount - 1;
segments = new Segment[segmentCount]; segments = new Segment[segmentCount];
for (int i = 0; i < segmentCount; i++) { for (int i = 0; i < segmentCount; i++) {
long max = Math.max(1, maxMemory / segmentCount);
segments[i] = new Segment<V>( segments[i] = new Segment<V>(
1 + maxMemory / segmentCount, averageMemory, stackMoveDistance); max, averageMemory, stackMoveDistance);
} }
segmentShift = Integer.numberOfTrailingZeros(segments[0].sizeMapArray()); segmentShift = Integer.numberOfTrailingZeros(segments[0].sizeMapArray());
} }
...@@ -266,12 +269,13 @@ public class CacheLongKeyLIRS<V> { ...@@ -266,12 +269,13 @@ public class CacheLongKeyLIRS<V> {
} }
/** /**
* Create a new cache with the given memory size. To just limit the number * Create a new cache with the given memory size.
* of entries, use the required number as the maximum memory, and an average
* size of 1.
* *
* @param maxMemory the maximum memory to use (1 or larger) * @param maxMemory the maximum memory to use (1 or larger)
* @param averageMemory the average memory (1 or larger) * @param averageMemory the average memory (1 or larger)
* @param segmentCount the number of cache segments (must be a power of 2)
* @param stackMoveDistance how many other item are to be moved to the top
* of the stack before the current item is moved
* @return the cache * @return the cache
*/ */
public static <V> CacheLongKeyLIRS<V> newInstance(int maxMemory, int averageMemory, public static <V> CacheLongKeyLIRS<V> newInstance(int maxMemory, int averageMemory,
...@@ -279,6 +283,32 @@ public class CacheLongKeyLIRS<V> { ...@@ -279,6 +283,32 @@ public class CacheLongKeyLIRS<V> {
return new CacheLongKeyLIRS<V>(maxMemory, averageMemory, segmentCount, stackMoveDistance); return new CacheLongKeyLIRS<V>(maxMemory, averageMemory, segmentCount, stackMoveDistance);
} }
/**
* Get the entry set for all resident entries.
*
* @return the entry set
*/
public synchronized Set<Map.Entry<Long, V>> entrySet() {
HashMap<Long, V> map = new HashMap<Long, V>();
for (long k : keySet()) {
map.put(k, find(k).value);
}
return map.entrySet();
}
/**
* Get the set of keys for resident entries.
*
* @return the set of keys
*/
public synchronized Set<Long> keySet() {
HashSet<Long> set = new HashSet<Long>();
for (Segment<V> s : segments) {
set.addAll(s.keySet());
}
return set;
}
/** /**
* Get the number of non-resident entries in the cache. * Get the number of non-resident entries in the cache.
* *
...@@ -306,16 +336,45 @@ public class CacheLongKeyLIRS<V> { ...@@ -306,16 +336,45 @@ public class CacheLongKeyLIRS<V> {
} }
/** /**
* Get the entry set for all resident entries. * Get the number of hot entries in the cache.
* *
* @return the entry set * @return the number of hot entries
*/ */
public Set<Long> keySet() { public int sizeHot() {
HashSet<Long> set = new HashSet<Long>(); int x = 0;
for (Segment<V> s : segments) { for (Segment<V> s : segments) {
set.addAll(s.keySet()); x += s.sizeHot();
} }
return set; return x;
}
/**
* Get the number of resident entries.
*
* @return the number of entries
*/
public int size() {
int x = 0;
for (Segment<V> s : segments) {
x += s.size();
}
return x;
}
/**
* Get the list of keys. This method allows to read the internal state of
* the cache.
*
* @param cold if true, only keys for the cold entries are returned
* @param nonResident true for non-resident entries
* @return the key list
*/
public synchronized List<Long> keys(boolean cold, boolean nonResident) {
ArrayList<Long> keys = new ArrayList<Long>();
for (Segment<V> s : segments) {
keys.addAll(s.keys(cold, nonResident));
}
return keys;
} }
/** /**
...@@ -338,15 +397,6 @@ public class CacheLongKeyLIRS<V> { ...@@ -338,15 +397,6 @@ public class CacheLongKeyLIRS<V> {
return size() == 0; return size() == 0;
} }
/**
* Get the entry set for all resident entries.
*
* @return the entry set
*/
public Set<Map.Entry<Long, V>> entrySet() {
return getMap().entrySet();
}
public boolean containsValue(Object value) { public boolean containsValue(Object value) {
return getMap().containsValue(value); return getMap().containsValue(value);
} }
...@@ -362,32 +412,6 @@ public class CacheLongKeyLIRS<V> { ...@@ -362,32 +412,6 @@ public class CacheLongKeyLIRS<V> {
return map; return map;
} }
/**
* Get the number of hot entries in the cache.
*
* @return the number of hot entries
*/
public int sizeHot() {
int x = 0;
for (Segment<V> s : segments) {
x += s.sizeHot();
}
return x;
}
/**
* Get the number of resident entries.
*
* @return the number of entries
*/
public int size() {
int x = 0;
for (Segment<V> s : segments) {
x += s.size();
}
return x;
}
public void putAll(Map<Long, ? extends V> m) { public void putAll(Map<Long, ? extends V> m) {
for (Map.Entry<Long, ? extends V> e : m.entrySet()) { for (Map.Entry<Long, ? extends V> e : m.entrySet()) {
// copy only non-null entries // copy only non-null entries
...@@ -395,22 +419,6 @@ public class CacheLongKeyLIRS<V> { ...@@ -395,22 +419,6 @@ public class CacheLongKeyLIRS<V> {
} }
} }
/**
* Get the list of keys. This method allows to read the internal state of
* the cache.
*
* @param cold if true, only keys for the cold entries are returned
* @param nonResident true for non-resident entries
* @return the key list
*/
public synchronized List<Long> keys(boolean cold, boolean nonResident) {
ArrayList<Long> keys = new ArrayList<Long>();
for (Segment<V> s : segments) {
keys.addAll(s.keys(cold, nonResident));
}
return keys;
}
/** /**
* A cache segment * A cache segment
* *
...@@ -838,7 +846,7 @@ public class CacheLongKeyLIRS<V> { ...@@ -838,7 +846,7 @@ public class CacheLongKeyLIRS<V> {
return e != null && e.value != null; return e != null && e.value != null;
} }
Set<Long> keySet() { synchronized Set<Long> keySet() {
HashSet<Long> set = new HashSet<Long>(); HashSet<Long> set = new HashSet<Long>();
for (Entry<V> e = stack.stackNext; e != stack; e = e.stackNext) { for (Entry<V> e = stack.stackNext; e != stack; e = e.stackNext) {
set.add(e.key); set.add(e.key);
...@@ -872,10 +880,6 @@ public class CacheLongKeyLIRS<V> { ...@@ -872,10 +880,6 @@ public class CacheLongKeyLIRS<V> {
this.maxMemory = maxMemory; this.maxMemory = maxMemory;
} }
long getMaxMemory() {
return maxMemory;
}
void setAverageMemory(int averageMemory) { void setAverageMemory(int averageMemory) {
if (averageMemory <= 0) { if (averageMemory <= 0) {
throw new IllegalArgumentException("Average memory must be larger than 0"); throw new IllegalArgumentException("Average memory must be larger than 0");
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论