提交 1a3c5bc5 authored 作者: Thomas Mueller's avatar Thomas Mueller

The LIRS cache now resizes the table automatically in all cases and no longer…

The LIRS cache now resizes the table automatically in all cases and no longer needs the averageMemory configuration.
上级 0e5a7ce9
......@@ -35,8 +35,9 @@ import org.h2.mvstore.DataUtils;
* an individual LIRS cache.
* <p>
* Accessed entries are only moved to the top of the stack if at least a number
* of other entries have been moved to the front (1% by default). Write access
* and moving entries to the top of the stack is synchronized per segment.
* of other entries have been moved to the front (8 per segment by default).
* Write access and moving entries to the top of the stack is synchronized per
* segment.
*
* @author Thomas Mueller
* @param <V> the value type
......@@ -48,11 +49,6 @@ public class CacheLongKeyLIRS<V> {
*/
private long maxMemory;
/**
* The average memory used by one entry.
*/
private int averageMemory;
private final Segment<V>[] segments;
private final int segmentCount;
......@@ -62,29 +58,26 @@ public class CacheLongKeyLIRS<V> {
/**
* Create a new cache with the given number of entries, and the default
* settings (an average size of 1 per entry, 16 segments, and stack move
* distance equals to the maximum number of entries divided by 100).
* settings (16 segments, and a stack move distance of 8).
*
* @param maxEntries the maximum number of entries
* @param maxMemory the maximum memory to use (1 or larger)
*/
public CacheLongKeyLIRS(int maxEntries) {
this(maxEntries, 1, 16, maxEntries / 100);
public CacheLongKeyLIRS(int maxMemory) {
this(maxMemory, 16, 8);
}
/**
* Create a new cache with the given memory size.
*
* @param maxMemory the maximum memory to use (1 or larger)
* @param averageMemory the average memory (1 or larger)
* @param segmentCount the number of cache segments (must be a power of 2)
* @param stackMoveDistance how many other items are to be moved to the top
* of the stack before the current item is moved
*/
@SuppressWarnings("unchecked")
public CacheLongKeyLIRS(long maxMemory, int averageMemory,
public CacheLongKeyLIRS(long maxMemory,
int segmentCount, int stackMoveDistance) {
setMaxMemory(maxMemory);
setAverageMemory(averageMemory);
DataUtils.checkArgument(
Integer.bitCount(segmentCount) == 1,
"The segment count must be a power of 2, is {0}", segmentCount);
......@@ -102,24 +95,10 @@ public class CacheLongKeyLIRS<V> {
*/
public void clear() {
long max = Math.max(1, maxMemory / segmentCount);
int segmentLen = getSegmentLen(max);
for (int i = 0; i < segmentCount; i++) {
segments[i] = new Segment<V>(
max, segmentLen, stackMoveDistance);
}
}
private int getSegmentLen(long max) {
// calculate the size of the map array
// assume a fill factor of at most 75%
long maxLen = (long) (max / averageMemory / 0.75);
// the size needs to be a power of 2
long l = 8;
while (l < maxLen) {
l += l;
max, stackMoveDistance, 8);
}
// the array size is at most 2^31 elements
return (int) Math.min(1L << 31, l);
}
private Entry<V> find(long key) {
......@@ -176,32 +155,39 @@ public class CacheLongKeyLIRS<V> {
int hash = getHash(key);
int segmentIndex = getSegmentIndex(hash);
Segment<V> s = segments[segmentIndex];
// check whether resize is required:
// synchronize on s, to avoid concurrent writes also
// resize (concurrent reads read from the old segment)
// check whether resize is required: synchronize on s, to avoid
// concurrent resizes (concurrent reads read
// from the old segment)
synchronized (s) {
if (s.isFull()) {
// another thread might have resized
// (as we retrieved the segment before synchronizing on it)
s = segments[segmentIndex];
if (s.isFull()) {
s = new Segment<V>(s, 2);
segments[segmentIndex] = s;
}
}
s = resizeIfNeeded(s, segmentIndex);
return s.put(key, hash, value, memory);
}
}
/**
 * Replace the given segment with a re-sized copy if its hash table is
 * over- or under-utilized. The caller must be synchronized on the
 * segment that is currently stored at the given index.
 *
 * @param s the segment that was read before synchronizing
 * @param segmentIndex the index of the segment in the segments array
 * @return the segment to operate on (the given one, or its replacement)
 */
private Segment<V> resizeIfNeeded(Segment<V> s, int segmentIndex) {
    int newLen = s.getNewMapLen();
    if (newLen != 0) {
        // re-read the segment: another thread might have resized it
        // (we retrieved the segment before synchronizing on it)
        Segment<V> current = segments[segmentIndex];
        if (current == s) {
            // no other thread resized, so we do
            s = new Segment<V>(s, newLen);
            segments[segmentIndex] = s;
        }
    }
    return s;
}
/**
* Get the size of the given value. The default implementation returns the
* average memory as configured for this cache.
* Get the size of the given value. The default implementation returns 1.
*
* @param value the value
* @return the size
*/
protected int sizeOf(V value) {
return averageMemory;
return 1;
}
/**
......@@ -213,7 +199,15 @@ public class CacheLongKeyLIRS<V> {
*/
public V remove(long key) {
int hash = getHash(key);
return getSegment(hash).remove(key, hash);
int segmentIndex = getSegmentIndex(hash);
Segment<V> s = segments[segmentIndex];
// check whether resize is required: synchronize on s, to avoid
// concurrent resizes (concurrent reads read
// from the old segment)
synchronized (s) {
s = resizeIfNeeded(s, segmentIndex);
return s.remove(key, hash);
}
}
/**
......@@ -298,28 +292,6 @@ public class CacheLongKeyLIRS<V> {
}
}
/**
* Set the average memory used per entry. It is used to calculate the
* length of the internal array.
*
* @param averageMemory the average memory used (1 or larger)
*/
public void setAverageMemory(int averageMemory) {
DataUtils.checkArgument(
averageMemory > 0,
"Average memory must be larger than 0, is {0}", averageMemory);
this.averageMemory = averageMemory;
}
/**
* Get the average memory used per entry.
*
* @return the average memory
*/
public int getAverageMemory() {
return averageMemory;
}
/**
* Get the maximum memory to use.
*
......@@ -347,7 +319,7 @@ public class CacheLongKeyLIRS<V> {
*
* @return the set of keys
*/
public synchronized Set<Long> keySet() {
public Set<Long> keySet() {
HashSet<Long> set = new HashSet<Long>();
for (Segment<V> s : segments) {
set.addAll(s.keySet());
......@@ -415,7 +387,7 @@ public class CacheLongKeyLIRS<V> {
* @param nonResident true for non-resident entries
* @return the key list
*/
public synchronized List<Long> keys(boolean cold, boolean nonResident) {
public List<Long> keys(boolean cold, boolean nonResident) {
ArrayList<Long> keys = new ArrayList<Long>();
for (Segment<V> s : segments) {
keys.addAll(s.keys(cold, nonResident));
......@@ -533,7 +505,7 @@ public class CacheLongKeyLIRS<V> {
* The bit mask that is applied to the key hash code to get the index in
* the map array. The mask is the length of the array minus one.
*/
private int mask;
private final int mask;
/**
* The LIRS stack size.
......@@ -542,26 +514,27 @@ public class CacheLongKeyLIRS<V> {
/**
* The stack of recently referenced elements. This includes all hot
* entries, the recently referenced cold entries, and all non-resident
* cold entries.
* entries, and the recently referenced cold entries. Resident cold
* entries that were not recently referenced, as well as non-resident
* cold entries, are not in the stack.
* <p>
* There is always at least one entry: the head entry.
*/
private Entry<V> stack;
private final Entry<V> stack;
/**
* The queue of resident cold entries.
* <p>
* There is always at least one entry: the head entry.
*/
private Entry<V> queue;
private final Entry<V> queue;
/**
* The queue of non-resident cold entries.
* <p>
* There is always at least one entry: the head entry.
*/
private Entry<V> queue2;
private final Entry<V> queue2;
/**
* The number of times any item was moved to the top of the stack.
......@@ -572,11 +545,11 @@ public class CacheLongKeyLIRS<V> {
* Create a new cache segment.
*
* @param maxMemory the maximum memory to use
* @param len the number of hash table buckets (must be a power of 2)
* @param stackMoveDistance the number of other entries to be moved to
* the top of the stack before moving an entry to the top
* @param len the number of hash table buckets (must be a power of 2)
*/
Segment(long maxMemory, int len, int stackMoveDistance) {
Segment(long maxMemory, int stackMoveDistance, int len) {
setMaxMemory(maxMemory);
this.stackMoveDistance = stackMoveDistance;
......@@ -594,25 +567,18 @@ public class CacheLongKeyLIRS<V> {
@SuppressWarnings("unchecked")
Entry<V>[] e = new Entry[len];
entries = e;
mapSize = 0;
usedMemory = 0;
stackSize = queueSize = queue2Size = 0;
}
/**
* Create a new, larger cache segment from an existing one.
* Create a new cache segment from an existing one.
* The caller must synchronize on the old segment, to avoid
* concurrent modifications.
*
* @param old the old segment
* @param resizeFactor the factor to use to calculate the number of hash
* table buckets (must be a power of 2)
* @param len the number of hash table buckets (must be a power of 2)
*/
Segment(Segment<V> old, int resizeFactor) {
this(old.maxMemory,
old.entries.length * resizeFactor,
old.stackMoveDistance);
Segment(Segment<V> old, int len) {
this(old.maxMemory, old.stackMoveDistance, len);
Entry<V> s = old.stack.stackPrev;
while (s != old.stack) {
Entry<V> e = copy(s);
......@@ -641,6 +607,24 @@ public class CacheLongKeyLIRS<V> {
s = s.queuePrev;
}
}
/**
 * Calculate the new number of hash table buckets if the internal map
 * should be re-sized.
 *
 * @return 0 if no resizing is needed, or the new length
 */
int getNewMapLen() {
    int len = mask + 1;
    // grow while the fill factor is above 75%, staying below 2^28 buckets
    if (len * 3 < mapSize * 4 && len < (1 << 28)) {
        return len * 2;
    }
    // shrink when the fill factor drops below 12.5%, keeping at least 32
    if (len > 32 && len / 8 > mapSize) {
        return len / 2;
    }
    return 0;
}
private void addToMap(Entry<V> e) {
int index = getHash(e.key) & mask;
......@@ -659,15 +643,6 @@ public class CacheLongKeyLIRS<V> {
return e;
}
/**
* Check whether the cache segment is full.
*
* @return true if it contains more entries than hash table buckets.
*/
public boolean isFull() {
return mapSize > mask;
}
/**
* Get the memory used for the given key.
*
......
......@@ -58,7 +58,8 @@ public class TestCacheLIRS extends TestBase {
switch (r.nextInt(3)) {
case 0:
int memory = r.nextInt(5) + 1;
buff.append("add ").append(key).append(' ').append(memory).append('\n');
buff.append("add ").append(key).append(' ').
append(memory).append('\n');
test.put(key, j, memory);
break;
case 1:
......@@ -78,13 +79,13 @@ public class TestCacheLIRS extends TestBase {
test.put(1, 10, 100);
assertEquals(10, test.get(1).intValue());
try {
test.put(null, 10, 100);
test.put(null, 10, 100);
fail();
} catch (NullPointerException e) {
// expected
}
try {
test.put(1, null, 100);
test.put(1, null, 100);
fail();
} catch (NullPointerException e) {
// expected
......@@ -95,12 +96,6 @@ public class TestCacheLIRS extends TestBase {
} catch (IllegalArgumentException e) {
// expected
}
try {
test.setAverageMemory(0);
fail();
} catch (IllegalArgumentException e) {
// expected
}
}
private void testSize() {
......@@ -114,17 +109,6 @@ public class TestCacheLIRS extends TestBase {
verifyMapSize(769, 2048);
CacheLIRS<Integer, Integer> test;
test = createCache(3, 10);
test.put(0, 0, 9);
test.put(1, 10, 9);
test.put(2, 20, 9);
test.put(3, 30, 9);
test.put(4, 40, 9);
test = createCache(1, 1);
test.put(1, 10);
test.put(0, 0);
test.get(0);
test = createCache(1000);
for (int j = 0; j < 2000; j++) {
......@@ -141,11 +125,22 @@ public class TestCacheLIRS extends TestBase {
private void verifyMapSize(int elements, int expectedMapSize) {
CacheLIRS<Integer, Integer> test;
test = createCache(elements - 1);
assertTrue(test.sizeMapArray() < expectedMapSize);
for (int i = 0; i < elements - 1; i++) {
test.put(i, i * 10);
}
assertTrue(test.sizeMapArray() + "<" + expectedMapSize,
test.sizeMapArray() < expectedMapSize);
test = createCache(elements);
for (int i = 0; i < elements + 1; i++) {
test.put(i, i * 10);
}
assertEquals(expectedMapSize, test.sizeMapArray());
test = createCache(elements * 100, 100);
assertEquals(expectedMapSize, test.sizeMapArray());
test = createCache(elements * 2);
for (int i = 0; i < elements * 2; i++) {
test.put(i, i * 10);
}
assertTrue(test.sizeMapArray() + ">" + expectedMapSize,
test.sizeMapArray() > expectedMapSize);
}
private void testGetPutPeekRemove() {
......@@ -285,7 +280,7 @@ public class TestCacheLIRS extends TestBase {
}
private void testClear() {
CacheLIRS<Integer, Integer> test = createCache(40, 10);
CacheLIRS<Integer, Integer> test = createCache(40);
for (int i = 0; i < 5; i++) {
test.put(i, 10 * i, 9);
}
......@@ -301,7 +296,6 @@ public class TestCacheLIRS extends TestBase {
assertTrue(x >= 1 && x <= 4);
}
assertEquals(40, test.getMaxMemory());
assertEquals(10, test.getAverageMemory());
assertEquals(36, test.getUsedMemory());
assertEquals(4, test.size());
assertEquals(3, test.sizeHot());
......@@ -312,20 +306,15 @@ public class TestCacheLIRS extends TestBase {
test.setMaxMemory(10);
assertEquals(10, test.getMaxMemory());
test.setMaxMemory(40);
test.setAverageMemory(1);
assertEquals(1, test.getAverageMemory());
test.setAverageMemory(10);
verify(test, "mem: 36 stack: 4 3 2 1 cold: 4 non-resident: 0");
// putAll uses the average memory
test.putAll(test);
verify(test, "mem: 40 stack: 4 3 2 1 cold: non-resident: 0");
verify(test, "mem: 4 stack: 4 3 2 1 cold: non-resident: 0");
test.clear();
verify(test, "mem: 0 stack: cold: non-resident:");
assertEquals(40, test.getMaxMemory());
assertEquals(10, test.getAverageMemory());
assertEquals(0, test.getUsedMemory());
assertEquals(0, test.size());
assertEquals(0, test.sizeHot());
......@@ -348,7 +337,8 @@ public class TestCacheLIRS extends TestBase {
for (int i = 0; i < 20; i++) {
test.put(i, 10 * i);
}
verify(test, "mem: 4 stack: 19 18 17 16 3 2 1 cold: 19 non-resident: 18 17 16");
verify(test, "mem: 4 stack: 19 18 17 16 3 2 1 " +
"cold: 19 non-resident: 18 17 16");
}
private void testBadHashMethod() {
......@@ -559,13 +549,8 @@ public class TestCacheLIRS extends TestBase {
}
}
private static <K, V> CacheLIRS<K, V> createCache(int maxElements) {
return createCache(maxElements, 1);
}
private static <K, V> CacheLIRS<K, V> createCache(int maxSize,
int averageSize) {
return new CacheLIRS<K, V>(maxSize, averageSize, 1, 0);
private static <K, V> CacheLIRS<K, V> createCache(int maxSize) {
return new CacheLIRS<K, V>(maxSize, 1, 0);
}
}
......@@ -10,6 +10,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Random;
import org.h2.mvstore.cache.CacheLongKeyLIRS;
import org.h2.test.TestBase;
import org.h2.util.New;
......@@ -34,7 +35,6 @@ public class TestCacheLongKeyLIRS extends TestBase {
}
private void testCache() {
testResize();
testRandomSmallCache();
testEdgeCases();
testSize();
......@@ -47,21 +47,6 @@ public class TestCacheLongKeyLIRS extends TestBase {
testRandomOperations();
}
private void testResize() {
// cache with 100 memory, average memory 10
// (that means 10 entries)
CacheLongKeyLIRS<Integer> t1 =
new CacheLongKeyLIRS<Integer>(100, 10, 1, 0);
// another cache with more entries
CacheLongKeyLIRS<Integer> t2 =
new CacheLongKeyLIRS<Integer>(100, 1, 1, 0);
for (int i = 0; i < 200; i++) {
t1.put(i, i, 1);
t2.put(i, i, 1);
}
assertEquals(toString(t2), toString(t1));
}
private static void testRandomSmallCache() {
Random r = new Random(1);
for (int i = 0; i < 10000; i++) {
......@@ -94,7 +79,7 @@ public class TestCacheLongKeyLIRS extends TestBase {
test.put(1, 10, 100);
assertEquals(10, test.get(1).intValue());
try {
test.put(1, null, 100);
test.put(1, null, 100);
fail();
} catch (IllegalArgumentException e) {
// expected
......@@ -105,12 +90,6 @@ public class TestCacheLongKeyLIRS extends TestBase {
} catch (IllegalArgumentException e) {
// expected
}
try {
test.setAverageMemory(0);
fail();
} catch (IllegalArgumentException e) {
// expected
}
}
private void testSize() {
......@@ -124,17 +103,6 @@ public class TestCacheLongKeyLIRS extends TestBase {
verifyMapSize(769, 2048);
CacheLongKeyLIRS<Integer> test;
test = createCache(3, 10);
test.put(0, 0, 9);
test.put(1, 10, 9);
test.put(2, 20, 9);
test.put(3, 30, 9);
test.put(4, 40, 9);
test = createCache(1, 1);
test.put(1, 10);
test.put(0, 0);
test.get(0);
test = createCache(1000);
for (int j = 0; j < 2000; j++) {
......@@ -142,20 +110,31 @@ public class TestCacheLongKeyLIRS extends TestBase {
}
// for a cache of size 1000,
// there are 62 cold entries (about 6.25%).
// assertEquals(62, test.size() - test.sizeHot());
assertEquals(62, test.size() - test.sizeHot());
// at most as many non-resident elements
// as there are entries in the stack
// assertEquals(968, test.sizeNonResident());
assertEquals(968, test.sizeNonResident());
}
private void verifyMapSize(int elements, int expectedMapSize) {
CacheLongKeyLIRS<Integer> test;
test = createCache(elements - 1);
assertTrue(test.sizeMapArray() < expectedMapSize);
for (int i = 0; i < elements - 1; i++) {
test.put(i, i * 10);
}
assertTrue(test.sizeMapArray() + "<" + expectedMapSize,
test.sizeMapArray() < expectedMapSize);
test = createCache(elements);
for (int i = 0; i < elements + 1; i++) {
test.put(i, i * 10);
}
assertEquals(expectedMapSize, test.sizeMapArray());
test = createCache(elements * 100, 100);
assertEquals(expectedMapSize, test.sizeMapArray());
test = createCache(elements * 2);
for (int i = 0; i < elements * 2; i++) {
test.put(i, i * 10);
}
assertTrue(test.sizeMapArray() + ">" + expectedMapSize,
test.sizeMapArray() > expectedMapSize);
}
private void testGetPutPeekRemove() {
......@@ -295,7 +274,7 @@ public class TestCacheLongKeyLIRS extends TestBase {
}
private void testClear() {
CacheLongKeyLIRS<Integer> test = createCache(40, 10);
CacheLongKeyLIRS<Integer> test = createCache(40);
for (int i = 0; i < 5; i++) {
test.put(i, 10 * i, 9);
}
......@@ -311,7 +290,6 @@ public class TestCacheLongKeyLIRS extends TestBase {
assertTrue(x >= 1 && x <= 4);
}
assertEquals(40, test.getMaxMemory());
assertEquals(10, test.getAverageMemory());
assertEquals(36, test.getUsedMemory());
assertEquals(4, test.size());
assertEquals(3, test.sizeHot());
......@@ -322,20 +300,15 @@ public class TestCacheLongKeyLIRS extends TestBase {
test.setMaxMemory(10);
assertEquals(10, test.getMaxMemory());
test.setMaxMemory(40);
test.setAverageMemory(1);
assertEquals(1, test.getAverageMemory());
test.setAverageMemory(10);
verify(test, "mem: 36 stack: 4 3 2 1 cold: 4 non-resident: 0");
// putAll uses the average memory
test.putAll(test.getMap());
verify(test, "mem: 40 stack: 4 3 2 1 cold: non-resident: 0");
verify(test, "mem: 4 stack: 4 3 2 1 cold: non-resident: 0");
test.clear();
verify(test, "mem: 0 stack: cold: non-resident:");
assertEquals(40, test.getMaxMemory());
assertEquals(10, test.getAverageMemory());
assertEquals(0, test.getUsedMemory());
assertEquals(0, test.size());
assertEquals(0, test.sizeHot());
......@@ -504,13 +477,8 @@ public class TestCacheLongKeyLIRS extends TestBase {
}
}
private static <V> CacheLongKeyLIRS<V> createCache(int maxElements) {
return createCache(maxElements, 1);
}
private static <V> CacheLongKeyLIRS<V> createCache(int maxSize,
int averageSize) {
return new CacheLongKeyLIRS<V>(maxSize, averageSize, 1, 0);
private static <V> CacheLongKeyLIRS<V> createCache(int maxSize) {
return new CacheLongKeyLIRS<V>(maxSize, 1, 0);
}
}
......@@ -35,9 +35,10 @@ import java.util.Set;
* an individual LIRS cache.
* <p>
* Accessed entries are only moved to the top of the stack if at least a number
* of other entries have been moved to the front (1% by default). Write access
* and moving entries to the top of the stack is synchronized per segment.
*
* of other entries have been moved to the front (8 per segment by default).
* Write access and moving entries to the top of the stack is synchronized per
* segment.
*
* @author Thomas Mueller
* @param <K> the key type
* @param <V> the value type
......@@ -49,11 +50,6 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
*/
private long maxMemory;
/**
* The average memory used by one entry.
*/
private int averageMemory;
private final Segment<K, V>[] segments;
private final int segmentCount;
......@@ -64,29 +60,26 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
/**
* Create a new cache with the given number of entries, and the default
* settings (an average size of 1 per entry, 16 segments, and stack move
* distance equals to the maximum number of entries divided by 100).
* settings (16 segments, and a stack move distance of 8).
*
* @param maxEntries the maximum number of entries
* @param maxMemory the maximum memory to use (1 or larger)
*/
public CacheLIRS(int maxEntries) {
this(maxEntries, 1, 16, maxEntries / 100);
public CacheLIRS(int maxMemory) {
this(maxMemory, 16, 8);
}
/**
* Create a new cache with the given memory size.
*
* @param maxMemory the maximum memory to use (1 or larger)
* @param averageMemory the average memory (1 or larger)
* @param segmentCount the number of cache segments (must be a power of 2)
* @param stackMoveDistance how many other items are to be moved to the top
* of the stack before the current item is moved
*/
@SuppressWarnings("unchecked")
public CacheLIRS(long maxMemory, int averageMemory, int segmentCount,
public CacheLIRS(long maxMemory, int segmentCount,
int stackMoveDistance) {
setMaxMemory(maxMemory);
setAverageMemory(averageMemory);
if (Integer.bitCount(segmentCount) != 1) {
throw new IllegalArgumentException(
"The segment count must be a power of 2, is "
......@@ -97,7 +90,8 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
this.stackMoveDistance = stackMoveDistance;
segments = new Segment[segmentCount];
clear();
this.segmentShift = Integer.numberOfTrailingZeros(segments[0].entries.length);
// use the high bits for the segment
this.segmentShift = 32 - Integer.bitCount(segmentMask);
}
/**
......@@ -108,7 +102,7 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
long max = Math.max(1, maxMemory / segmentCount);
for (int i = 0; i < segmentCount; i++) {
segments[i] = new Segment<K, V>(
this, max, averageMemory, stackMoveDistance);
this, max, stackMoveDistance, 8);
}
}
......@@ -154,11 +148,35 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
*/
public V put(K key, V value, int memory) {
int hash = getHash(key);
return getSegment(hash).put(key, hash, value, memory);
int segmentIndex = getSegmentIndex(hash);
Segment<K, V> s = segments[segmentIndex];
// check whether resize is required: synchronize on s, to avoid
// concurrent resizes (concurrent reads read
// from the old segment)
synchronized (s) {
s = resizeIfNeeded(s, segmentIndex);
return s.put(key, hash, value, memory);
}
}
/**
 * Replace the given segment with a re-sized copy if its hash table is
 * too full or too sparse. The caller must be synchronized on the
 * segment that is currently stored at the given index.
 *
 * @param s the segment that was read before synchronizing
 * @param segmentIndex the index of the segment in the segments array
 * @return the segment to operate on (the given one, or its replacement)
 */
private Segment<K, V> resizeIfNeeded(Segment<K, V> s, int segmentIndex) {
    int newLen = s.getNewMapLen();
    if (newLen == 0) {
        // the current hash table length is still adequate
        return s;
    }
    // another thread might have resized
    // (as we retrieved the segment before synchronizing on it)
    Segment<K, V> s2 = segments[segmentIndex];
    if (s == s2) {
        // no other thread resized, so we do
        s = new Segment<K, V>(s, newLen);
        segments[segmentIndex] = s;
    }
    return s;
}
/**
* Add an entry to the cache using the average memory size.
* Add an entry to the cache using a memory size of 1.
*
* @param key the key (may not be null)
* @param value the value (may not be null)
......@@ -170,15 +188,14 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
}
/**
* Get the size of the given value. The default implementation returns the
* average memory as configured for this cache.
* Get the size of the given value. The default implementation returns 1.
*
* @param key the key
* @param value the value
* @return the size
*/
protected int sizeOf(K key, V value) {
return averageMemory;
return 1;
}
/**
......@@ -199,9 +216,17 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
* @return the old value, or null if there was no resident entry
*/
@Override
public synchronized V remove(Object key) {
public V remove(Object key) {
int hash = getHash(key);
return getSegment(hash).remove(key, hash);
int segmentIndex = getSegmentIndex(hash);
Segment<K, V> s = segments[segmentIndex];
// check whether resize is required: synchronize on s, to avoid
// concurrent resizes (concurrent reads read
// from the old segment)
synchronized (s) {
s = resizeIfNeeded(s, segmentIndex);
return s.remove(key, hash);
}
}
/**
......@@ -230,8 +255,11 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
}
private Segment<K, V> getSegment(int hash) {
int segmentIndex = (hash >>> segmentShift) & segmentMask;
return segments[segmentIndex];
return segments[getSegmentIndex(hash)];
}
private int getSegmentIndex(int hash) {
return (hash >>> segmentShift) & segmentMask;
}
/**
......@@ -284,33 +312,6 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
}
}
/**
* Set the average memory used per entry. It is used to calculate the
* length of the internal array.
*
* @param averageMemory the average memory used (1 or larger)
*/
public void setAverageMemory(int averageMemory) {
if (averageMemory <= 0) {
throw new IllegalArgumentException("Average memory must be larger than 0");
}
this.averageMemory = averageMemory;
if (segments != null) {
for (Segment<K, V> s : segments) {
s.setAverageMemory(averageMemory);
}
}
}
/**
* Get the average memory used per entry.
*
* @return the average memory
*/
public int getAverageMemory() {
return averageMemory;
}
/**
* Get the maximum memory to use.
*
......@@ -326,7 +327,7 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
* @return the entry set
*/
@Override
public synchronized Set<Map.Entry<K, V>> entrySet() {
public Set<Map.Entry<K, V>> entrySet() {
HashMap<K, V> map = new HashMap<K, V>();
for (K k : keySet()) {
map.put(k, find(k).value);
......@@ -340,7 +341,7 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
* @return the set of keys
*/
@Override
public synchronized Set<K> keySet() {
public Set<K> keySet() {
HashSet<K> set = new HashSet<K>();
for (Segment<K, V> s : segments) {
set.addAll(s.keySet());
......@@ -409,7 +410,7 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
* @param nonResident true for non-resident entries
* @return the key list
*/
public synchronized List<K> keys(boolean cold, boolean nonResident) {
public List<K> keys(boolean cold, boolean nonResident) {
ArrayList<K> keys = new ArrayList<K>();
for (Segment<K, V> s : segments) {
keys.addAll(s.keys(cold, nonResident));
......@@ -423,8 +424,8 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
* @param <K> the key type
* @param <V> the value type
*/
static class Segment<K, V> {
private static class Segment<K, V> {
/**
* The number of (hot, cold, and non-resident) entries in the map.
*/
......@@ -443,7 +444,7 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
/**
* The map array. The size is always a power of 2.
*/
Entry<K, V>[] entries;
final Entry<K, V>[] entries;
/**
* The currently used memory.
......@@ -463,16 +464,11 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
*/
private long maxMemory;
/**
* The average memory used by one entry.
*/
private int averageMemory;
/**
* The bit mask that is applied to the key hash code to get the index in
* the map array. The mask is the length of the array minus one.
*/
private int mask;
private final int mask;
/**
* The LIRS stack size.
......@@ -481,26 +477,27 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
/**
* The stack of recently referenced elements. This includes all hot
* entries, the recently referenced cold entries, and all non-resident
* cold entries.
* entries, and the recently referenced cold entries. Resident cold
* entries that were not recently referenced, as well as non-resident
* cold entries, are not in the stack.
* <p>
* There is always at least one entry: the head entry.
*/
private Entry<K, V> stack;
private final Entry<K, V> stack;
/**
* The queue of resident cold entries.
* <p>
* There is always at least one entry: the head entry.
*/
private Entry<K, V> queue;
private final Entry<K, V> queue;
/**
* The queue of non-resident cold entries.
* <p>
* There is always at least one entry: the head entry.
*/
private Entry<K, V> queue2;
private final Entry<K, V> queue2;
/**
* The number of times any item was moved to the top of the stack.
......@@ -508,35 +505,20 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
private int stackMoveCounter;
/**
* Create a new cache.
* Create a new cache segment.
*
* @param cache the cache
* @param maxMemory the maximum memory to use
* @param averageMemory the average memory usage of an object
* @param stackMoveDistance the number of other entries to be moved to
* the top of the stack before moving an entry to the top
* @param len the number of hash table buckets (must be a power of 2)
*/
Segment(CacheLIRS<K, V> cache, long maxMemory, int averageMemory,
int stackMoveDistance) {
Segment(CacheLIRS<K, V> cache, long maxMemory,
int stackMoveDistance, int len) {
this.cache = cache;
setMaxMemory(maxMemory);
setAverageMemory(averageMemory);
this.stackMoveDistance = stackMoveDistance;
clear();
}
private void clear() {
// calculate the size of the map array
// assume a fill factor of at most 80%
long maxLen = (long) (maxMemory / averageMemory / 0.75);
// the size needs to be a power of 2
long l = 8;
while (l < maxLen) {
l += l;
}
// the array size is at most 2^31 elements
int len = (int) Math.min(1L << 31, l);
// the bit mask has all bits set
mask = len - 1;
......@@ -548,15 +530,83 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
queue2 = new Entry<K, V>();
queue2.queuePrev = queue2.queueNext = queue2;
// first set to null - avoiding out of memory
entries = null;
@SuppressWarnings("unchecked")
Entry<K, V>[] e = new Entry[len];
entries = e;
}
mapSize = 0;
usedMemory = 0;
stackSize = queueSize = queue2Size = 0;
/**
 * Create a new cache segment from an existing one, copying all entries
 * into a hash table of the given (new) size.
 * The caller must synchronize on the old segment, to avoid
 * concurrent modifications.
 *
 * @param old the old segment
 * @param len the number of hash table buckets (must be a power of 2)
 */
Segment(Segment<K, V> old, int len) {
    this(old.cache, old.maxMemory, old.stackMoveDistance, len);
    // rebuild the LIRS stack, walking the old list backwards from the
    // tail (stackPrev of the head) — presumably so addToStack re-inserts
    // entries in the original order; TODO confirm addToStack pushes on top
    Entry<K, V> s = old.stack.stackPrev;
    while (s != old.stack) {
        Entry<K, V> e = copy(s);
        addToMap(e);
        addToStack(e);
        s = s.stackPrev;
    }
    // rebuild the queue of resident cold entries; an entry may already
    // have been copied while rebuilding the stack, so look it up first
    // to avoid creating a duplicate in the map
    s = old.queue.queuePrev;
    while (s != old.queue) {
        Entry<K, V> e = find(s.key, getHash(s.key));
        if (e == null) {
            e = copy(s);
            addToMap(e);
        }
        addToQueue(queue, e);
        s = s.queuePrev;
    }
    // rebuild the queue of non-resident cold entries in the same way
    s = old.queue2.queuePrev;
    while (s != old.queue2) {
        Entry<K, V> e = find(s.key, getHash(s.key));
        if (e == null) {
            e = copy(s);
            addToMap(e);
        }
        addToQueue(queue2, e);
        s = s.queuePrev;
    }
}
/**
 * Calculate the new number of hash table buckets if the internal map
 * should be re-sized.
 *
 * @return 0 if no resizing is needed, or the new length
 */
int getNewMapLen() {
    int len = mask + 1;
    if (len * 3 < mapSize * 4 && len < (1 << 28)) {
        // more than 75% usage, and still room to double below 2^28
        return len * 2;
    } else if (len > 32 && len / 8 > mapSize) {
        // less than 12.5% usage, and more than the 32-bucket minimum
        return len / 2;
    }
    return 0;
}
/**
 * Insert the given entry into the hash table and update the memory
 * and size counters.
 *
 * @param e the entry to add
 */
private void addToMap(Entry<K, V> e) {
    int bucket = getHash(e.key) & mask;
    // prepend to the collision chain of the target bucket
    e.mapNext = entries[bucket];
    entries[bucket] = e;
    mapSize++;
    usedMemory += e.memory;
}
/**
 * Create a shallow duplicate of an entry, carrying over the key, value,
 * memory size, and stack-move counter, but none of the list links.
 *
 * @param old the entry to duplicate
 * @return the new, unlinked entry
 */
private static <K, V> Entry<K, V> copy(Entry<K, V> old) {
    Entry<K, V> dup = new Entry<K, V>();
    dup.topMove = old.topMove;
    dup.memory = old.memory;
    dup.value = old.value;
    dup.key = old.key;
    return dup;
}
/**
......@@ -593,8 +643,8 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
}
if (e.isHot()) {
if (e != stack.stackNext) {
if (stackMoveDistance == 0
|| stackMoveCounter - e.topMove > stackMoveDistance) {
if (stackMoveDistance == 0 ||
stackMoveCounter - e.topMove > stackMoveDistance) {
access(key, hash);
}
}
......@@ -617,8 +667,8 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
}
if (e.isHot()) {
if (e != stack.stackNext) {
if (stackMoveDistance == 0
|| stackMoveCounter - e.topMove > stackMoveDistance) {
if (stackMoveDistance == 0 ||
stackMoveCounter - e.topMove > stackMoveDistance) {
// move a hot entry to the top of the stack
// unless it is already there
boolean wasEnd = e == stack.stackPrev;
......@@ -894,11 +944,13 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
ArrayList<K> keys = new ArrayList<K>();
if (cold) {
Entry<K, V> start = nonResident ? queue2 : queue;
for (Entry<K, V> e = start.queueNext; e != start; e = e.queueNext) {
for (Entry<K, V> e = start.queueNext; e != start;
e = e.queueNext) {
keys.add(e.key);
}
} else {
for (Entry<K, V> e = stack.stackNext; e != stack; e = e.stackNext) {
for (Entry<K, V> e = stack.stackNext; e != stack;
e = e.stackNext) {
keys.add(e.key);
}
}
......@@ -942,27 +994,9 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
* @param maxMemory the maximum size (1 or larger)
*/
void setMaxMemory(long maxMemory) {
if (maxMemory <= 0) {
throw new IllegalArgumentException(
"Max memory must be larger than 0");
}
this.maxMemory = maxMemory;
}
/**
* Set the average memory used per entry. It is used to calculate the
* length of the internal array.
*
* @param averageMemory the average memory used (1 or larger)
*/
void setAverageMemory(int averageMemory) {
if (averageMemory <= 0) {
throw new IllegalArgumentException(
"Average memory must be larger than 0");
}
this.averageMemory = averageMemory;
}
}
/**
......@@ -1019,7 +1053,7 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
Entry<K, V> queuePrev;
/**
* The next entry in the map
* The next entry in the map (the chained entry).
*/
Entry<K, V> mapNext;
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论