Unverified commit 1f07457e, authored by Andrei Tokar and committed by GitHub

Merge pull request #1271 from h2database/mem-leak

Minor memory leak
......@@ -1038,7 +1038,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
/**
* Get version of the map, which is the version of the store,
* at which map was modified last time.
* at the moment when map was modified last time.
*
* @return version
*/
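For readers unfamiliar with the versioning scheme this javadoc describes: a map's version is frozen at the store version that was current when the map last changed, while the store's own version keeps advancing with each commit. A minimal sketch of that contract, assuming getVersion() is the public getter documented above and using an in-memory store the way the benchmark below does:

```java
import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;

public class MapVersionExample {
    public static void main(String[] args) {
        MVStore store = MVStore.open(null);         // in-memory store
        try {
            MVMap<Integer, String> map = store.openMap("data");
            map.put(1, "one");
            long mapVersion = map.getVersion();     // store version at the map's last change
            store.commit();                         // advances the store's current version
            System.out.println(mapVersion + " <= " + store.getCurrentVersion());
        } finally {
            store.close();
        }
    }
}
```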
......@@ -1136,10 +1136,10 @@ public class MVMap<K, V> extends AbstractMap<K, V>
*/
final void copyFrom(MVMap<K, V> sourceMap) {
// We are going to cheat a little bit in the copy()
// by setting map's root to an arbitrary nodes
// to allow for just created ones to be saved.
// by temporary setting map's root to some arbitrary nodes.
// This will allow for newly created ones to be saved.
// That's why it's important to preserve all chunks
// created in the process, especially it retention time
// created in the process, especially if retention time
// is set to a lower value, or even 0.
MVStore.TxCounter txCounter = store.registerVersionUsage();
try {
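The reworded comment above is the reason for the registerVersionUsage()/try block that follows: while copyFrom() temporarily points the map's root at freshly created nodes, the chunks written along the way must not be reclaimed, even when the store's retention time is set to a low value or 0. A self-contained sketch of that pin-while-working shape, using a hypothetical VersionPin type instead of the real MVStore.TxCounter API:

```java
public final class VersionPinSketch {

    /** Hypothetical stand-in for "this version is still in use, keep its chunks". */
    interface VersionPin extends AutoCloseable {
        @Override
        void close();   // releasing the pin makes the old chunks reclaimable again
    }

    static VersionPin pinCurrentVersion() {
        return () -> System.out.println("pin released");
    }

    /** Do work that temporarily re-points shared state, with the version pinned. */
    static void copyWithPinnedVersion(Runnable copyWork) {
        try (VersionPin pin = pinCurrentVersion()) {
            copyWork.run();     // safe: pinned chunks survive until the finally/close
        }
    }

    public static void main(String[] args) {
        copyWithPinnedVersion(() -> System.out.println("copying pages..."));
    }
}
```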
......@@ -1170,6 +1170,11 @@ public class MVMap<K, V> extends AbstractMap<K, V>
return target;
}
/**
* If map was used in append mode, this method will ensure that append buffer
* is flushed - emptied with all entries inserted into map as a new leaf.
* @return potentially updated RootReference
*/
public RootReference flushAppendBuffer() {
return flushAppendBuffer(null);
}
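The new no-arg overload makes the append-buffer flush callable without supplying a RootReference. A hedged usage sketch, assuming MVMap.append(key, value) is the buffering insert this javadoc refers to (it expects keys in ascending order) and that the flush is otherwise deferred until the store decides to do it:

```java
import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;

public class AppendBufferExample {
    public static void main(String[] args) {
        MVStore store = MVStore.open(null);            // in-memory store
        try {
            MVMap<Long, String> log = store.openMap("log");
            for (long i = 0; i < 100; i++) {
                log.append(i, "entry-" + i);           // keys arrive in ascending order
            }
            log.flushAppendBuffer();                   // empty the buffer into the tree
            System.out.println(log.size());            // 100 once flushed
        } finally {
            store.close();
        }
    }
}
```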
......@@ -1216,9 +1221,9 @@ public class MVMap<K, V> extends AbstractMap<K, V>
p = split;
} else {
Object keys[] = new Object[] { key };
Page.PageReference children[] = new Page.PageReference[store.getKeysPerPage() + 1];
children[0] = new Page.PageReference(p);
children[1] = new Page.PageReference(split);
Page.PageReference children[] = new Page.PageReference[] {
new Page.PageReference(p),
new Page.PageReference(split)};
p = Page.create(this, keys, null, children, p.getTotalCount() + split.getTotalCount(), 0);
}
break;
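The replacement above sizes the new root's children array to exactly the two pages produced by the split, instead of the store.getKeysPerPage() + 1 slots the old code reserved. A trivial illustration of what that saves on a long-lived root (sizes are made up):

```java
public class ExactSizeArraySketch {
    public static void main(String[] args) {
        int keysPerPage = 48;                               // hypothetical store setting
        Object[] oversized = new Object[keysPerPage + 1];   // old shape: mostly null slots
        Object[] exact = new Object[] { "left half", "right half" };   // new shape
        System.out.println(oversized.length + " slots vs " + exact.length);
    }
}
```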
......@@ -1231,7 +1236,8 @@ public class MVMap<K, V> extends AbstractMap<K, V>
p.setChild(index, split);
p.insertNode(index, key, c);
int keyCount;
if ((keyCount = p.getKeyCount()) <= store.getKeysPerPage() && (p.getMemory() < store.getMaxPageSize() || keyCount <= (p.isLeaf() ? 1 : 2))) {
if ((keyCount = p.getKeyCount()) <= store.getKeysPerPage() &&
(p.getMemory() < store.getMaxPageSize() || keyCount <= (p.isLeaf() ? 1 : 2))) {
break;
}
int at = keyCount - 2;
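Only the line wrapping changes here, but the condition is worth spelling out: splitting stops once the key count fits within keysPerPage and either the page's memory is below the maximum page size or the page is already at its minimum useful size (one entry for a leaf, two children for a node). A standalone restatement of that predicate, with made-up numbers:

```java
public class SplitStopSketch {
    static boolean smallEnough(int keyCount, long pageMemory, boolean leaf,
                               int keysPerPage, long maxPageSize) {
        return keyCount <= keysPerPage
                && (pageMemory < maxPageSize || keyCount <= (leaf ? 1 : 2));
    }

    public static void main(String[] args) {
        System.out.println(smallEnough(10, 2_000, true, 48, 16_384));   // true: fits
        System.out.println(smallEnough(60, 2_000, true, 48, 16_384));   // false: too many keys
        System.out.println(smallEnough(1, 100_000, true, 48, 16_384));  // true: single-entry leaf
    }
}
```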
......
......@@ -860,12 +860,12 @@ public abstract class Page implements Cloneable
/**
* The position, if known, or 0.
*/
final long pos;
private long pos;
/**
* The page, if in memory, or null.
*/
final Page page;
private Page page;
/**
* The descendant count for this child page.
......@@ -878,7 +878,7 @@ public abstract class Page implements Cloneable
PageReference(long pos, long count) {
this(null, pos, count);
assert pos != 0;
assert DataUtils.isPageSaved(pos);
}
private PageReference(Page page, long pos, long count) {
......@@ -887,6 +887,35 @@ public abstract class Page implements Cloneable
this.count = count;
}
public Page getPage() {
return page;
}
void clearPageReference() {
if (page != null) {
if (!page.isSaved()) {
throw DataUtils.newIllegalStateException(
DataUtils.ERROR_INTERNAL, "Page not written");
}
page.writeEnd();
assert pos == page.getPos();
assert count == page.getTotalCount();
page = null;
}
}
long getPos() {
return pos;
}
void resetPos() {
Page p = page;
if (p != null) {
pos = p.getPos();
assert count == p.getTotalCount();
}
}
@Override
public String toString() {
return "Cnt:" + count + ", pos:" + DataUtils.getPageChunkId(pos) +
......@@ -940,10 +969,10 @@ public abstract class Page implements Cloneable
@Override
public Page getChildPage(int index) {
PageReference ref = children[index];
Page page = ref.page;
Page page = ref.getPage();
if(page == null) {
page = map.readPage(ref.pos);
assert ref.pos == page.getPos();
page = map.readPage(ref.getPos());
assert ref.getPos() == page.getPos();
assert ref.count == page.getTotalCount();
}
return page;
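With the accessors in place, getChildPage() keeps the same load-on-demand behaviour: use the in-memory page if the reference still holds one, otherwise re-read it from the stored position and assert that it matches. A generic sketch of that pattern with illustrative names:

```java
import java.util.function.LongFunction;

public class LazyChildSketch {
    static <P> P childPage(P loadedOrNull, long pos, LongFunction<P> readFromDisk) {
        return loadedOrNull != null ? loadedOrNull : readFromDisk.apply(pos);
    }

    public static void main(String[] args) {
        LongFunction<String> reader = pos -> "page read from pos " + pos;
        System.out.println(childPage("page already in memory", 42L, reader));
        System.out.println(childPage(null, 42L, reader));   // falls back to reading
    }
}
```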
......@@ -951,12 +980,12 @@ public abstract class Page implements Cloneable
@Override
public Page getChildPageIfLoaded(int index) {
return children[index].page;
return children[index].getPage();
}
@Override
public long getChildPagePos(int index) {
return children[index].pos;
return children[index].getPos();
}
@Override
......@@ -1017,7 +1046,7 @@ public abstract class Page implements Cloneable
public void setChild(int index, Page c) {
assert c != null;
PageReference child = children[index];
if (c != child.page || c.getPos() != child.pos) {
if (c != child.getPage() || c.getPos() != child.getPos()) {
totalCount += c.getTotalCount() - child.count;
children = children.clone();
children[index] = new PageReference(c);
......@@ -1068,10 +1097,11 @@ public abstract class Page implements Cloneable
if (isPersistent()) {
for (int i = 0, size = map.getChildPageCount(this); i < size; i++) {
PageReference ref = children[i];
if (ref.page != null) {
ref.page.removeAllRecursive();
Page page = ref.getPage();
if (page != null) {
page.removeAllRecursive();
} else {
long c = children[i].pos;
long c = ref.getPos();
int type = DataUtils.getPageType(c);
if (type == PAGE_TYPE_LEAF) {
int mem = DataUtils.getPageMaxLength(c);
......@@ -1118,7 +1148,7 @@ public abstract class Page implements Cloneable
protected void writeChildren(WriteBuffer buff, boolean withCounts) {
int keyCount = getKeyCount();
for (int i = 0; i <= keyCount; i++) {
buff.putLong(children[i].pos);
buff.putLong(children[i].getPos());
}
if(withCounts) {
for (int i = 0; i <= keyCount; i++) {
......@@ -1133,10 +1163,11 @@ public abstract class Page implements Cloneable
int patch = write(chunk, buff);
int len = getRawChildPageCount();
for (int i = 0; i < len; i++) {
Page p = children[i].page;
PageReference ref = children[i];
Page p = ref.getPage();
if (p != null) {
p.writeUnsavedRecursive(chunk, buff);
children[i] = new PageReference(p);
ref.resetPos();
}
}
int old = buff.position();
......@@ -1150,15 +1181,7 @@ public abstract class Page implements Cloneable
void writeEnd() {
int len = getRawChildPageCount();
for (int i = 0; i < len; i++) {
PageReference ref = children[i];
if (ref.page != null) {
if (!ref.page.isSaved()) {
throw DataUtils.newIllegalStateException(
DataUtils.ERROR_INTERNAL, "Page not written");
}
ref.page.writeEnd();
children[i] = new PageReference(ref.pos, ref.count);
}
children[i].clearPageReference();
}
}
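Together with writeUnsavedRecursive() above, writeEnd() now completes a two-phase save without allocating replacement PageReference objects: phase one writes any unsaved children and refreshes their stored positions in place (resetPos), phase two releases the in-memory references (clearPageReference), which is presumably the retained memory the PR title refers to. A compact sketch of that order of operations, with hypothetical Node/Ref types:

```java
import java.util.ArrayList;
import java.util.List;

public class TwoPhaseSaveSketch {
    static final class Node {
        final String name;
        long pos;                                   // 0 until "written"
        Node(String name) { this.name = name; }
        void write() { pos = name.hashCode() | 1L; }   // pretend disk write, nonzero
    }

    static final class Ref {
        private Node node;                          // strong ref while loaded
        private long pos;
        Ref(Node node) { this.node = node; }
        void resetPos() { if (node != null) pos = node.pos; }
        void clearNodeReference() {
            if (node != null && node.pos != 0) node = null;   // drop ref once saved
        }
        @Override public String toString() { return "pos=" + pos + ", loaded=" + (node != null); }
    }

    public static void main(String[] args) {
        List<Node> pages = new ArrayList<>();
        List<Ref> children = new ArrayList<>();
        for (String name : new String[] { "a", "b", "c" }) {
            Node child = new Node(name);
            pages.add(child);
            children.add(new Ref(child));
        }
        // phase 1: write each (still unsaved) child, then record its position in place
        for (int i = 0; i < children.size(); i++) {
            pages.get(i).write();
            children.get(i).resetPos();
        }
        // phase 2: drop the strong references so the saved pages can be collected
        children.forEach(Ref::clearNodeReference);
        children.forEach(System.out::println);
    }
}
```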
......@@ -1181,7 +1204,7 @@ public abstract class Page implements Cloneable
if (i > 0) {
buff.append(" ");
}
buff.append("[").append(Long.toHexString(children[i].pos)).append("]");
buff.append("[").append(Long.toHexString(children[i].getPos())).append("]");
if(i < keyCount) {
buff.append(" ").append(getKey(i));
}
......
......@@ -7,10 +7,10 @@ package org.h2.test.store;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.TimeUnit;
import org.h2.mvstore.MVStore;
......@@ -84,7 +84,7 @@ public class TestMVStoreBenchmark extends TestBase {
mapList = new ArrayList<>(count);
mem = getMemory();
for (int i = 0; i < count; i++) {
mapList.add(new HashMap<Integer, String>(size));
mapList.add(new ConcurrentHashMap<Integer, String>(size));
}
addEntries(mapList, size);
hash = getMemory() - mem;
......@@ -93,7 +93,7 @@ public class TestMVStoreBenchmark extends TestBase {
mapList.clear();
mem = getMemory();
for (int i = 0; i < count; i++) {
mapList.add(new TreeMap<Integer, String>());
mapList.add(new ConcurrentSkipListMap<Integer, String>());
}
addEntries(mapList, size);
tree = getMemory() - mem;
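The memory comparison now builds ConcurrentHashMap and ConcurrentSkipListMap instances, presumably so the baseline collections match MVMap's concurrent usage profile. The getMemory() helper itself is not part of this diff; a hedged sketch of the kind of measurement such a helper typically performs (force a GC, read used heap, diff before/after):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class UsedHeapSketch {
    static long usedHeap() {
        Runtime rt = Runtime.getRuntime();
        System.gc();                                 // best effort, not guaranteed
        return rt.totalMemory() - rt.freeMemory();
    }

    public static void main(String[] args) {
        long before = usedHeap();
        Map<Integer, String> map = new ConcurrentHashMap<>();
        for (int i = 0; i < 100_000; i++) {
            map.put(i, "value-" + i);
        }
        long after = usedHeap();
        System.out.println("~retained bytes: " + (after - before)
                + " for " + map.size() + " entries");
    }
}
```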
......@@ -150,11 +150,10 @@ public class TestMVStoreBenchmark extends TestBase {
MVStore store = MVStore.open(null);
map = store.openMap("test");
mv = testPerformance(map, size);
map = new HashMap<>(size);
// map = new ConcurrentHashMap<Integer, String>(size);
store.close();
map = new ConcurrentHashMap<>(size);
hash = testPerformance(map, size);
map = new TreeMap<>();
// map = new ConcurrentSkipListMap<Integer, String>();
map = new ConcurrentSkipListMap<>();
tree = testPerformance(map, size);
if (hash < tree && mv < tree * 1.5) {
break;
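Beyond swapping in the concurrent map types, the updated loop closes the in-memory MVStore as soon as its own measurement is done, so the store's retained pages and caches do not sit on the heap while the plain-map candidates are being timed. A hedged mini-harness in the same spirit:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;

public class MiniPerfComparison {
    static long time(Map<Integer, String> map, int size) {
        long start = System.nanoTime();
        for (int i = 0; i < size; i++) {
            map.put(i, "v" + i);
        }
        for (int i = 0; i < size; i++) {
            map.get(i);
        }
        return System.nanoTime() - start;
    }

    public static void main(String[] args) {
        int size = 100_000;
        MVStore store = MVStore.open(null);          // in-memory store
        long mv;
        try {
            MVMap<Integer, String> mvMap = store.openMap("test");
            mv = time(mvMap, size);
        } finally {
            store.close();                           // release before the next candidate runs
        }
        long hash = time(new ConcurrentHashMap<>(size), size);
        System.out.println("mv=" + mv + " ns, concurrentHashMap=" + hash + " ns");
    }
}
```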
......