Unverified commit 1f07457e authored by Andrei Tokar, committed by GitHub

Merge pull request #1271 from h2database/mem-leak

Minor memory leak
MVMap.java

@@ -1038,7 +1038,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
     /**
      * Get version of the map, which is the version of the store,
-     * at which map was modified last time.
+     * at the moment when map was modified last time.
      *
      * @return version
      */
@@ -1136,10 +1136,10 @@ public class MVMap<K, V> extends AbstractMap<K, V>
      */
     final void copyFrom(MVMap<K, V> sourceMap) {
         // We are going to cheat a little bit in the copy()
-        // by setting map's root to an arbitrary nodes
-        // to allow for just created ones to be saved.
+        // by temporary setting map's root to some arbitrary nodes.
+        // This will allow for newly created ones to be saved.
         // That's why it's important to preserve all chunks
-        // created in the process, especially it retention time
+        // created in the process, especially if retention time
         // is set to a lower value, or even 0.
         MVStore.TxCounter txCounter = store.registerVersionUsage();
         try {
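
The try block that begins here is bracketed by the version-usage registration above: pinning the current version keeps chunks written during the copy from being reclaimed, even with a retention time of 0. A minimal sketch of that bracket, assuming MVStore's matching deregisterVersionUsage(TxCounter) call and that these members are accessible (the copy loop is a trivial placeholder, not the real copy logic):

import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;

// Sketch of the register/deregister bracket that copyFrom() relies on.
// Assumption: registerVersionUsage()/deregisterVersionUsage(TxCounter) are
// accessible here; the copy itself is reduced to a key-by-key loop.
class VersionUsagePinSketch {
    static void copyPinned(MVStore store, MVMap<Integer, String> source,
                           MVMap<Integer, String> target) {
        MVStore.TxCounter txCounter = store.registerVersionUsage();
        try {
            // chunks created while copying stay alive because the version is
            // pinned, even if the store's retention time is set to 0
            for (Integer key : source.keySet()) {
                target.put(key, source.get(key));
            }
        } finally {
            store.deregisterVersionUsage(txCounter);
        }
    }
}
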
@@ -1170,6 +1170,11 @@ public class MVMap<K, V> extends AbstractMap<K, V>
         return target;
     }

+    /**
+     * If map was used in append mode, this method will ensure that append buffer
+     * is flushed - emptied with all entries inserted into map as a new leaf.
+     * @return potentially updated RootReference
+     */
     public RootReference flushAppendBuffer() {
         return flushAppendBuffer(null);
     }
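
The newly documented method is easiest to read next to its counterpart append(): entries with ascending keys are collected in a per-map buffer instead of descending the tree for every put, and flushAppendBuffer() turns the buffered entries into a new leaf. A hedged usage sketch, assuming MVMap.append(K, V) is public (as in H2 versions of this period) and that the RootReference return type is accessible:

import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;

// Hedged usage sketch for append mode; names and sizes are arbitrary.
public class AppendModeSketch {
    public static void main(String[] args) {
        MVStore store = MVStore.open(null);        // null file name = in-memory store
        try {
            MVMap<Integer, String> map = store.openMap("data");
            for (int i = 0; i < 1_000; i++) {
                map.append(i, "value-" + i);       // buffered; keys must be ascending
            }
            map.flushAppendBuffer();               // drain the buffer into a new leaf
            System.out.println(map.size());        // 1000
        } finally {
            store.close();
        }
    }
}
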
@@ -1216,9 +1221,9 @@ public class MVMap<K, V> extends AbstractMap<K, V>
                     p = split;
                 } else {
                     Object keys[] = new Object[] { key };
-                    Page.PageReference children[] = new Page.PageReference[store.getKeysPerPage() + 1];
-                    children[0] = new Page.PageReference(p);
-                    children[1] = new Page.PageReference(split);
+                    Page.PageReference children[] = new Page.PageReference[] {
+                            new Page.PageReference(p),
+                            new Page.PageReference(split)};
                     p = Page.create(this, keys, null, children, p.getTotalCount() + split.getTotalCount(), 0);
                 }
                 break;
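
For context, this branch builds a new root right after a page split: one separator key and exactly two child references. The old code sized the children array for a full page (store.getKeysPerPage() + 1 slots), so each such root carried dozens of permanently null slots; the replacement allocates exactly two. A standalone illustration of the difference, using placeholder names rather than H2's classes (48 keys per page is assumed as the typical default):

// Illustrative only; Ref stands in for Page.PageReference.
public class RightSizedArraySketch {
    static final class Ref {
        final Object page;
        Ref(Object page) { this.page = page; }
    }

    public static void main(String[] args) {
        int keysPerPage = 48;                 // assumed default, for illustration
        Object left = new Object();
        Object right = new Object();

        // Before: room for a full page, although a fresh root has only two children.
        Ref[] oversized = new Ref[keysPerPage + 1];
        oversized[0] = new Ref(left);
        oversized[1] = new Ref(right);

        // After: exactly the two references the new root needs.
        Ref[] exact = { new Ref(left), new Ref(right) };

        System.out.println(oversized.length + " slots vs " + exact.length);
    }
}
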
@@ -1231,7 +1236,8 @@ public class MVMap<K, V> extends AbstractMap<K, V>
             p.setChild(index, split);
             p.insertNode(index, key, c);
             int keyCount;
-            if ((keyCount = p.getKeyCount()) <= store.getKeysPerPage() && (p.getMemory() < store.getMaxPageSize() || keyCount <= (p.isLeaf() ? 1 : 2))) {
+            if ((keyCount = p.getKeyCount()) <= store.getKeysPerPage() &&
+                    (p.getMemory() < store.getMaxPageSize() || keyCount <= (p.isLeaf() ? 1 : 2))) {
                 break;
             }
             int at = keyCount - 2;
Page.java

@@ -860,12 +860,12 @@ public abstract class Page implements Cloneable
         /**
          * The position, if known, or 0.
          */
-        final long pos;
+        private long pos;

         /**
          * The page, if in memory, or null.
          */
-        final Page page;
+        private Page page;

         /**
          * The descendant count for this child page.
@@ -878,7 +878,7 @@ public abstract class Page implements Cloneable
         PageReference(long pos, long count) {
             this(null, pos, count);
-            assert pos != 0;
+            assert DataUtils.isPageSaved(pos);
         }

         private PageReference(Page page, long pos, long count) {
@@ -887,6 +887,35 @@ public abstract class Page implements Cloneable
             this.count = count;
         }

+        public Page getPage() {
+            return page;
+        }
+
+        void clearPageReference() {
+            if (page != null) {
+                if (!page.isSaved()) {
+                    throw DataUtils.newIllegalStateException(
+                            DataUtils.ERROR_INTERNAL, "Page not written");
+                }
+                page.writeEnd();
+                assert pos == page.getPos();
+                assert count == page.getTotalCount();
+                page = null;
+            }
+        }
+
+        long getPos() {
+            return pos;
+        }
+
+        void resetPos() {
+            Page p = page;
+            if (p != null) {
+                pos = p.getPos();
+                assert count == p.getTotalCount();
+            }
+        }
+
         @Override
         public String toString() {
             return "Cnt:" + count + ", pos:" + DataUtils.getPageChunkId(pos) +
@@ -940,10 +969,10 @@ public abstract class Page implements Cloneable
         @Override
         public Page getChildPage(int index) {
             PageReference ref = children[index];
-            Page page = ref.page;
+            Page page = ref.getPage();
             if(page == null) {
-                page = map.readPage(ref.pos);
-                assert ref.pos == page.getPos();
+                page = map.readPage(ref.getPos());
+                assert ref.getPos() == page.getPos();
                 assert ref.count == page.getTotalCount();
             }
             return page;
@@ -951,12 +980,12 @@ public abstract class Page implements Cloneable
         @Override
         public Page getChildPageIfLoaded(int index) {
-            return children[index].page;
+            return children[index].getPage();
         }

         @Override
         public long getChildPagePos(int index) {
-            return children[index].pos;
+            return children[index].getPos();
         }

         @Override
@@ -1017,7 +1046,7 @@ public abstract class Page implements Cloneable
         public void setChild(int index, Page c) {
             assert c != null;
             PageReference child = children[index];
-            if (c != child.page || c.getPos() != child.pos) {
+            if (c != child.getPage() || c.getPos() != child.getPos()) {
                 totalCount += c.getTotalCount() - child.count;
                 children = children.clone();
                 children[index] = new PageReference(c);
@@ -1068,10 +1097,11 @@ public abstract class Page implements Cloneable
             if (isPersistent()) {
                 for (int i = 0, size = map.getChildPageCount(this); i < size; i++) {
                     PageReference ref = children[i];
-                    if (ref.page != null) {
-                        ref.page.removeAllRecursive();
+                    Page page = ref.getPage();
+                    if (page != null) {
+                        page.removeAllRecursive();
                     } else {
-                        long c = children[i].pos;
+                        long c = ref.getPos();
                         int type = DataUtils.getPageType(c);
                         if (type == PAGE_TYPE_LEAF) {
                             int mem = DataUtils.getPageMaxLength(c);
@@ -1118,7 +1148,7 @@ public abstract class Page implements Cloneable
         protected void writeChildren(WriteBuffer buff, boolean withCounts) {
             int keyCount = getKeyCount();
             for (int i = 0; i <= keyCount; i++) {
-                buff.putLong(children[i].pos);
+                buff.putLong(children[i].getPos());
             }
             if(withCounts) {
                 for (int i = 0; i <= keyCount; i++) {
@@ -1133,10 +1163,11 @@ public abstract class Page implements Cloneable
             int patch = write(chunk, buff);
             int len = getRawChildPageCount();
             for (int i = 0; i < len; i++) {
-                Page p = children[i].page;
+                PageReference ref = children[i];
+                Page p = ref.getPage();
                 if (p != null) {
                     p.writeUnsavedRecursive(chunk, buff);
-                    children[i] = new PageReference(p);
+                    ref.resetPos();
                 }
             }
             int old = buff.position();
@@ -1150,15 +1181,7 @@ public abstract class Page implements Cloneable
         void writeEnd() {
             int len = getRawChildPageCount();
             for (int i = 0; i < len; i++) {
-                PageReference ref = children[i];
-                if (ref.page != null) {
-                    if (!ref.page.isSaved()) {
-                        throw DataUtils.newIllegalStateException(
-                                DataUtils.ERROR_INTERNAL, "Page not written");
-                    }
-                    ref.page.writeEnd();
-                    children[i] = new PageReference(ref.pos, ref.count);
-                }
+                children[i].clearPageReference();
             }
         }
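
Seen together with the writeUnsavedRecursive() change above, the new PageReference accessors replace "allocate a fresh PageReference whenever its state changes" with in-place updates: resetPos() records the position assigned while writing, and clearPageReference() then drops the in-memory Page once it is safely written, so the heap copy can be garbage collected while only the long position is kept. A simplified, self-contained model of that lifecycle (stand-in classes that merely mirror the names in the diff, not H2 code):

// Simplified stand-in for Page.PageReference to illustrate the new lifecycle:
// write page -> resetPos() records the assigned position -> clearPageReference()
// releases the in-memory page so only the long position is retained.
public class PageReferenceLifecycleSketch {

    static final class FakePage {
        long pos;                               // 0 means "not written yet"
        void save(long assignedPos) { pos = assignedPos; }
        boolean isSaved() { return pos != 0; }
    }

    static final class Ref {
        private FakePage page;                  // null once written and released
        private long pos;

        Ref(FakePage page) { this.page = page; }

        void resetPos() {                       // after the page has been written
            if (page != null) {
                pos = page.pos;
            }
        }

        void clearPageReference() {             // at the end of the save cycle
            if (page != null) {
                if (!page.isSaved()) {
                    throw new IllegalStateException("Page not written");
                }
                page = null;                    // heap copy is now collectable
            }
        }

        long getPos() { return pos; }
    }

    public static void main(String[] args) {
        FakePage p = new FakePage();
        Ref ref = new Ref(p);
        p.save(42);                             // pretend the store assigned position 42
        ref.resetPos();                         // Ref now remembers the position
        ref.clearPageReference();               // and lets go of the page itself
        System.out.println(ref.getPos());       // prints 42
    }
}
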
@@ -1181,7 +1204,7 @@ public abstract class Page implements Cloneable
                 if (i > 0) {
                     buff.append(" ");
                 }
-                buff.append("[").append(Long.toHexString(children[i].pos)).append("]");
+                buff.append("[").append(Long.toHexString(children[i].getPos())).append("]");
                 if(i < keyCount) {
                     buff.append(" ").append(getKey(i));
                 }
TestMVStoreBenchmark.java

@@ -7,10 +7,10 @@ package org.h2.test.store;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.TreeMap;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListMap;
 import java.util.concurrent.TimeUnit;

 import org.h2.mvstore.MVStore;
@@ -84,7 +84,7 @@ public class TestMVStoreBenchmark extends TestBase {
         mapList = new ArrayList<>(count);
         mem = getMemory();
         for (int i = 0; i < count; i++) {
-            mapList.add(new HashMap<Integer, String>(size));
+            mapList.add(new ConcurrentHashMap<Integer, String>(size));
         }
         addEntries(mapList, size);
         hash = getMemory() - mem;
@@ -93,7 +93,7 @@ public class TestMVStoreBenchmark extends TestBase {
         mapList.clear();
         mem = getMemory();
         for (int i = 0; i < count; i++) {
-            mapList.add(new TreeMap<Integer, String>());
+            mapList.add(new ConcurrentSkipListMap<Integer, String>());
         }
         addEntries(mapList, size);
         tree = getMemory() - mem;
@@ -150,11 +150,10 @@ public class TestMVStoreBenchmark extends TestBase {
             MVStore store = MVStore.open(null);
             map = store.openMap("test");
             mv = testPerformance(map, size);
-            map = new HashMap<>(size);
-            // map = new ConcurrentHashMap<Integer, String>(size);
+            store.close();
+            map = new ConcurrentHashMap<>(size);
             hash = testPerformance(map, size);
-            map = new TreeMap<>();
-            // map = new ConcurrentSkipListMap<Integer, String>();
+            map = new ConcurrentSkipListMap<>();
             tree = testPerformance(map, size);
             if (hash < tree && mv < tree * 1.5) {
                 break;
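
The benchmark now compares MVMap against the concurrent map implementations, and its memory figures come from sampling used heap before and after the maps are filled. The getMemory() helper itself is not part of this diff; one common way to take such a sample, offered here only as an assumption about what it does, is:

// Hedged sketch: best-effort used-heap sampling in the spirit of the test's
// getMemory() helper (whose actual implementation is outside this diff).
public class UsedMemorySketch {
    static long usedMemoryBytes() {
        Runtime rt = Runtime.getRuntime();
        System.gc();                            // only a hint; results are noisy
        return rt.totalMemory() - rt.freeMemory();
    }

    public static void main(String[] args) {
        long before = usedMemoryBytes();
        int[] data = new int[1_000_000];        // allocate something measurable
        long after = usedMemoryBytes();
        System.out.println((after - before) + " bytes, length=" + data.length);
    }
}
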