Commit eab9b833 authored by Thomas Mueller

A persistent tree map (work in progress).

Parent cefce952
@@ -45,21 +45,22 @@ public class TestTreeMapStore extends TestBase {
for (int j = 0; j < 5; j++) {
FileUtils.delete(fileName);
BtreeMapStore s = openStore(fileName);
// s.setCompressor(null);
s.setMaxPageSize(40);
RowType rowType = RowType.fromString("r(i,,)", new TestTypeFactory());
BtreeMap<Integer, Object[]> m = s.openMap("data", new IntegerType(), rowType);
int i = 0;
// long t = System.currentTimeMillis();
for (; i < len;) {
Object[] o = new Object[3];
o[0] = i;
o[1] = "Hello";
o[2] = "World";
m.put(i, o);
i++;
if (i % 10000 == 0) {
s.store();
}
Object[] o = new Object[3];
o[0] = i;
o[1] = "Hello World";
o[2] = "World";
m.put(i, o);
i++;
if (i % 10000 == 0) {
s.store();
}
}
s.store();
s.close();
......
@@ -230,8 +230,8 @@ public class BtreeMap<K, V> {
return name;
}
int getMaxPageSize() {
return store.getMaxPageSize();
BtreeMapStore getStore() {
return store;
}
int getId() {
......
@@ -18,6 +18,8 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import org.h2.compress.CompressLZF;
import org.h2.compress.Compressor;
import org.h2.dev.store.FilePathCache;
import org.h2.store.fs.FilePath;
import org.h2.store.fs.FileUtils;
@@ -48,7 +50,6 @@ Limits: there are at most 67 million chunks (each chunk is at most 2 GB large).
TODO:
- use partial page checksums
- compress chunks
- rollback feature
- support range deletes
- keep page type (leaf/node) in pos to speed up large deletes
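(Aside, not part of the commit: the limits above are consistent with a page position that packs a 26-bit chunk id and a 31-bit offset, since 2^26 = 67,108,864 ≈ 67 million and 2^31 bytes = 2 GB; under that assumed encoding the theoretical store size tops out around 2^57 bytes ≈ 128 PiB.)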
@@ -102,6 +103,8 @@ public class BtreeMapStore {
// TODO use an int instead? (with rollover to 0)
private long transaction;
private Compressor compressor = new CompressLZF();
private BtreeMapStore(String fileName, DataTypeFactory typeFactory) {
this.fileName = fileName;
this.typeFactory = typeFactory;
@@ -697,4 +700,12 @@ public class BtreeMapStore {
return maxPageSize;
}
public Compressor getCompressor() {
return compressor;
}
public void setCompressor(Compressor compressor) {
this.compressor = compressor;
}
}
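A minimal usage sketch of the new compressor hooks (not part of the commit; it assumes the store is opened through a static factory such as BtreeMapStore.open(fileName), which may be named differently in this revision, and that passing null disables compression, as the null check in Page.write below suggests):

    BtreeMapStore s = BtreeMapStore.open("test.h2m");   // hypothetical factory and file name
    Compressor def = s.getCompressor();   // the default, an org.h2.compress.CompressLZF
    s.setCompressor(new CompressLZF());   // or plug in any other Compressor implementation
    s.setCompressor(null);                // or disable page compression entirely
    s.store();
    s.close();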
@@ -118,6 +118,7 @@ public class Dump {
}
}
writer.println();
writer.flush();
}
}
@@ -10,6 +10,7 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import org.h2.compress.Compressor;
/**
* A btree page (a node or a leaf).
@@ -341,7 +342,7 @@ public class Page {
parent.setChild(parentIndex, p.pos);
}
if (!p.isLeaf()) {
if (p.keyCount() >= map.getMaxPageSize()) {
if (p.keyCount() >= map.getStore().getMaxPageSize()) {
// TODO almost duplicate code
int pos = p.keyCount() / 2;
Object k = p.keys[pos];
@@ -365,7 +366,7 @@ public class Page {
}
index = -index - 1;
p.insert(index, key, value, 0);
if (p.keyCount() >= map.getMaxPageSize()) {
if (p.keyCount() >= map.getStore().getMaxPageSize()) {
int pos = p.keyCount() / 2;
Object k = p.keys[pos];
Page split = p.splitLeaf(pos);
@@ -504,6 +505,7 @@ public class Page {
}
private void read(ByteBuffer buff, int maxLength) {
int start = buff.position();
int len = buff.getInt();
if (len > maxLength) {
throw new RuntimeException("Length too large, expected < " + maxLength + " got " + len);
@@ -512,22 +514,32 @@ public class Page {
if (mapId != map.getId()) {
throw new RuntimeException("Page pos mismatch, expected " + map.getId() + " got " + mapId);
}
boolean node = buff.get() == 1;
int type = buff.get();
boolean node = (type & 1) != 0;
boolean compressed = (type & 2) != 0;
if (compressed) {
Compressor compressor = map.getStore().getCompressor();
int lenAdd = DataUtils.readVarInt(buff);
int compLen = len + start - buff.position();
byte[] comp = new byte[compLen];
buff.get(comp);
byte[] exp = new byte[compLen + lenAdd];
compressor.expand(comp, 0, compLen, exp, 0, exp.length);
buff = ByteBuffer.wrap(exp);
}
len = DataUtils.readVarInt(buff);
keys = new Object[len];
for (int i = 0; i < len; i++) {
keys[i] = map.getKeyType().read(buff);
}
if (node) {
children = new long[len];
keys = new Object[len - 1];
for (int i = 0; i < len; i++) {
children = new long[len + 1];
for (int i = 0; i <= len; i++) {
children[i] = buff.getLong();
}
for (int i = 0; i < len - 1; i++) {
keys[i] = map.getKeyType().read(buff);
}
} else {
keys = new Object[len];
values = new Object[len];
for (int i = 0; i < len; i++) {
keys[i] = map.getKeyType().read(buff);
values[i] = map.getValueType().read(buff);
}
}
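For orientation, here is a self-contained round-trip sketch (not from the commit) of the framing that the read path above expects: bit 0 of the type byte marks a node, bit 1 marks compression, and a compressed body is preceded by a var int holding the expanded-minus-compressed length so the reader can size its buffer. The class name and the var-int helpers (stand-ins for DataUtils.writeVarInt/readVarInt) are illustrative only; the Compressor calls match the signatures used in the diff.

    import java.nio.ByteBuffer;
    import org.h2.compress.CompressLZF;

    public class PageFramingSketch {
        public static void main(String[] args) {
            byte[] body = new byte[500];                 // stand-in for a serialized leaf body
            CompressLZF lzf = new CompressLZF();
            byte[] comp = new byte[body.length * 2];
            int compLen = lzf.compress(body, body.length, comp, 0);
            ByteBuffer buff = ByteBuffer.allocate(comp.length + 8);
            buff.put((byte) 2);                          // leaf (bit 0 clear), compressed (bit 1 set)
            writeVarInt(buff, body.length - compLen);    // lenAdd = expanded - compressed
            buff.put(comp, 0, compLen);
            buff.flip();
            int type = buff.get();                       // reader side, mirroring Page.read above
            if ((type & 2) != 0) {
                int lenAdd = readVarInt(buff);
                byte[] c = new byte[buff.remaining()];
                buff.get(c);
                byte[] exp = new byte[c.length + lenAdd];
                lzf.expand(c, 0, c.length, exp, 0, exp.length);
                buff = ByteBuffer.wrap(exp);             // keys/values are then read from here
            }
        }
        static void writeVarInt(ByteBuffer b, int x) {   // LEB128-style, like DataUtils
            while ((x & ~0x7f) != 0) {
                b.put((byte) (0x80 | (x & 0x7f)));
                x >>>= 7;
            }
            b.put((byte) x);
        }
        static int readVarInt(ByteBuffer b) {
            int x = 0, shift = 0;
            byte c;
            do {
                c = b.get();
                x |= (c & 0x7f) << shift;
                shift += 7;
            } while (c < 0);
            return x;
        }
    }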
@@ -540,31 +552,44 @@ public class Page {
* @param chunkId the chunk id
*/
private void write(ByteBuffer buff, int chunkId) {
int offset = buff.position();
int start = buff.position();
buff.putInt(0);
DataUtils.writeVarInt(buff, map.getId());
if (children != null) {
buff.put((byte) 1);
int len = children.length;
DataUtils.writeVarInt(buff, len);
for (int i = 0; i < len; i++) {
Compressor compressor = map.getStore().getCompressor();
int type = children != null ? 1 : 0;
buff.put((byte) type);
int compressStart = buff.position();
int len = keys.length;
DataUtils.writeVarInt(buff, len);
for (int i = 0; i < len; i++) {
map.getKeyType().write(buff, keys[i]);
}
if (type == 1) {
for (int i = 0; i < len + 1; i++) {
buff.putLong(children[i]);
}
for (int i = 0; i < len - 1; i++) {
map.getKeyType().write(buff, keys[i]);
}
} else {
buff.put((byte) 0);
int len = keys.length;
DataUtils.writeVarInt(buff, len);
for (int i = 0; i < len; i++) {
map.getKeyType().write(buff, keys[i]);
map.getValueType().write(buff, values[i]);
}
}
int len = buff.position() - offset;
buff.putInt(offset, len);
this.pos = Page.getPos(chunkId, offset, len);
if (compressor != null) {
len = buff.position() - compressStart;
byte[] exp = new byte[len];
buff.position(compressStart);
buff.get(exp);
byte[] comp = new byte[exp.length * 2];
int compLen = compressor.compress(exp, exp.length, comp, 0);
if (compLen + DataUtils.getVarIntLen(compLen - len) < len) {
buff.position(compressStart - 1);
buff.put((byte) (type + 2));
DataUtils.writeVarInt(buff, len - compLen);
buff.put(comp, 0, compLen);
}
}
len = buff.position() - start;
buff.putInt(start, len);
this.pos = Page.getPos(chunkId, start, len);
}
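The size check in the compression branch above means a page is only rewritten in compressed form when that actually saves space: with illustrative numbers, a 1000-byte body that LZF shrinks to 600 bytes easily passes (600 plus a few bytes of var-int overhead is well under 1000), while a body LZF cannot shrink fails the test and stays in the plain layout, which is why the read path has to handle both forms.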
/**
@@ -574,24 +599,21 @@ public class Page {
*/
int getMaxLengthTempRecursive() {
int maxLength = 4 + DataUtils.MAX_VAR_INT_LEN + 1;
int len = keys.length;
maxLength += DataUtils.MAX_VAR_INT_LEN;
for (int i = 0; i < len; i++) {
maxLength += map.getKeyType().getMaxLength(keys[i]);
}
if (children != null) {
int len = children.length;
maxLength += DataUtils.MAX_VAR_INT_LEN;
maxLength += 8 * len;
for (int i = 0; i < len - 1; i++) {
maxLength += map.getKeyType().getMaxLength(keys[i]);
}
for (int i = 0; i < len; i++) {
for (int i = 0; i < len + 1; i++) {
long c = children[i];
if (c < 0) {
maxLength += map.readPage(c).getMaxLengthTempRecursive();
}
}
} else {
int len = keys.length;
maxLength += DataUtils.MAX_VAR_INT_LEN;
for (int i = 0; i < len; i++) {
maxLength += map.getKeyType().getMaxLength(keys[i]);
maxLength += map.getValueType().getMaxLength(values[i]);
}
}
......