Commit eab9b833 authored by Thomas Mueller

A persistent tree map (work in progress).

Parent cefce952
@@ -45,21 +45,22 @@ public class TestTreeMapStore extends TestBase {
         for (int j = 0; j < 5; j++) {
             FileUtils.delete(fileName);
             BtreeMapStore s = openStore(fileName);
+            // s.setCompressor(null);
             s.setMaxPageSize(40);
             RowType rowType = RowType.fromString("r(i,,)", new TestTypeFactory());
             BtreeMap<Integer, Object[]> m = s.openMap("data", new IntegerType(), rowType);
             int i = 0;
             // long t = System.currentTimeMillis();
             for (; i < len;) {
                 Object[] o = new Object[3];
                 o[0] = i;
-                o[1] = "Hello";
+                o[1] = "Hello World";
                 o[2] = "World";
                 m.put(i, o);
                 i++;
                 if (i % 10000 == 0) {
                     s.store();
                 }
             }
             s.store();
             s.close();

@@ -230,8 +230,8 @@ public class BtreeMap<K, V> {
         return name;
     }
 
-    int getMaxPageSize() {
-        return store.getMaxPageSize();
+    BtreeMapStore getStore() {
+        return store;
     }
 
     int getId() {

@@ -18,6 +18,8 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Properties;
+import org.h2.compress.CompressLZF;
+import org.h2.compress.Compressor;
 import org.h2.dev.store.FilePathCache;
 import org.h2.store.fs.FilePath;
 import org.h2.store.fs.FileUtils;
@@ -48,7 +50,6 @@ Limits: there are at most 67 million chunks (each chunk is at most 2 GB large).
 
 TODO:
 - use partial page checksums
-- compress chunks
 - rollback feature
 - support range deletes
 - keep page type (leaf/node) in pos to speed up large deletes
@@ -102,6 +103,8 @@ public class BtreeMapStore {
     // TODO use an int instead? (with rollover to 0)
     private long transaction;
 
+    private Compressor compressor = new CompressLZF();
+
     private BtreeMapStore(String fileName, DataTypeFactory typeFactory) {
         this.fileName = fileName;
         this.typeFactory = typeFactory;
@@ -697,4 +700,12 @@ public class BtreeMapStore {
         return maxPageSize;
     }
 
+    public Compressor getCompressor() {
+        return compressor;
+    }
+
+    public void setCompressor(Compressor compressor) {
+        this.compressor = compressor;
+    }
+
 }
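These accessors make the page compressor pluggable per store, with CompressLZF as the default (see the field added above). A minimal usage sketch, assuming the openStore(...) helper seen in TestTreeMapStore; passing null disables compression, which is safe because the write path guards with a compressor != null check:

    // Sketch: configuring the compressor (openStore as used in the test above).
    BtreeMapStore s = openStore(fileName);
    s.setCompressor(null);               // disable page compression; pages are then
                                         // written with the plain (uncompressed) type byte
    s.setCompressor(new CompressLZF());  // or swap in any org.h2.compress.Compressor
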
@@ -118,6 +118,7 @@ public class Dump {
             }
         }
         writer.println();
+        writer.flush();
     }
 
 }
@@ -10,6 +10,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 import java.util.ArrayList;
+import org.h2.compress.Compressor;
 
 /**
  * A btree page (a node or a leaf).
@@ -341,7 +342,7 @@ public class Page {
             parent.setChild(parentIndex, p.pos);
         }
         if (!p.isLeaf()) {
-            if (p.keyCount() >= map.getMaxPageSize()) {
+            if (p.keyCount() >= map.getStore().getMaxPageSize()) {
                 // TODO almost duplicate code
                 int pos = p.keyCount() / 2;
                 Object k = p.keys[pos];
@@ -365,7 +366,7 @@ public class Page {
         }
         index = -index - 1;
         p.insert(index, key, value, 0);
-        if (p.keyCount() >= map.getMaxPageSize()) {
+        if (p.keyCount() >= map.getStore().getMaxPageSize()) {
            int pos = p.keyCount() / 2;
            Object k = p.keys[pos];
            Page split = p.splitLeaf(pos);
@@ -504,6 +505,7 @@
     }
 
     private void read(ByteBuffer buff, int maxLength) {
+        int start = buff.position();
         int len = buff.getInt();
         if (len > maxLength) {
             throw new RuntimeException("Length too large, expected < " + maxLength + " got " + len);
@@ -512,22 +514,32 @@
         if (mapId != map.getId()) {
             throw new RuntimeException("Page pos mismatch, expected " + map.getId() + " got " + mapId);
         }
-        boolean node = buff.get() == 1;
+        int type = buff.get();
+        boolean node = (type & 1) != 0;
+        boolean compressed = (type & 2) != 0;
+        if (compressed) {
+            Compressor compressor = map.getStore().getCompressor();
+            int lenAdd = DataUtils.readVarInt(buff);
+            int compLen = len + start - buff.position();
+            byte[] comp = new byte[compLen];
+            buff.get(comp);
+            byte[] exp = new byte[compLen + lenAdd];
+            compressor.expand(comp, 0, compLen, exp, 0, exp.length);
+            buff = ByteBuffer.wrap(exp);
+        }
         len = DataUtils.readVarInt(buff);
+        keys = new Object[len];
+        for (int i = 0; i < len; i++) {
+            keys[i] = map.getKeyType().read(buff);
+        }
         if (node) {
-            children = new long[len];
-            keys = new Object[len - 1];
-            for (int i = 0; i < len; i++) {
+            children = new long[len + 1];
+            for (int i = 0; i <= len; i++) {
                 children[i] = buff.getLong();
             }
-            for (int i = 0; i < len - 1; i++) {
-                keys[i] = map.getKeyType().read(buff);
-            }
         } else {
-            keys = new Object[len];
             values = new Object[len];
             for (int i = 0; i < len; i++) {
-                keys[i] = map.getKeyType().read(buff);
                 values[i] = map.getValueType().read(buff);
             }
         }
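The type byte now carries two flags: bit 0 marks a node page (child pointers follow the keys) and bit 1 marks a compressed body. For a compressed page the on-disk layout, as read above, is a 4-byte total length, a varint map id, the type byte, a varint holding how many bytes expansion adds, then the compressed body. A hedged sketch of just the decompression step in isolation (the helper name is illustrative, not from the commit; DataUtils and Compressor are as used in the diff):

    // Sketch: expanding one compressed page body (illustrative helper name).
    static ByteBuffer expandPageBody(ByteBuffer buff, int start, int len, Compressor compressor) {
        // 'start' is where the 4-byte length field began; the page ends at start + len
        int lenAdd = DataUtils.readVarInt(buff);      // expanded size minus compressed size
        int compLen = len + start - buff.position();  // remaining bytes are the compressed body
        byte[] comp = new byte[compLen];
        buff.get(comp);
        byte[] exp = new byte[compLen + lenAdd];      // exact expanded size, needed by LZF expand
        compressor.expand(comp, 0, compLen, exp, 0, exp.length);
        return ByteBuffer.wrap(exp);                  // keys/children/values are read from here
    }
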
@@ -540,31 +552,44 @@
      * @param chunkId the chunk id
      */
     private void write(ByteBuffer buff, int chunkId) {
-        int offset = buff.position();
+        int start = buff.position();
         buff.putInt(0);
         DataUtils.writeVarInt(buff, map.getId());
-        if (children != null) {
-            buff.put((byte) 1);
-            int len = children.length;
-            DataUtils.writeVarInt(buff, len);
-            for (int i = 0; i < len; i++) {
+        Compressor compressor = map.getStore().getCompressor();
+        int type = children != null ? 1 : 0;
+        buff.put((byte) type);
+        int compressStart = buff.position();
+        int len = keys.length;
+        DataUtils.writeVarInt(buff, len);
+        for (int i = 0; i < len; i++) {
+            map.getKeyType().write(buff, keys[i]);
+        }
+        if (type == 1) {
+            for (int i = 0; i < len + 1; i++) {
                 buff.putLong(children[i]);
             }
-            for (int i = 0; i < len - 1; i++) {
-                map.getKeyType().write(buff, keys[i]);
-            }
         } else {
-            buff.put((byte) 0);
-            int len = keys.length;
-            DataUtils.writeVarInt(buff, len);
             for (int i = 0; i < len; i++) {
-                map.getKeyType().write(buff, keys[i]);
                 map.getValueType().write(buff, values[i]);
             }
         }
-        int len = buff.position() - offset;
-        buff.putInt(offset, len);
-        this.pos = Page.getPos(chunkId, offset, len);
+        if (compressor != null) {
+            len = buff.position() - compressStart;
+            byte[] exp = new byte[len];
+            buff.position(compressStart);
+            buff.get(exp);
+            byte[] comp = new byte[exp.length * 2];
+            int compLen = compressor.compress(exp, exp.length, comp, 0);
+            if (compLen + DataUtils.getVarIntLen(compLen - len) < len) {
+                buff.position(compressStart - 1);
+                buff.put((byte) (type + 2));
+                DataUtils.writeVarInt(buff, len - compLen);
+                buff.put(comp, 0, compLen);
+            }
+        }
+        len = buff.position() - start;
+        buff.putInt(start, len);
+        this.pos = Page.getPos(chunkId, start, len);
     }
 
     /**
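Compression is applied opportunistically: the body is serialized uncompressed first, compressed into a scratch array, and only written back (type byte raised by 2, size delta prepended as a varint) when that actually saves space. A rough worked example of the payoff check, with illustrative numbers; note the commit passes compLen - len to getVarIntLen, while the delta actually written is the positive len - compLen used here:

    // Sketch (illustrative numbers, not from the commit):
    int len = 400;      // uncompressed body length in bytes
    int compLen = 180;  // length after CompressLZF
    int overhead = DataUtils.getVarIntLen(len - compLen); // varint for the size delta
    if (compLen + overhead < len) {
        // 180 + 2 < 400: the compressed form wins, saving 218 bytes on this page
    }
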
@@ -574,24 +599,21 @@
      */
     int getMaxLengthTempRecursive() {
         int maxLength = 4 + DataUtils.MAX_VAR_INT_LEN + 1;
+        int len = keys.length;
+        maxLength += DataUtils.MAX_VAR_INT_LEN;
+        for (int i = 0; i < len; i++) {
+            maxLength += map.getKeyType().getMaxLength(keys[i]);
+        }
         if (children != null) {
-            int len = children.length;
-            maxLength += DataUtils.MAX_VAR_INT_LEN;
             maxLength += 8 * len;
-            for (int i = 0; i < len - 1; i++) {
-                maxLength += map.getKeyType().getMaxLength(keys[i]);
-            }
-            for (int i = 0; i < len; i++) {
+            for (int i = 0; i < len + 1; i++) {
                 long c = children[i];
                 if (c < 0) {
                     maxLength += map.readPage(c).getMaxLengthTempRecursive();
                 }
             }
         } else {
-            int len = keys.length;
-            maxLength += DataUtils.MAX_VAR_INT_LEN;
             for (int i = 0; i < len; i++) {
-                maxLength += map.getKeyType().getMaxLength(keys[i]);
                 maxLength += map.getValueType().getMaxLength(values[i]);
             }
         }
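The worst-case estimate mirrors the write format: 4 bytes for the length field, a varint for the map id, one type byte, a varint for the key count, the per-key maxima, then either 8 bytes per child pointer (recursing into not-yet-stored children, encoded as negative positions) or the per-value maxima. Compression is ignored here, since the write path only keeps the compressed form when it is strictly smaller. A hedged back-of-envelope for a leaf, assuming MAX_VAR_INT_LEN is 5 and illustrative key/value bounds:

    // Sketch (illustrative numbers): bounding a leaf with 40 integer keys
    // and 40 string values of at most 32 encoded bytes each.
    int header = 4 + DataUtils.MAX_VAR_INT_LEN + 1;   // length field, map id varint, type byte
    int count  = DataUtils.MAX_VAR_INT_LEN;           // varint key count
    int keyMax = 40 * 5;                              // assumed <= 5 bytes per encoded int key
    int valMax = 40 * 32;                             // assumed per-value bound
    int maxLength = header + count + keyMax + valMax; // 10 + 5 + 200 + 1280 = 1495 bytes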