Commit cefce952 authored by Thomas Mueller

A persistent tree map (work in progress).

Parent 4a0120b8
......@@ -103,6 +103,8 @@ import org.h2.test.server.TestAutoServer;
import org.h2.test.server.TestNestedLoop;
import org.h2.test.server.TestWeb;
import org.h2.test.server.TestInit;
import org.h2.test.store.TestCacheLIRS;
import org.h2.test.store.TestDataUtils;
import org.h2.test.store.TestTreeMapStore;
import org.h2.test.synth.TestBtreeIndex;
import org.h2.test.synth.TestCrashAPI;
......@@ -662,8 +664,9 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
private void testUnit() {
// store
new TestCacheLIRS().runTest(this);
new TestTreeMapStore().runTest(this);
new TestCache().runTest(this);
new TestDataUtils().runTest(this);
// unit
new TestAutoReconnect().runTest(this);
......
......@@ -12,13 +12,12 @@ import java.util.Map.Entry;
import java.util.Random;
import org.h2.dev.store.btree.CacheLIRS;
import org.h2.test.TestBase;
import org.h2.upgrade.v1_1.util.Profiler;
import org.h2.util.New;
/**
* Tests the cache algorithm.
*/
public class TestCache extends TestBase {
public class TestCacheLIRS extends TestBase {
/**
* Run just this test.
......@@ -30,8 +29,6 @@ public class TestCache extends TestBase {
}
public void test() throws Exception {
Profiler p = new Profiler();
p.startCollecting();
testEdgeCases();
testSize();
testClear();
......@@ -42,7 +39,6 @@ public class TestCache extends TestBase {
testBadHashMethod();
testScanResistance();
testRandomOperations();
System.out.println(p.getTop(5));
}
private void testEdgeCases() {
......
/*
* Copyright 2004-2011 H2 Group. Multiple-Licensed under the H2 License, Version
* 1.0, and under the Eclipse Public License, Version 1.0
* (http://h2database.com/html/license.html). Initial Developer: H2 Group
*/
package org.h2.test.store;
import org.h2.dev.store.btree.Page;
import org.h2.test.TestBase;
/**
* Test utility classes.
*/
public class TestDataUtils extends TestBase {
/**
* Run just this test.
*
* @param a ignored
*/
public static void main(String... a) throws Exception {
TestBase.createCaller().init().test();
}
public void test() throws Exception {
testPagePos();
}
private void testPagePos() {
int lastCode = 0;
assertEquals(0, Page.encodeLength(32));
assertEquals(1, Page.encodeLength(33));
assertEquals(1, Page.encodeLength(48));
assertEquals(2, Page.encodeLength(49));
assertEquals(30, Page.encodeLength(1024 * 1024));
assertEquals(31, Page.encodeLength(1024 * 1024 + 1));
for (int i = 1024 * 1024 + 1; i < 100 * 1024 * 1024; i += 1024) {
int code = Page.encodeLength(i);
assertEquals(31, code);
}
for (int i = 0; i < 1024 * 1024; i++) {
int code = Page.encodeLength(i);
assertTrue(code <= 31 && code >= 0);
assertTrue(code >= lastCode);
if (code > lastCode) {
lastCode = code;
}
int max = Page.getMaxLength(code);
assertTrue(max >= i && max >= 32);
}
}
}
......@@ -17,13 +17,13 @@ import java.util.Iterator;
public class BtreeMap<K, V> {
private final BtreeMapStore store;
private final long id;
private final int id;
private final String name;
private final DataType keyType;
private final DataType valueType;
private Page root;
private BtreeMap(BtreeMapStore store, long id, String name, DataType keyType, DataType valueType) {
private BtreeMap(BtreeMapStore store, int id, String name, DataType keyType, DataType valueType) {
this.store = store;
this.id = id;
this.name = name;
......@@ -43,7 +43,7 @@ public class BtreeMap<K, V> {
* @param valueClass the value class
* @return the map
*/
static <K, V> BtreeMap<K, V> open(BtreeMapStore store, long id, String name, DataType keyType, DataType valueType) {
static <K, V> BtreeMap<K, V> open(BtreeMapStore store, int id, String name, DataType keyType, DataType valueType) {
return new BtreeMap<K, V>(store, id, name, keyType, valueType);
}
......@@ -96,6 +96,14 @@ public class BtreeMap<K, V> {
}
}
/**
* Remove all entries, and remove the map.
*/
public void remove() {
clear();
store.removeMap(id);
}
/**
* Remove a key-value pair.
*
......@@ -114,7 +122,7 @@ public class BtreeMap<K, V> {
* @return true if yes
*/
boolean isChanged() {
return root != null && root.getId() < 0;
return root != null && root.getPos() < 0;
}
private void markChanged() {
......@@ -167,22 +175,22 @@ public class BtreeMap<K, V> {
}
/**
* Read a node.
* Read a page.
*
* @param id the node id
* @return the node
* @param pos the position of the page
* @return the page
*/
Page readPage(long id) {
return store.readPage(this, id);
Page readPage(long pos) {
return store.readPage(this, pos);
}
/**
* Remove a node.
* Remove a page.
*
* @param id the node id
* @param pos the position of the page
*/
void removePage(long id) {
store.removePage(id);
void removePage(long pos) {
store.removePage(pos);
}
/**
......@@ -190,7 +198,7 @@ public class BtreeMap<K, V> {
*
* @param rootPos the position
*/
void setRoot(long rootPos) {
void setRootPos(long rootPos) {
root = readPage(rootPos);
}
......@@ -205,9 +213,9 @@ public class BtreeMap<K, V> {
}
/**
* Get the root node.
* Get the root page.
*
* @return the root node
* @return the root page
*/
Page getRoot() {
return root;
......@@ -226,7 +234,7 @@ public class BtreeMap<K, V> {
return store.getMaxPageSize();
}
long getId() {
int getId() {
return id;
}
......
......@@ -40,21 +40,31 @@ chunk:
1 byte: 'c'
4 bytes: length
4 bytes: chunk id (an incrementing number)
4 bytes: metaRootPos (relative to the chunk start)
8 bytes: metaRootPos
data ...
todo:
Limits: there are at most 67 million chunks (each chunk is at most 2 GB large); see the sketch after this comment for where these numbers come from.
- use page checksums
TODO:
- use partial page checksums
- compress chunks
- possibly encode the length in pos (1=32, 2=128, 3=512,...)
- rollback feature
- support range deletes
- keep page type (leaf/node) in pos to speed up large deletes
- floating header (avoid duplicate header)
for each chunk, store chunk (a counter)
for each page, store chunk id and offset to root
for each chunk, store position of expected next chunks
- support reading metadata to copy all data,
- support quota (per map, per storage)
- support r-tree, kd-tree
- map ids should be per chunk, to ensure uniqueness
*/
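A minimal sketch (not part of this commit; the class name and values are illustrative) of why the limits above hold: a page position packs the chunk id, the offset within the chunk, and a length code, mirroring Page.getPos, getChunkId and getOffset defined in Page.java later in this diff.

public class PagePosDemo {
    public static void main(String[] args) {
        int chunkId = 5, offset = 4096, lengthCode = 1; // code 1 allows pages of up to 48 bytes
        long pos = ((long) chunkId << 37) | ((long) offset << 5) | lengthCode;
        // chunk id: bits 37..62, so 2^26 = 67,108,864 ("67 million") chunks; bit 63 stays
        // clear because negative positions are reserved for uncommitted temporary pages
        System.out.println((int) (pos >>> 37)); // 5
        // offset: bits 5..36, read back as a signed int, hence at most 2 GB per chunk
        System.out.println((int) (pos >> 5));   // 4096
        // low 5 bits: the length code, decoded by Page.getMaxLength
        System.out.println(pos & 31);           // 1
    }
}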
/**
......@@ -73,7 +83,7 @@ public class BtreeMapStore {
private FileChannel file;
private int blockSize = 4 * 1024;
private long rootChunkPos;
private long rootChunkStart;
private int tempPageId;
private Map<Long, Page> cache = CacheLIRS.newInstance(readCacheSize, 2048);
......@@ -84,16 +94,14 @@ public class BtreeMapStore {
// TODO use bit set, and integer instead of long
private BtreeMap<String, String> meta;
private long lastMapId;
private HashMap<String, BtreeMap<?, ?>> maps = New.hashMap();
private HashMap<String, BtreeMap<?, ?>> mapsChanged = New.hashMap();
private int mapIdMin;
private BitSet mapIds = new BitSet();
// TODO use an int instead? (with rollover to 0)
private long transaction;
// TODO support reading metadata to support quota (per map, per storage)
// TODO support r-tree
private BtreeMapStore(String fileName, DataTypeFactory typeFactory) {
this.fileName = fileName;
this.typeFactory = typeFactory;
......@@ -137,17 +145,17 @@ public class BtreeMapStore {
BtreeMap<K, V> m = (BtreeMap<K, V>) maps.get(name);
if (m == null) {
String identifier = meta.get("map." + name);
long id;
int id;
String root;
if (identifier == null) {
id = ++lastMapId;
id = nextMapId();
String types = id + "/" + keyType.asString() + "/" + valueType.asString();
meta.put("map." + name, types);
root = null;
} else {
String types = meta.get("map." + name);
String[] idTypeList = StringUtils.arraySplit(types, '/', false);
id = Long.parseLong(idTypeList[0]);
id = Integer.parseInt(idTypeList[0]);
keyType = getDataType(idTypeList[1]);
valueType = getDataType(idTypeList[2]);
root = meta.get("root." + id);
......@@ -155,12 +163,26 @@ public class BtreeMapStore {
m = BtreeMap.open(this, id, name, keyType, valueType);
maps.put(name, m);
if (root != null && !"0".equals(root)) {
m.setRoot(Long.parseLong(root));
m.setRootPos(Long.parseLong(root));
}
}
return m;
}
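/**
 * Allocate the next free map id: the lowest id (at or above the current
 * minimum) that is not marked as in use and has no remaining root entry in
 * the metadata.
 *
 * @return the new map id
 */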
private int nextMapId() {
int result;
while (true) {
result = mapIds.nextClearBit(mapIdMin);
mapIds.set(result);
// TODO need to check in oldest
if (meta.get("root." + result) == null) {
break;
}
}
mapIdMin = result;
return result;
}
/**
* Open a map.
*
......@@ -177,6 +199,11 @@ public class BtreeMapStore {
return openMap(name, keyType, valueType);
}
void removeMap(int id) {
mapIds.clear(id);
mapIdMin = Math.min(id, mapIdMin);
}
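For orientation, a sketch (not from the commit; the example values are illustrative) of the keys this class keeps in the metadata map, following openMap and nextMapId above and readMeta and store below:

// map.<name>  -> "<mapId>/<keyType>/<valueType>"          written by openMap
// root.<id>   -> position of the map's root page ("0" or absent when the map is empty)
// chunk.<id>  -> serialized chunk metadata, as produced by Chunk.toString()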
private DataType getDataType(Class<?> clazz) {
if (clazz == String.class) {
return STRING_TYPE;
......@@ -228,10 +255,10 @@ public class BtreeMapStore {
}
private void readMeta() {
Chunk header = readChunkHeader(rootChunkPos);
Chunk header = readChunkHeader(rootChunkStart);
lastChunkId = header.id;
chunks.put(header.id, header);
meta.setRoot(getId(header.id, header.metaRootOffset));
meta.setRootPos(header.metaRootPos);
Iterator<String> it = meta.keyIterator("chunk.");
while (it.hasNext()) {
String s = it.next();
......@@ -242,7 +269,7 @@ public class BtreeMapStore {
if (c.id == header.id) {
c.start = header.start;
c.length = header.length;
c.metaRootOffset = header.metaRootOffset;
c.metaRootPos = header.metaRootPos;
}
lastChunkId = Math.max(c.id, lastChunkId);
chunks.put(c.id, c);
......@@ -256,8 +283,8 @@ public class BtreeMapStore {
"versionRead:1\n" +
"versionWrite:1\n" +
"blockSize:" + blockSize + "\n" +
"rootChunk:" + rootChunkPos + "\n" +
"lastMapId:" + lastMapId + "\n" +
"rootChunk:" + rootChunkStart + "\n" +
"lastMapId:" + mapIdMin + "\n" +
"transaction:" + transaction + "\n").getBytes("UTF-8"));
file.position(0);
file.write(header);
......@@ -276,9 +303,9 @@ public class BtreeMapStore {
file.read(ByteBuffer.wrap(header));
Properties prop = new Properties();
prop.load(new StringReader(new String(header, "UTF-8")));
rootChunkPos = Long.parseLong(prop.get("rootChunk").toString());
rootChunkStart = Long.parseLong(prop.get("rootChunk").toString());
transaction = Long.parseLong(prop.get("transaction").toString());
lastMapId = Long.parseLong(prop.get("lastMapId").toString());
mapIdMin = Integer.parseInt(prop.get("lastMapId").toString());
} catch (Exception e) {
throw convert(e);
}
......@@ -303,18 +330,18 @@ public class BtreeMapStore {
}
}
private long getPosition(long posId) {
Chunk c = getChunk(posId);
if (c == null) {
throw new RuntimeException("Chunk " + getChunkId(posId) + " not found");
}
long pos = c.start;
pos += (int) (posId & Integer.MAX_VALUE);
return pos;
private Chunk getChunk(long pos) {
return chunks.get(Page.getChunkId(pos));
}
private static long getId(int chunkId, int offset) {
return ((long) chunkId << 32) | offset;
private long getFilePosition(long pos) {
Chunk c = getChunk(pos);
if (c == null) {
throw new RuntimeException("Chunk " + Page.getChunkId(pos) + " not found");
}
long filePos = c.start;
filePos += Page.getOffset(pos);
return filePos;
}
/**
......@@ -351,7 +378,7 @@ public class BtreeMapStore {
chunks.remove(x);
}
int count = 0;
int maxLength = 1 + 4 + 4 + 4;
int maxLength = 1 + 4 + 4 + 8;
for (BtreeMap<?, ?> m : mapsChanged.values()) {
Page p = m.getRoot();
if (p != null) {
......@@ -370,12 +397,11 @@ public class BtreeMapStore {
buff.put((byte) 'c');
buff.putInt(0);
buff.putInt(0);
buff.putInt(0);
long idOffset = getId(chunkId, 0);
buff.putLong(0);
for (BtreeMap<?, ?> m : mapsChanged.values()) {
Page p = m.getRoot();
if (p != null) {
long root = p.writeTempRecursive(buff, idOffset);
long root = p.writeTempRecursive(buff, chunkId);
meta.put("root." + m.getId(), "" + root);
}
}
......@@ -385,33 +411,32 @@ public class BtreeMapStore {
c.liveCount = count;
meta.put("chunk." + c.id, c.toString());
meta.getRoot().writeTempRecursive(buff, idOffset);
meta.getRoot().writeTempRecursive(buff, chunkId);
buff.flip();
int length = buff.limit();
long storePos = allocateChunk(length);
int rootOffset = (int) (meta.getRoot().getId() - idOffset);
long filePos = allocateChunk(length);
buff.rewind();
buff.put((byte) 'c');
buff.putInt(length);
buff.putInt(chunkId);
buff.putInt(rootOffset);
buff.putLong(meta.getRoot().getPos());
buff.rewind();
try {
file.position(storePos);
file.position(filePos);
file.write(buff);
} catch (IOException e) {
throw new RuntimeException(e);
}
rootChunkPos = storePos;
rootChunkStart = filePos;
writeHeader();
mapsChanged.clear();
temp.clear();
tempPageId = 0;
// update the start position and length
c.start = storePos;
c.start = filePos;
c.length = length;
meta.put("chunk." + c.id, c.toString());
......@@ -478,22 +503,22 @@ public class BtreeMapStore {
return ++transaction;
}
private Chunk readChunkHeader(long pos) {
private Chunk readChunkHeader(long start) {
try {
file.position(pos);
ByteBuffer buff = ByteBuffer.wrap(new byte[16]);
file.read(buff);
file.position(start);
ByteBuffer buff = ByteBuffer.wrap(new byte[32]);
DataUtils.readFully(file, buff);
buff.rewind();
if (buff.get() != 'c') {
throw new RuntimeException("File corrupt");
}
int length = buff.getInt();
int chunkId = buff.getInt();
int offset = buff.getInt();
long metaRootPos = buff.getLong();
Chunk c = new Chunk(chunkId);
c.start = pos;
c.start = start;
c.length = length;
c.metaRootOffset = offset;
c.metaRootPos = metaRootPos;
return c;
} catch (IOException e) {
throw new RuntimeException(e);
......@@ -552,9 +577,9 @@ public class BtreeMapStore {
}
}
Chunk header = readChunkHeader(move.start);
log(" meta:" + move.id + "/" + header.metaRootOffset + " start: " + move.start);
log(" meta:" + move.id + "/" + header.metaRootPos + " start: " + move.start);
BtreeMap<String, String> oldMeta = BtreeMap.open(this, 0, "old-meta", STRING_TYPE, STRING_TYPE);
oldMeta.setRoot(getId(header.id, header.metaRootOffset));
oldMeta.setRootPos(header.metaRootPos);
Iterator<String> it = oldMeta.keyIterator(null);
ArrayList<Integer> oldChunks = New.arrayList();
while (it.hasNext()) {
......@@ -577,7 +602,7 @@ public class BtreeMapStore {
continue;
}
String[] idTypesList = StringUtils.arraySplit(s, '/', false);
long id = Long.parseLong(idTypesList[0]);
int id = Integer.parseInt(idTypesList[0]);
DataType kt = getDataType(idTypesList[1]);
DataType vt = getDataType(idTypesList[2]);
long oldDataRoot = Long.parseLong(oldMeta.get("root." + id));
......@@ -585,7 +610,7 @@ public class BtreeMapStore {
if (oldDataRoot == 0) {
// no rows
} else {
oldData.setRoot(oldDataRoot);
oldData.setRootPos(oldDataRoot);
@SuppressWarnings("unchecked")
BtreeMap<Object, Object> data = (BtreeMap<Object, Object>) maps.get(k);
Iterator<?> dataIt = oldData.keyIterator(null);
......@@ -594,11 +619,11 @@ public class BtreeMapStore {
Page p = data.getPage(o);
if (p == null) {
// was removed later - ignore
} else if (p.getId() < 0) {
} else if (p.getPos() < 0) {
// temporarily changed - ok
// TODO move old data if changed temporarily?
// TODO move old data if there is an uncommitted change?
} else {
Chunk c = getChunk(p.getId());
Chunk c = getChunk(p.getPos());
if (old.contains(c)) {
log(" move key:" + o + " chunk:" + c.id);
Object value = data.get(o);
......@@ -618,32 +643,18 @@ public class BtreeMapStore {
* Read a page.
*
* @param map the map
* @param id the page id
* @param pos the page position
* @return the page
*/
Page readPage(BtreeMap<?, ?> map, long id) {
if (id < 0) {
return temp.get(id);
Page readPage(BtreeMap<?, ?> map, long pos) {
if (pos < 0) {
return temp.get(pos);
}
Page p = cache.get(id);
Page p = cache.get(pos);
if (p == null) {
try {
long pos = getPosition(id);
file.position(pos);
ByteBuffer buff = ByteBuffer.wrap(new byte[8 * 1024]);
// TODO read fully; read only required bytes
do {
int len = file.read(buff);
if (len < 0) {
break;
}
} while (buff.remaining() > 0);
buff.rewind();
p = Page.read(map, id, buff);
} catch (Exception e) {
throw new RuntimeException(e);
}
cache.put(id, p);
long filePos = getFilePosition(pos);
p = Page.read(file, map, filePos, pos);
cache.put(pos, p);
}
return p;
}
......@@ -651,31 +662,23 @@ public class BtreeMapStore {
/**
* Remove a page.
*
* @param id the page id
* @param pos the position of the page
*/
void removePage(long id) {
if (id > 0) {
cache.remove(id);
if (getChunk(id).liveCount == 0) {
throw new RuntimeException("Negative live count: " + id);
void removePage(long pos) {
if (pos > 0) {
cache.remove(pos);
if (getChunk(pos).liveCount == 0) {
throw new RuntimeException("Negative live count: " + pos);
}
getChunk(id).liveCount--;
getChunk(pos).liveCount--;
} else {
temp.remove(id);
temp.remove(pos);
if (temp.size() == 0) {
tempPageId = 0;
}
}
}
private static int getChunkId(long pos) {
return (int) (pos >>> 32);
}
private Chunk getChunk(long pos) {
return chunks.get(getChunkId(pos));
}
/**
* Log the string, if logging is enabled.
*
......
......@@ -46,9 +46,9 @@ class Chunk {
int collectPriority;
/**
* The offset of the meta root.
* The position of the meta root.
*/
int metaRootOffset;
long metaRootPos;
Chunk(int id) {
this.id = id;
......@@ -70,7 +70,7 @@ class Chunk {
c.length = Long.parseLong(prop.get("length").toString());
c.entryCount = Integer.parseInt(prop.get("entryCount").toString());
c.liveCount = Integer.parseInt(prop.get("liveCount").toString());
c.metaRootOffset = Integer.parseInt(prop.get("metaRoot").toString());
c.metaRootPos = Long.parseLong(prop.get("metaRoot").toString());
return c;
} catch (IOException e) {
throw new RuntimeException(e);
......@@ -96,7 +96,7 @@ class Chunk {
"length:" + length + "\n" +
"entryCount:" + entryCount + "\n" +
"liveCount:" + liveCount + "\n" +
"metaRoot:" + metaRootOffset + "\n";
"metaRoot:" + metaRootPos + "\n";
}
}
......
......@@ -6,7 +6,9 @@
*/
package org.h2.dev.store.btree;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
/**
* Utility methods
......@@ -172,4 +174,14 @@ public class DataUtils {
}
}
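/**
 * Read from the file into the buffer until the buffer is full or the end of
 * the file has been reached, then rewind the buffer.
 *
 * @param file the file channel to read from
 * @param buff the target buffer
 */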
static void readFully(FileChannel file, ByteBuffer buff) throws IOException {
do {
int len = file.read(buff);
if (len < 0) {
break;
}
} while (buff.remaining() > 0);
buff.rewind();
}
}
......@@ -6,7 +6,9 @@
*/
package org.h2.dev.store.btree;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
/**
......@@ -18,7 +20,7 @@ import java.util.ArrayList;
public class Page {
private final BtreeMap<?, ?> map;
private long id;
private long pos;
private long transaction;
private Object[] keys;
private Object[] values;
......@@ -44,7 +46,7 @@ public class Page {
p.values = values;
p.children = children;
p.transaction = map.getTransaction();
p.id = map.registerTempPage(p);
p.pos = map.registerTempPage(p);
return p;
}
......@@ -52,25 +54,38 @@ public class Page {
* Read a page.
*
* @param map the map
* @param id the page id
* @param pos the page position
* @param buff the source buffer
* @return the page
*/
static Page read(BtreeMap<?, ?> map, long id, ByteBuffer buff) {
static Page read(FileChannel file, BtreeMap<?, ?> map, long filePos, long pos) {
int maxLength = Page.getMaxLength(pos), length = maxLength;
ByteBuffer buff;
try {
file.position(filePos);
// length code 31 means "more than 1 MB": probe the first bytes of the page to
// learn the actual length, then re-read the whole page from the start
if (maxLength == Integer.MAX_VALUE) {
buff = ByteBuffer.wrap(new byte[128]);
DataUtils.readFully(file, buff);
maxLength = buff.getInt();
// use the length stored in the page header, otherwise new byte[length] below
// would try to allocate Integer.MAX_VALUE bytes
length = maxLength;
file.position(filePos);
}
buff = ByteBuffer.wrap(new byte[length]);
DataUtils.readFully(file, buff);
} catch (IOException e) {
throw new RuntimeException(e);
}
Page p = new Page(map);
p.id = id;
p.read(buff);
p.pos = pos;
p.read(buff, maxLength);
return p;
}
private Page copyOnWrite() {
// TODO avoid creating objects (arrays) that are then not used
// possibly add shortcut for copy with add / copy with remove
long t = map.getTransaction();
if (transaction == t) {
return this;
}
map.removePage(id);
map.removePage(pos);
Page newPage = create(map, keys, values, children);
newPage.transaction = t;
newPage.cachedCompare = cachedCompare;
......@@ -79,7 +94,7 @@ public class Page {
public String toString() {
StringBuilder buff = new StringBuilder();
buff.append("nodeId: ").append(id).append("\n");
buff.append("pos: ").append(pos).append("\n");
for (int i = 0; i <= keys.length; i++) {
if (i > 0) {
buff.append(" ");
......@@ -99,12 +114,12 @@ public class Page {
}
/**
* Get the page id.
* Get the position of the page
*
* @return the page id
* @return the position
*/
long getId() {
return id;
long getPos() {
return pos;
}
/**
......@@ -236,7 +251,7 @@ public class Page {
return null;
}
while (true) {
// TODO avoid remove/add pairs if possible
// TODO performance: avoid remove/add pairs if possible
CursorPos p = parents.remove(parents.size() - 1);
int index = p.index++;
if (index < p.page.keys.length) {
......@@ -323,7 +338,7 @@ public class Page {
int parentIndex = 0;
while (true) {
if (parent != null) {
parent.setChild(parentIndex, p.id);
parent.setChild(parentIndex, p.pos);
}
if (!p.isLeaf()) {
if (p.keyCount() >= map.getMaxPageSize()) {
......@@ -333,11 +348,11 @@ public class Page {
Page split = p.splitNode(pos);
if (parent == null) {
Object[] keys = { k };
long[] children = { p.getId(), split.getId() };
long[] children = { p.getPos(), split.getPos() };
top = create(map, keys, null, children);
p = top;
} else {
parent.insert(parentIndex, k, null, split.getId());
parent.insert(parentIndex, k, null, split.getPos());
p = parent;
}
}
......@@ -356,10 +371,10 @@ public class Page {
Page split = p.splitLeaf(pos);
if (parent == null) {
Object[] keys = { k };
long[] children = { p.getId(), split.getId() };
long[] children = { p.getPos(), split.getPos() };
top = create(map, keys, null, children);
} else {
parent.insert(parentIndex, k, null, split.getId());
parent.insert(parentIndex, k, null, split.getPos());
}
}
break;
......@@ -402,22 +417,22 @@ public class Page {
map.readPage(c).removeAllRecursive();
}
}
map.removePage(id);
map.removePage(pos);
}
/**
* Remove a key-value pair.
*
* @param p the root node
* @param p the root page
* @param key the key
* @return the new root node
* @return the new root page
*/
static Page remove(Page p, Object key) {
int index = p.findKey(key);
if (p.isLeaf()) {
if (index >= 0) {
if (p.keyCount() == 1) {
p.map.removePage(p.id);
p.map.removePage(p.pos);
return null;
}
p = p.copyOnWrite();
......@@ -442,12 +457,12 @@ public class Page {
p = p.copyOnWrite();
p.remove(index);
if (p.keyCount() == 0) {
p.map.removePage(p.id);
p.map.removePage(p.pos);
p = p.map.readPage(p.children[0]);
}
} else {
p = p.copyOnWrite();
p.setChild(index, c2.id);
p.setChild(index, c2.pos);
}
return p;
}
......@@ -488,15 +503,17 @@ public class Page {
}
}
private void read(ByteBuffer buff) {
// len
buff.getInt();
long id = buff.getLong();
if (id != map.getId()) {
throw new RuntimeException("Page map id mismatch, expected " + map.getId() + " got " + id);
private void read(ByteBuffer buff, int maxLength) {
int len = buff.getInt();
if (len > maxLength) {
throw new RuntimeException("Length too large, expected < " + maxLength + " got " + len);
}
int mapId = DataUtils.readVarInt(buff);
if (mapId != map.getId()) {
throw new RuntimeException("Page pos mismatch, expected " + map.getId() + " got " + mapId);
}
boolean node = buff.get() == 1;
int len = DataUtils.readVarInt(buff);
len = DataUtils.readVarInt(buff);
if (node) {
children = new long[len];
keys = new Object[len - 1];
......@@ -517,14 +534,15 @@ public class Page {
}
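As reconstructed from read() above and write() below (a summary, not text from the commit), a serialized page starts with:

// 4 bytes   total page length (also reflected in the position's length code)
// var int   map id
// 1 byte    page type: 1 = node (inner page), otherwise leaf
// var int   entry count
// ... followed by the child page positions (nodes) or the keys and values
//     (leaves); the key/value encoding is delegated to the map's DataType
//     implementations, and those details are collapsed in this diff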
/**
* Store the page.
* Store the page and update the position.
*
* @param buff the target buffer
* @param chunkId the chunk id
*/
private void write(ByteBuffer buff) {
int pos = buff.position();
private void write(ByteBuffer buff, int chunkId) {
int offset = buff.position();
buff.putInt(0);
buff.putLong(map.getId());
DataUtils.writeVarInt(buff, map.getId());
if (children != null) {
buff.put((byte) 1);
int len = children.length;
......@@ -544,8 +562,9 @@ public class Page {
map.getValueType().write(buff, values[i]);
}
}
int len = buff.position() - pos;
buff.putInt(pos, len);
int len = buff.position() - offset;
buff.putInt(offset, len);
this.pos = Page.getPos(chunkId, offset, len);
}
/**
......@@ -554,7 +573,7 @@ public class Page {
* @return the maximum length in bytes
*/
int getMaxLengthTempRecursive() {
int maxLength = 4 + 8 + 1;
int maxLength = 4 + DataUtils.MAX_VAR_INT_LEN + 1;
if (children != null) {
int len = children.length;
maxLength += DataUtils.MAX_VAR_INT_LEN;
......@@ -580,26 +599,25 @@ public class Page {
}
/**
* Store this page and all children that are changed,
* in reverse order, and update the id and child ids.
* Store this page and all children that are changed, in reverse order, and
* update the position and the children.
*
* @param buff the target buffer
* @param idOffset the offset of the id
* @param chunkId the chunk id
* @return the page id
*/
long writeTempRecursive(ByteBuffer buff, long idOffset) {
long writeTempRecursive(ByteBuffer buff, int chunkId) {
if (children != null) {
int len = children.length;
for (int i = 0; i < len; i++) {
long c = children[i];
if (c < 0) {
children[i] = map.readPage(c).writeTempRecursive(buff, idOffset);
children[i] = map.readPage(c).writeTempRecursive(buff, chunkId);
}
}
}
this.id = idOffset + buff.position();
write(buff);
return id;
write(buff, chunkId);
return pos;
}
/**
......@@ -621,4 +639,73 @@ public class Page {
return count;
}
/**
* Get the chunk id from the position.
*
* @param pos the position
* @return the chunk id
*/
static int getChunkId(long pos) {
return (int) (pos >>> 37);
}
/**
* Get the offset from the position.
*
* @param pos the position
* @return the offset
*/
public static long getOffset(long pos) {
return (int) (pos >> 5);
}
/**
* Get the position of this page. The following information is encoded in
* the position: the chunk id, the offset, and the maximum length.
*
* @param chunkId the chunk id
* @param offset the offset
* @param length the length
* @return the position
*/
static long getPos(int chunkId, int offset, int length) {
return ((long) chunkId << 37) | ((long) offset << 5) | encodeLength(length);
}
/**
* Convert the length to a length code 0..31. 31 means more than 1 MB.
*
* @param len the length
* @return the length code
*/
public static int encodeLength(int len) {
if (len <= 32) {
return 0;
}
int x = len;
int shift = 0;
// halve (rounding up) until only a 2-bit "mantissa" of 2 or 3 remains
while (x > 3) {
shift++;
x = (x >> 1) + (x & 1);
}
// code 0 already covers lengths up to 32 (2 << 4), so drop 4 from the exponent
shift = Math.max(0, shift - 4);
// even codes decode to 2 << n, odd codes to 3 << n (see getMaxLength)
int code = (shift << 1) + (x & 1);
return Math.min(31, code);
}
/**
* Get the maximum length for the given code.
* For the code 31, Integer.MAX_VALUE is returned.
*
* @param pos the position
* @return the maximum length
*/
public static int getMaxLength(long pos) {
int code = (int) (pos & 31);
if (code == 31) {
return Integer.MAX_VALUE;
}
return (2 + (code & 1)) << ((code >> 1) + 4);
}
}
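A small illustration (not part of the commit; the class name is made up) of the length codes produced by Page.encodeLength and decoded by Page.getMaxLength above; it matches the assertions in TestDataUtils.testPagePos earlier in this diff.

public class LengthCodeTable {
    public static void main(String[] args) {
        // same formula as Page.getMaxLength for codes 0..30; code 31 is special-cased
        for (int code = 0; code <= 30; code++) {
            int maxLength = (2 + (code & 1)) << ((code >> 1) + 4);
            System.out.println(code + " -> " + maxLength);
        }
        // prints 0 -> 32, 1 -> 48, 2 -> 64, 3 -> 96, ..., 30 -> 1048576;
        // code 31 stands for "more than 1 MB", in which case Page.read first probes
        // the stored page length before reading the whole page
    }
}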