Commit 115f51dc, authored by Thomas Mueller

Shrink headers

Parent commit: 4c8e3aa0
...@@ -15,14 +15,6 @@ import java.util.HashMap; ...@@ -15,14 +15,6 @@ import java.util.HashMap;
* Chunks are page aligned (each page is usually 4096 bytes). * Chunks are page aligned (each page is usually 4096 bytes).
* There are at most 67 million (2^26) chunks, * There are at most 67 million (2^26) chunks,
* each chunk is at most 2 GB large. * each chunk is at most 2 GB large.
* Chunk format:
* 1 byte: 'c'
* 4 bytes: length
* 4 bytes: chunk id (an incrementing number)
* 4 bytes: pageCount
* 8 bytes: metaRootPos
* 8 bytes: maxLengthLive
* [ Page ] *
*/ */
public class Chunk { public class Chunk {
...@@ -37,14 +29,14 @@ public class Chunk { ...@@ -37,14 +29,14 @@ public class Chunk {
public final int id; public final int id;
/** /**
* The start position within the file. * The start block number within the file.
*/ */
public long start; public long block;
/** /**
* The length in bytes. * The length in number of blocks.
*/ */
public int length; public int blocks;
/** /**
* The total number of pages in this chunk. * The total number of pages in this chunk.
...@@ -122,11 +114,19 @@ public class Chunk { ...@@ -122,11 +114,19 @@ public class Chunk {
* *
* @param buff the target buffer * @param buff the target buffer
*/ */
void writeHeader(WriteBuffer buff) { void writeHeader(WriteBuffer buff, int minLength) {
long pos = buff.position();
buff.put((byte) '{'); buff.put((byte) '{');
buff.put(asString().getBytes(DataUtils.UTF8)); buff.put(asString().getBytes(DataUtils.UTF8));
buff.put((byte) '}'); buff.put((byte) '}');
buff.put((byte) ' '); while (buff.position() - pos < minLength - 1) {
buff.put((byte) ' ');
}
buff.put((byte) '\n');
}
static String getMetaKey(int chunkId) {
return "chunk." + Integer.toHexString(chunkId);
} }
/** /**
...@@ -137,17 +137,17 @@ public class Chunk { ...@@ -137,17 +137,17 @@ public class Chunk {
*/ */
public static Chunk fromString(String s) { public static Chunk fromString(String s) {
HashMap<String, String> map = DataUtils.parseMap(s); HashMap<String, String> map = DataUtils.parseMap(s);
int id = Integer.parseInt(map.get("chunk")); int id = Integer.parseInt(map.get("chunk"), 16);
Chunk c = new Chunk(id); Chunk c = new Chunk(id);
c.start = Long.parseLong(map.get("start")); c.block = Long.parseLong(map.get("block"), 16);
c.length = Integer.parseInt(map.get("length")); c.blocks = Integer.parseInt(map.get("blocks"), 16);
c.pageCount = Integer.parseInt(map.get("pageCount")); c.pageCount = Integer.parseInt(map.get("pages"), 16);
c.pageCountLive = Integer.parseInt(map.get("pageCountLive")); c.pageCountLive = DataUtils.parseHexInt(map.get("livePages"), c.pageCount);
c.maxLength = Long.parseLong(map.get("maxLength")); c.maxLength = Long.parseLong(map.get("max"), 16);
c.maxLengthLive = Long.parseLong(map.get("maxLengthLive")); c.maxLengthLive = DataUtils.parseHexLong(map.get("liveMax"), c.maxLength);
c.metaRootPos = Long.parseLong(map.get("metaRoot")); c.metaRootPos = Long.parseLong(map.get("root"), 16);
c.time = Long.parseLong(map.get("time")); c.time = Long.parseLong(map.get("time"), 16);
c.version = Long.parseLong(map.get("version")); c.version = Long.parseLong(map.get("version"), 16);
return c; return c;
} }
...@@ -171,17 +171,22 @@ public class Chunk { ...@@ -171,17 +171,22 @@ public class Chunk {
* @return the string * @return the string
*/ */
public String asString() { public String asString() {
return StringBuilder buff = new StringBuilder();
"chunk:" + id + "," + buff.append("chunk:").append(Integer.toHexString(id)).
"length:" + length + "," + append(",block:").append(Long.toHexString(block)).
"maxLength:" + maxLength + "," + append(",blocks:").append(Integer.toHexString(blocks));
"maxLengthLive:" + maxLengthLive + "," + if (maxLength != maxLengthLive) {
"metaRoot:" + metaRootPos + "," + buff.append(",liveMax:").append(Long.toHexString(maxLengthLive));
"pageCount:" + pageCount + "," + }
"pageCountLive:" + pageCountLive + "," + if (pageCount != pageCountLive) {
"start:" + start + "," + buff.append(",livePages:").append(Integer.toHexString(pageCountLive));
"time:" + time + "," + }
"version:" + version; buff.append(",max:").append(Long.toHexString(maxLength)).
append(",pages:").append(Integer.toHexString(pageCount)).
append(",root:").append(Long.toHexString(metaRootPos)).
append(",time:").append(Long.toHexString(time)).
append(",version:").append(Long.toHexString(version));
return buff.toString();
} }
@Override @Override
......
...@@ -828,41 +828,42 @@ public class DataUtils { ...@@ -828,41 +828,42 @@ public class DataUtils {
} }
/** /**
* Parse a string as a number. * Parse a string as a hexadecimal number.
* *
* @param x the number * @param x the number
* @param defaultValue if x is null * @param defaultValue if x is null
* @return the parsed value * @return the parsed value
* @throws IllegalStateException if parsing fails * @throws IllegalStateException if parsing fails
*/ */
public static long parseLong(String x, long defaultValue) { public static long parseHexLong(String x, long defaultValue) {
if (x == null) { if (x == null) {
return defaultValue; return defaultValue;
} }
try { try {
return Long.parseLong(x); return Long.parseLong(x, 16);
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
throw newIllegalStateException(ERROR_FILE_CORRUPT, throw newIllegalStateException(ERROR_FILE_CORRUPT,
"Error parsing the value {0} as a long", x, e); "Error parsing the value {0}", x, e);
} }
} }
/** /**
* Try to parse a string as a number. * Parse a string as a hexadecimal number.
* *
* @param x the number * @param x the number
* @param defaultValue if x is null * @param defaultValue if x is null
* @param errorValue if parsing fails * @return the parsed value
* @return the parsed value if parsing is possible * @throws IllegalStateException if parsing fails
*/ */
public static long parseLong(String x, long defaultValue, long errorValue) { public static int parseHexInt(String x, int defaultValue) {
if (x == null) { if (x == null) {
return defaultValue; return defaultValue;
} }
try { try {
return Long.parseLong(x); return Integer.parseInt(x, 16);
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
return errorValue; throw newIllegalStateException(ERROR_FILE_CORRUPT,
"Error parsing the value {0}", x, e);
} }
} }
......
...@@ -72,9 +72,8 @@ public class MVMap<K, V> extends AbstractMap<K, V> ...@@ -72,9 +72,8 @@ public class MVMap<K, V> extends AbstractMap<K, V>
*/ */
protected void init(MVStore store, HashMap<String, String> config) { protected void init(MVStore store, HashMap<String, String> config) {
this.store = store; this.store = store;
this.id = Integer.parseInt(config.get("id")); this.id = Integer.parseInt(config.get("id"), 16);
String x = config.get("createVersion"); this.createVersion = DataUtils.parseHexLong(config.get("createVersion"), 0);
this.createVersion = x == null ? 0 : Long.parseLong(x);
this.writeVersion = store.getCurrentVersion(); this.writeVersion = store.getCurrentVersion();
} }
...@@ -1038,8 +1037,8 @@ public class MVMap<K, V> extends AbstractMap<K, V> ...@@ -1038,8 +1037,8 @@ public class MVMap<K, V> extends AbstractMap<K, V>
MVMap<K, V> m = new MVMap<K, V>(keyType, valueType); MVMap<K, V> m = new MVMap<K, V>(keyType, valueType);
m.readOnly = true; m.readOnly = true;
HashMap<String, String> config = New.hashMap(); HashMap<String, String> config = New.hashMap();
config.put("id", String.valueOf(id)); config.put("id", Integer.toHexString(id));
config.put("createVersion", String.valueOf(createVersion)); config.put("createVersion", Long.toHexString(createVersion));
m.init(store, config); m.init(store, config);
m.root = root; m.root = root;
return m; return m;
...@@ -1098,7 +1097,7 @@ public class MVMap<K, V> extends AbstractMap<K, V> ...@@ -1098,7 +1097,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
DataUtils.appendMap(buff, "name", name); DataUtils.appendMap(buff, "name", name);
} }
if (createVersion != 0) { if (createVersion != 0) {
DataUtils.appendMap(buff, "createVersion", createVersion); DataUtils.appendMap(buff, "createVersion", Long.toHexString(createVersion));
} }
String type = getType(); String type = getType();
if (type != null) { if (type != null) {
......
...@@ -37,34 +37,6 @@ store header: (blockSize) bytes ...@@ -37,34 +37,6 @@ store header: (blockSize) bytes
(there are two headers for security at the beginning of the file, (there are two headers for security at the beginning of the file,
and there is a store header at the end of each chunk) and there is a store header at the end of each chunk)
store header:
{H:2,...
Format:
Current store header:
H:3,blockSize:4096,chunk:10,creationTime:1391059293945,format:1,lastMapId:15,rootChunk:8192,version:10,fletcher:5d2a9623
used:
chunk,creationTime,format,(formatRead,)lastMapId,rootChunk,version
(blockSize not used)
fletcher
map.10 = test
name.lobData = 10
chunk: store live only if there is garbage!
Plan: (hex encoded values, H:2, rootChunk -> block, creationTime -> created, lastMapId -> map)
{H:2,block:2,blockSize:1000,chunk:a,created:143e19856f9,format:1,map:b,version:a,fletcher:5d2a9623}\n
{block:2,blocks:a0,chunk:a,max:2030,pages:100,root:a020,time:103}\n
{<chunk>}\n .... {<storeHeader>}\n
map.a = test
name.lobData = a
Chunk: (id -> chunk, start -> block, length -> blocks, pageCount -> pages,
pageCountLive -> livePages, maxLength -> max, maxLengthLive -> liveMax,
metaRootPos -> root (offset))
+, only written if different from max/pages: liveMax:1030,livePages:30
TODO: TODO:
Documentation Documentation
...@@ -85,6 +57,13 @@ TransactionStore: ...@@ -85,6 +57,13 @@ TransactionStore:
if there is only one connection if there is only one connection
MVStore: MVStore:
- maybe reduce size of store header
- maybe make the free space bitset operate on blocks
- maybe let a chunk point to a list of potential next chunks
(so no fixed location header is needed), similar to a skip list
- document and review the file format
- automated 'kill process' and 'power failure' test - automated 'kill process' and 'power failure' test
- update checkstyle - update checkstyle
- feature to auto-compact from time to time and on close - feature to auto-compact from time to time and on close
...@@ -92,10 +71,6 @@ MVStore: ...@@ -92,10 +71,6 @@ MVStore:
- possibly split chunk metadata into immutable and mutable - possibly split chunk metadata into immutable and mutable
- compact: avoid processing pages using a counting bloom filter - compact: avoid processing pages using a counting bloom filter
- defragment (re-creating maps, specially those with small pages) - defragment (re-creating maps, specially those with small pages)
- chunk header: store changed chunk data as row; maybe after the root
- two chunk checksums (header+last page; the second one 2 bytes per page)
- maybe let a chunk point to a list of potential next chunks
(so no fixed location header is needed), similar to a skip list
- store number of write operations per page (maybe defragment - store number of write operations per page (maybe defragment
if much different than count) if much different than count)
- r-tree: nearest neighbor search - r-tree: nearest neighbor search
...@@ -122,8 +97,6 @@ MVStore: ...@@ -122,8 +97,6 @@ MVStore:
- optional pluggable checksum mechanism (per page), which - optional pluggable checksum mechanism (per page), which
requires that everything is a page (including headers) requires that everything is a page (including headers)
- rename setStoreVersion to setDataVersion or similar - rename setStoreVersion to setDataVersion or similar
- to save space for small chunks, combine the last partial
block with the header
- temporary file storage - temporary file storage
- simple rollback method (rollback to last committed version) - simple rollback method (rollback to last committed version)
- MVMap to implement SortedMap, then NavigableMap - MVMap to implement SortedMap, then NavigableMap
...@@ -134,8 +107,6 @@ MVStore: ...@@ -134,8 +107,6 @@ MVStore:
- add new feature to the file system API to avoid copying data - add new feature to the file system API to avoid copying data
(reads that returns a ByteBuffer instead of writing into one) (reads that returns a ByteBuffer instead of writing into one)
for memory mapped files and off-heap storage for memory mapped files and off-heap storage
- do we need to store a dummy chunk entry in the chunk itself?
currently yes, as some fields are not set in the chunk header
- support log structured merge style operations (blind writes) - support log structured merge style operations (blind writes)
using one map per level plus bloom filter using one map per level plus bloom filter
- have a strict call order MVStore -> MVMap -> Page -> FileStore - have a strict call order MVStore -> MVMap -> Page -> FileStore
...@@ -167,6 +138,11 @@ public class MVStore { ...@@ -167,6 +138,11 @@ public class MVStore {
* written twice, one copy in each block, to ensure it survives a crash. * written twice, one copy in each block, to ensure it survives a crash.
*/ */
static final int BLOCK_SIZE = 4 * 1024; static final int BLOCK_SIZE = 4 * 1024;
/**
* The maximum length of the store header.
*/
static final int STORE_HEADER_LENGTH = 256;
private static final int FORMAT_WRITE = 1; private static final int FORMAT_WRITE = 1;
private static final int FORMAT_READ = 1; private static final int FORMAT_READ = 1;
...@@ -184,7 +160,7 @@ public class MVStore { ...@@ -184,7 +160,7 @@ public class MVStore {
private final int pageSplitSize; private final int pageSplitSize;
private long rootChunkStart; private long lastChunkBlock;
/** /**
* The page cache. The default size is 16 MB, and the average size is 2 KB. * The page cache. The default size is 16 MB, and the average size is 2 KB.
...@@ -335,13 +311,13 @@ public class MVStore { ...@@ -335,13 +311,13 @@ public class MVStore {
creationTime = 0; creationTime = 0;
creationTime = getTime(); creationTime = getTime();
lastCommitTime = creationTime; lastCommitTime = creationTime;
storeHeader.put("blockSize", "" + BLOCK_SIZE); storeHeader.put("blockSize", Integer.toHexString(BLOCK_SIZE));
storeHeader.put("format", "" + FORMAT_WRITE); storeHeader.put("format", Integer.toHexString(FORMAT_WRITE));
storeHeader.put("creationTime", "" + creationTime); storeHeader.put("created", Long.toHexString(creationTime));
writeStoreHeader(); writeStoreHeader();
} else { } else {
readStoreHeader(); readStoreHeader();
long format = DataUtils.parseLong(storeHeader.get("format"), 0); long format = DataUtils.parseHexLong(storeHeader.get("format"), 0);
if (format > FORMAT_WRITE && !fileStore.isReadOnly()) { if (format > FORMAT_WRITE && !fileStore.isReadOnly()) {
throw DataUtils.newIllegalStateException( throw DataUtils.newIllegalStateException(
DataUtils.ERROR_UNSUPPORTED_FORMAT, DataUtils.ERROR_UNSUPPORTED_FORMAT,
...@@ -349,14 +325,14 @@ public class MVStore { ...@@ -349,14 +325,14 @@ public class MVStore {
"and the file was not opened in read-only mode", "and the file was not opened in read-only mode",
format, FORMAT_WRITE); format, FORMAT_WRITE);
} }
format = DataUtils.parseLong(storeHeader.get("formatRead"), format); format = DataUtils.parseHexLong(storeHeader.get("formatRead"), format);
if (format > FORMAT_READ) { if (format > FORMAT_READ) {
throw DataUtils.newIllegalStateException( throw DataUtils.newIllegalStateException(
DataUtils.ERROR_UNSUPPORTED_FORMAT, DataUtils.ERROR_UNSUPPORTED_FORMAT,
"The read format {0} is larger than the supported format {1}", "The read format {0} is larger than the supported format {1}",
format, FORMAT_READ); format, FORMAT_READ);
} }
if (rootChunkStart > 0) { if (lastChunkBlock > 0) {
readMeta(); readMeta();
} }
} }
...@@ -406,8 +382,8 @@ public class MVStore { ...@@ -406,8 +382,8 @@ public class MVStore {
<T extends MVMap<?, ?>> T openMapVersion(long version, int mapId, <T extends MVMap<?, ?>> T openMapVersion(long version, int mapId,
MVMap<?, ?> template) { MVMap<?, ?> template) {
MVMap<String, String> oldMeta = getMetaMap(version); MVMap<String, String> oldMeta = getMetaMap(version);
String r = oldMeta.get("root." + mapId); String r = oldMeta.get("root." + Integer.toHexString(mapId));
long rootPos = DataUtils.parseLong(r, 0); long rootPos = DataUtils.parseHexLong(r, 0);
MVMap<?, ?> m = template.openReadOnly(); MVMap<?, ?> m = template.openReadOnly();
m.setRootPos(rootPos, version); m.setRootPos(rootPos, version);
return (T) m; return (T) m;
...@@ -446,7 +422,7 @@ public class MVStore { ...@@ -446,7 +422,7 @@ public class MVStore {
HashMap<String, String> c; HashMap<String, String> c;
M map; M map;
if (x != null) { if (x != null) {
id = Integer.parseInt(x); id = Integer.parseInt(x, 16);
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
M old = (M) maps.get(id); M old = (M) maps.get(id);
if (old != null) { if (old != null) {
...@@ -457,18 +433,19 @@ public class MVStore { ...@@ -457,18 +433,19 @@ public class MVStore {
c = DataUtils.parseMap(config); c = DataUtils.parseMap(config);
c.put("id", x); c.put("id", x);
map.init(this, c); map.init(this, c);
String r = meta.get("root." + id); String r = meta.get("root." + x);
root = r == null ? 0 : Long.parseLong(r); root = DataUtils.parseHexLong(r, 0);
} else { } else {
c = New.hashMap(); c = New.hashMap();
id = ++lastMapId; id = ++lastMapId;
c.put("id", Integer.toString(id)); x = Integer.toHexString(id);
c.put("createVersion", Long.toString(currentVersion)); c.put("id", x);
c.put("createVersion", Long.toHexString(currentVersion));
map = builder.create(); map = builder.create();
map.init(this, c); map.init(this, c);
markMetaChanged(); markMetaChanged();
meta.put("map." + id, map.asString(name)); meta.put("map." + x, map.asString(name));
meta.put("name." + name, Integer.toString(id)); meta.put("name." + name, x);
root = 0; root = 0;
} }
map.setRootPos(root, -1); map.setRootPos(root, -1);
...@@ -521,7 +498,7 @@ public class MVStore { ...@@ -521,7 +498,7 @@ public class MVStore {
private MVMap<String, String> getMetaMap(long version) { private MVMap<String, String> getMetaMap(long version) {
Chunk c = getChunkForVersion(version); Chunk c = getChunkForVersion(version);
DataUtils.checkArgument(c != null, "Unknown version {0}", version); DataUtils.checkArgument(c != null, "Unknown version {0}", version);
c = readChunkHeader(c.start); c = readChunkHeader(c.block);
MVMap<String, String> oldMeta = meta.openReadOnly(); MVMap<String, String> oldMeta = meta.openReadOnly();
oldMeta.setRootPos(c.metaRootPos, version); oldMeta.setRootPos(c.metaRootPos, version);
return oldMeta; return oldMeta;
...@@ -556,8 +533,8 @@ public class MVStore { ...@@ -556,8 +533,8 @@ public class MVStore {
private synchronized void readMeta() { private synchronized void readMeta() {
chunks.clear(); chunks.clear();
Chunk header = readChunkHeader(rootChunkStart); Chunk header = readChunkHeader(lastChunkBlock);
if (header.start == Long.MAX_VALUE) { if (header.block == Long.MAX_VALUE) {
throw DataUtils.newIllegalStateException( throw DataUtils.newIllegalStateException(
DataUtils.ERROR_FILE_CORRUPT, DataUtils.ERROR_FILE_CORRUPT,
"Chunk {0} is invalid", header.id); "Chunk {0} is invalid", header.id);
...@@ -565,28 +542,19 @@ public class MVStore { ...@@ -565,28 +542,19 @@ public class MVStore {
lastChunkId = header.id; lastChunkId = header.id;
chunks.put(header.id, header); chunks.put(header.id, header);
meta.setRootPos(header.metaRootPos, -1); meta.setRootPos(header.metaRootPos, -1);
String s = meta.get("chunk." + lastChunkId); chunks.put(header.id, header);
Chunk h2 = Chunk.fromString(s);
h2.start = header.start;
h2.length = header.length;
h2.metaRootPos = header.metaRootPos;
h2.pageCount = header.pageCount;
h2.pageCountLive = header.pageCountLive;
h2.maxLength = header.maxLength;
h2.maxLengthLive = header.maxLengthLive;
chunks.put(header.id, h2);
// we can load the chunk in any order, // we can load the chunk in any order,
// because loading chunk metadata // because loading chunk metadata
// might recursively load another chunk // might recursively load another chunk
for (Iterator<String> it = meta.keyIterator("chunk."); it.hasNext();) { for (Iterator<String> it = meta.keyIterator("chunk."); it.hasNext();) {
s = it.next(); String s = it.next();
if (!s.startsWith("chunk.")) { if (!s.startsWith("chunk.")) {
break; break;
} }
s = meta.get(s); s = meta.get(s);
Chunk c = Chunk.fromString(s); Chunk c = Chunk.fromString(s);
if (!chunks.containsKey(c.id)) { if (!chunks.containsKey(c.id)) {
if (c.start == Long.MAX_VALUE) { if (c.block == Long.MAX_VALUE) {
throw DataUtils.newIllegalStateException( throw DataUtils.newIllegalStateException(
DataUtils.ERROR_FILE_CORRUPT, DataUtils.ERROR_FILE_CORRUPT,
"Chunk {0} is invalid", c.id); "Chunk {0} is invalid", c.id);
...@@ -600,8 +568,9 @@ public class MVStore { ...@@ -600,8 +568,9 @@ public class MVStore {
// remove this chunk in the next save operation // remove this chunk in the next save operation
registerFreePage(currentVersion, c.id, 0, 0); registerFreePage(currentVersion, c.id, 0, 0);
} }
int len = MathUtils.roundUpInt(c.length, BLOCK_SIZE) + BLOCK_SIZE; long start = c.block * BLOCK_SIZE;
fileStore.markUsed(c.start, len); int len = c.blocks * BLOCK_SIZE;
fileStore.markUsed(start, len);
} }
} }
...@@ -618,7 +587,15 @@ public class MVStore { ...@@ -618,7 +587,15 @@ public class MVStore {
buff.put(buffLastBlock); buff.put(buffLastBlock);
buff.put(buffFirst2Blocks); buff.put(buffFirst2Blocks);
for (int i = 0; i < 3 * BLOCK_SIZE; i += BLOCK_SIZE) { for (int i = 0; i < 3 * BLOCK_SIZE; i += BLOCK_SIZE) {
String s = new String(buff.array(), i, BLOCK_SIZE, DataUtils.UTF8) int start = i;
if (i == 0) {
start = BLOCK_SIZE - STORE_HEADER_LENGTH;
}
if (buff.array()[start] != '{') {
continue;
}
String s = new String(buff.array(), start,
STORE_HEADER_LENGTH, DataUtils.UTF8)
.trim(); .trim();
HashMap<String, String> m; HashMap<String, String> m;
try { try {
...@@ -642,14 +619,14 @@ public class MVStore { ...@@ -642,14 +619,14 @@ public class MVStore {
if (check != checksum) { if (check != checksum) {
continue; continue;
} }
long chunk = Long.parseLong(m.get("chunk")); long chunk = Long.parseLong(m.get("chunk"), 16);
if (chunk > newestChunk) { if (chunk > newestChunk) {
newestChunk = chunk; newestChunk = chunk;
storeHeader = m; storeHeader = m;
rootChunkStart = Long.parseLong(m.get("rootChunk")); lastChunkBlock = Long.parseLong(m.get("block"), 16);
creationTime = Long.parseLong(m.get("creationTime")); creationTime = Long.parseLong(m.get("created"), 16);
lastMapId = Integer.parseInt(m.get("lastMapId")); lastMapId = Integer.parseInt(m.get("map"), 16);
currentVersion = Long.parseLong(m.get("version")); currentVersion = Long.parseLong(m.get("version"), 16);
} }
} }
if (currentVersion < 0) { if (currentVersion < 0) {
...@@ -661,28 +638,31 @@ public class MVStore { ...@@ -661,28 +638,31 @@ public class MVStore {
lastStoredVersion = -1; lastStoredVersion = -1;
} }
private byte[] getStoreHeaderBytes() { private byte[] getStoreHeaderBytes(int minLength) {
StringBuilder buff = new StringBuilder("{H:2"); StringBuilder buff = new StringBuilder("{H:2");
storeHeader.put("lastMapId", "" + lastMapId); storeHeader.put("map", Integer.toHexString(lastMapId));
storeHeader.put("chunk", "" + lastChunkId); storeHeader.put("chunk", Integer.toHexString(lastChunkId));
storeHeader.put("rootChunk", "" + rootChunkStart); storeHeader.put("block", Long.toHexString(lastChunkBlock));
storeHeader.put("version", "" + currentVersion); storeHeader.put("version", Long.toHexString(currentVersion));
DataUtils.appendMap(buff, storeHeader); DataUtils.appendMap(buff, storeHeader);
byte[] bytes = buff.toString().getBytes(DataUtils.UTF8); byte[] bytes = buff.toString().getBytes(DataUtils.UTF8);
int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2); int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
DataUtils.appendMap(buff, "fletcher", Integer.toHexString(checksum)); DataUtils.appendMap(buff, "fletcher", Integer.toHexString(checksum));
buff.append("}\n"); buff.append("}");
bytes = buff.toString().getBytes(DataUtils.UTF8); if (buff.length() >= STORE_HEADER_LENGTH - 1) {
if (bytes.length > BLOCK_SIZE) {
throw DataUtils.newIllegalStateException( throw DataUtils.newIllegalStateException(
DataUtils.ERROR_UNSUPPORTED_FORMAT, DataUtils.ERROR_UNSUPPORTED_FORMAT,
"Store header too large: {0}", buff); "Store header too large: {0}", buff);
} }
return bytes; while (buff.length() < minLength - 1) {
buff.append(' ');
}
buff.append("\n");
return buff.toString().getBytes(DataUtils.UTF8);
} }
private void writeStoreHeader() { private void writeStoreHeader() {
byte[] bytes = getStoreHeaderBytes(); byte[] bytes = getStoreHeaderBytes(0);
ByteBuffer header = ByteBuffer.allocate(2 * BLOCK_SIZE); ByteBuffer header = ByteBuffer.allocate(2 * BLOCK_SIZE);
header.put(bytes); header.put(bytes);
header.position(BLOCK_SIZE); header.position(BLOCK_SIZE);
...@@ -778,14 +758,14 @@ public class MVStore { ...@@ -778,14 +758,14 @@ public class MVStore {
DataUtils.ERROR_INTERNAL, DataUtils.ERROR_INTERNAL,
"Unsynchronized metadata read"); "Unsynchronized metadata read");
} }
String s = meta.get("chunk." + chunkId); String s = meta.get(Chunk.getMetaKey(chunkId));
if (s == null) { if (s == null) {
throw DataUtils.newIllegalStateException( throw DataUtils.newIllegalStateException(
DataUtils.ERROR_FILE_CORRUPT, DataUtils.ERROR_FILE_CORRUPT,
"Chunk {0} not found", chunkId); "Chunk {0} not found", chunkId);
} }
c = Chunk.fromString(s); c = Chunk.fromString(s);
if (c.start == Long.MAX_VALUE) { if (c.block == Long.MAX_VALUE) {
throw DataUtils.newIllegalStateException( throw DataUtils.newIllegalStateException(
DataUtils.ERROR_FILE_CORRUPT, DataUtils.ERROR_FILE_CORRUPT,
"Chunk {0} is invalid", chunkId); "Chunk {0} is invalid", chunkId);
...@@ -875,12 +855,12 @@ public class MVStore { ...@@ -875,12 +855,12 @@ public class MVStore {
lastCommitTime = time; lastCommitTime = time;
retainChunk = null; retainChunk = null;
// the last chunk was not completely correct in the last store() // the last chunk was not stored before and needs to be set now (it's
// this needs to be updated now (it's better not to update right after // better not to update right after storing, because that would modify
// storing, because that would modify the meta map again) // the meta map again)
Chunk lastChunk = chunks.get(lastChunkId); Chunk lastChunk = chunks.get(lastChunkId);
if (lastChunk != null) { if (lastChunk != null) {
meta.put("chunk." + lastChunk.id, lastChunk.asString()); meta.put(Chunk.getMetaKey(lastChunk.id), lastChunk.asString());
// never go backward in time // never go backward in time
time = Math.max(lastChunk.time, time); time = Math.max(lastChunk.time, time);
} }
...@@ -891,12 +871,14 @@ public class MVStore { ...@@ -891,12 +871,14 @@ public class MVStore {
c.maxLength = Long.MAX_VALUE; c.maxLength = Long.MAX_VALUE;
c.maxLengthLive = Long.MAX_VALUE; c.maxLengthLive = Long.MAX_VALUE;
c.metaRootPos = Long.MAX_VALUE; c.metaRootPos = Long.MAX_VALUE;
c.start = Long.MAX_VALUE; c.block = Long.MAX_VALUE;
c.length = Integer.MAX_VALUE; c.blocks = Integer.MAX_VALUE;
c.time = time; c.time = time;
c.version = version; c.version = version;
chunks.put(c.id, c); chunks.put(c.id, c);
meta.put("chunk." + c.id, c.asString()); // force a metadata update
meta.put(Chunk.getMetaKey(c.id), c.asString());
meta.remove(Chunk.getMetaKey(c.id));
ArrayList<MVMap<?, ?>> list = New.arrayList(maps.values()); ArrayList<MVMap<?, ?>> list = New.arrayList(maps.values());
ArrayList<MVMap<?, ?>> changed = New.arrayList(); ArrayList<MVMap<?, ?>> changed = New.arrayList();
for (MVMap<?, ?> m : list) { for (MVMap<?, ?> m : list) {
...@@ -916,17 +898,18 @@ public class MVStore { ...@@ -916,17 +898,18 @@ public class MVStore {
} }
for (MVMap<?, ?> m : changed) { for (MVMap<?, ?> m : changed) {
Page p = m.getRoot(); Page p = m.getRoot();
String key = "root." + Long.toHexString(m.getId());
if (p.getTotalCount() == 0) { if (p.getTotalCount() == 0) {
meta.put("root." + m.getId(), "0"); meta.put(key, "0");
} else { } else {
meta.put("root." + m.getId(), String.valueOf(Integer.MAX_VALUE)); meta.put(key, Long.toHexString(Long.MAX_VALUE));
} }
} }
Set<Chunk> removedChunks = applyFreedSpace(storeVersion, time); Set<Chunk> removedChunks = applyFreedSpace(storeVersion, time);
WriteBuffer buff = getWriteBuffer(); WriteBuffer buff = getWriteBuffer();
// need to patch the header later // need to patch the header later
c.writeHeader(buff); c.writeHeader(buff, 0);
long endHeader = buff.position(); int headerLength = buff.position();
c.pageCount = 0; c.pageCount = 0;
c.pageCountLive = 0; c.pageCountLive = 0;
c.maxLength = 0; c.maxLength = 0;
...@@ -936,28 +919,27 @@ public class MVStore { ...@@ -936,28 +919,27 @@ public class MVStore {
if (p.getTotalCount() > 0) { if (p.getTotalCount() > 0) {
p.writeUnsavedRecursive(c, buff); p.writeUnsavedRecursive(c, buff);
long root = p.getPos(); long root = p.getPos();
meta.put("root." + m.getId(), "" + root); String key = "root." + Long.toHexString(m.getId());
meta.put(key, Long.toHexString(root));
} }
} }
meta.put("chunk." + c.id, c.asString());
meta.setWriteVersion(version); meta.setWriteVersion(version);
// this will (again) modify maxLengthLive, but
// the correct value is written in the chunk header
Page metaRoot = meta.getRoot(); Page metaRoot = meta.getRoot();
metaRoot.writeUnsavedRecursive(c, buff); metaRoot.writeUnsavedRecursive(c, buff);
int chunkLength = buff.position(); int chunkLength = buff.position();
// round to the next block, // add the store header and round to the next block
// and one additional block for the store header int length = MathUtils.roundUpInt(chunkLength +
int length = MathUtils.roundUpInt(chunkLength, BLOCK_SIZE) + BLOCK_SIZE; STORE_HEADER_LENGTH, BLOCK_SIZE);
buff.limit(length); buff.limit(length);
// free up the space of unused chunks now // free up the space of unused chunks now
for (Chunk x : removedChunks) { for (Chunk x : removedChunks) {
int len = MathUtils.roundUpInt(x.length, BLOCK_SIZE) + BLOCK_SIZE; long start = x.block * BLOCK_SIZE;
fileStore.free(x.start, len); int len = x.blocks * BLOCK_SIZE;
fileStore.free(start, len);
} }
// the length of the file that is still in use // the length of the file that is still in use
...@@ -973,23 +955,17 @@ public class MVStore { ...@@ -973,23 +955,17 @@ public class MVStore {
// end is not necessarily the end of the file // end is not necessarily the end of the file
boolean storeAtEndOfFile = filePos + length >= fileStore.size(); boolean storeAtEndOfFile = filePos + length >= fileStore.size();
c.start = filePos; c.block = filePos / BLOCK_SIZE;
c.length = chunkLength; c.blocks = length / BLOCK_SIZE;
c.metaRootPos = metaRoot.getPos(); c.metaRootPos = metaRoot.getPos();
buff.position(0); buff.position(0);
c.writeHeader(buff); c.writeHeader(buff, headerLength);
while (buff.position() < endHeader - 1) { lastChunkBlock = filePos / BLOCK_SIZE;
buff.put((byte) ' ');
}
buff.put((byte) '\n');
rootChunkStart = filePos;
revertTemp(storeVersion); revertTemp(storeVersion);
buff.position(buff.limit() - BLOCK_SIZE); buff.position(buff.limit() - STORE_HEADER_LENGTH);
byte[] header = getStoreHeaderBytes(); byte[] header = getStoreHeaderBytes(STORE_HEADER_LENGTH);
buff.put(header); buff.put(header);
// fill the header with zeroes
buff.put(new byte[BLOCK_SIZE - header.length]);
buff.position(0); buff.position(0);
write(filePos, buff.getBuffer()); write(filePos, buff.getBuffer());
...@@ -1118,14 +1094,14 @@ public class MVStore { ...@@ -1118,14 +1094,14 @@ public class MVStore {
if (canOverwriteChunk(c, time)) { if (canOverwriteChunk(c, time)) {
removedChunks.add(c); removedChunks.add(c);
chunks.remove(c.id); chunks.remove(c.id);
meta.remove("chunk." + c.id); meta.remove(Chunk.getMetaKey(c.id));
} else { } else {
meta.put("chunk." + c.id, c.asString()); meta.put(Chunk.getMetaKey(c.id), c.asString());
// remove this chunk in the next save operation // remove this chunk in the next save operation
registerFreePage(storeVersion + 1, c.id, 0, 0); registerFreePage(storeVersion + 1, c.id, 0, 0);
} }
} else { } else {
meta.put("chunk." + c.id, c.asString()); meta.put(Chunk.getMetaKey(c.id), c.asString());
} }
} }
if (modified.size() == 0) { if (modified.size() == 0) {
...@@ -1165,9 +1141,10 @@ public class MVStore { ...@@ -1165,9 +1141,10 @@ public class MVStore {
private long getFileLengthInUse() { private long getFileLengthInUse() {
long size = 2 * BLOCK_SIZE; long size = 2 * BLOCK_SIZE;
for (Chunk c : chunks.values()) { for (Chunk c : chunks.values()) {
long x = c.start + c.length; if (c.blocks != Integer.MAX_VALUE) {
size = Math.max(size, MathUtils.roundUpLong(x, BLOCK_SIZE) long x = (c.block + c.blocks) * BLOCK_SIZE;
+ BLOCK_SIZE); size = Math.max(size, x);
}
} }
return size; return size;
} }
...@@ -1193,9 +1170,10 @@ public class MVStore { ...@@ -1193,9 +1170,10 @@ public class MVStore {
return false; return false;
} }
private Chunk readChunkHeader(long start) { private Chunk readChunkHeader(long block) {
ByteBuffer buff = fileStore.readFully(start, Chunk.MAX_HEADER_LENGTH); long p = block * BLOCK_SIZE;
return Chunk.fromHeader(buff, start); ByteBuffer buff = fileStore.readFully(p, Chunk.MAX_HEADER_LENGTH);
return Chunk.fromHeader(buff, p);
} }
/** /**
...@@ -1226,44 +1204,45 @@ public class MVStore { ...@@ -1226,44 +1204,45 @@ public class MVStore {
for (Chunk c : free) { for (Chunk c : free) {
chunks.remove(c.id); chunks.remove(c.id);
markMetaChanged(); markMetaChanged();
meta.remove("chunk." + c.id); meta.remove(Chunk.getMetaKey(c.id));
int length = MathUtils.roundUpInt(c.length, BLOCK_SIZE) long start = c.block * BLOCK_SIZE;
+ BLOCK_SIZE; int length = c.blocks * BLOCK_SIZE;
fileStore.free(c.start, length); fileStore.free(start, length);
} }
if (fileStore.getFillRate() == 100) { if (fileStore.getFillRate() == 100) {
return false; return false;
} }
long firstFree = fileStore.getFirstFree(); long firstFree = fileStore.getFirstFree() / BLOCK_SIZE;
ArrayList<Chunk> move = New.arrayList(); ArrayList<Chunk> move = New.arrayList();
for (Chunk c : chunks.values()) { for (Chunk c : chunks.values()) {
if (c.start > firstFree) { if (c.block > firstFree) {
move.add(c); move.add(c);
} }
} }
for (Chunk c : move) { for (Chunk c : move) {
WriteBuffer buff = getWriteBuffer(); WriteBuffer buff = getWriteBuffer();
int length = MathUtils.roundUpInt(c.length, BLOCK_SIZE) long start = c.block * BLOCK_SIZE;
+ BLOCK_SIZE; int length = c.blocks * BLOCK_SIZE;
buff.limit(length); buff.limit(length);
ByteBuffer buff2 = fileStore.readFully(c.start, length); ByteBuffer readBuff = fileStore.readFully(start, length);
buff.put(buff2); Chunk.fromHeader(readBuff, 0);
int chunkHeaderLen = readBuff.position();
buff.position(chunkHeaderLen);
buff.put(readBuff);
long end = getFileLengthInUse(); long end = getFileLengthInUse();
fileStore.markUsed(end, length); fileStore.markUsed(end, length);
fileStore.free(c.start, length); fileStore.free(start, length);
c.start = end; c.block = end / BLOCK_SIZE;
buff.position(0); buff.position(0);
c.writeHeader(buff); c.writeHeader(buff, chunkHeaderLen);
buff.position(buff.limit() - BLOCK_SIZE); buff.position(length - STORE_HEADER_LENGTH);
byte[] header = getStoreHeaderBytes(); byte[] header = getStoreHeaderBytes(STORE_HEADER_LENGTH);
buff.put(header); buff.put(header);
// fill the header with zeroes
buff.put(new byte[BLOCK_SIZE - header.length]);
buff.position(0); buff.position(0);
write(end, buff.getBuffer()); write(end, buff.getBuffer());
releaseWriteBuffer(buff); releaseWriteBuffer(buff);
markMetaChanged(); markMetaChanged();
meta.put("chunk." + c.id, c.asString()); meta.put(Chunk.getMetaKey(c.id), c.asString());
} }
boolean oldReuse = reuseSpace; boolean oldReuse = reuseSpace;
...@@ -1282,26 +1261,27 @@ public class MVStore { ...@@ -1282,26 +1261,27 @@ public class MVStore {
continue; continue;
} }
WriteBuffer buff = getWriteBuffer(); WriteBuffer buff = getWriteBuffer();
int length = MathUtils.roundUpInt(c.length, BLOCK_SIZE) long start = c.block * BLOCK_SIZE;
+ BLOCK_SIZE; int length = c.blocks * BLOCK_SIZE;
buff.limit(length); buff.limit(length);
ByteBuffer buff2 = fileStore.readFully(c.start, length); ByteBuffer readBuff = fileStore.readFully(start, length);
buff.put(buff2); Chunk.fromHeader(readBuff, 0);
int chunkHeaderLen = readBuff.position();
buff.position(chunkHeaderLen);
buff.put(readBuff);
long pos = fileStore.allocate(length); long pos = fileStore.allocate(length);
fileStore.free(c.start, length); fileStore.free(start, length);
buff.position(0); buff.position(0);
c.start = pos; c.block = pos / BLOCK_SIZE;
c.writeHeader(buff); c.writeHeader(buff, chunkHeaderLen);
buff.position(buff.limit() - BLOCK_SIZE); buff.position(length - STORE_HEADER_LENGTH);
byte[] header = getStoreHeaderBytes(); byte[] header = getStoreHeaderBytes(STORE_HEADER_LENGTH);
buff.put(header); buff.put(header);
// fill the header with zeroes
buff.put(new byte[BLOCK_SIZE - header.length]);
buff.position(0); buff.position(0);
write(pos, buff.getBuffer()); write(pos, buff.getBuffer());
releaseWriteBuffer(buff); releaseWriteBuffer(buff);
markMetaChanged(); markMetaChanged();
meta.put("chunk." + c.id, c.asString()); meta.put(Chunk.getMetaKey(c.id), c.asString());
} }
// update the metadata (within the file) // update the metadata (within the file)
...@@ -1417,13 +1397,19 @@ public class MVStore { ...@@ -1417,13 +1397,19 @@ public class MVStore {
} }
private void copyLive(Chunk chunk, ArrayList<Chunk> old) { private void copyLive(Chunk chunk, ArrayList<Chunk> old) {
ByteBuffer buff = fileStore.readFully(chunk.start, chunk.length); long start = chunk.block * BLOCK_SIZE;
Chunk.fromHeader(buff, chunk.start); int chunkLength = chunk.blocks * BLOCK_SIZE;
int chunkLength = chunk.length; ByteBuffer buff = fileStore.readFully(start, chunkLength);
Chunk.fromHeader(buff, start);
int pagesRemaining = chunk.pageCount;
markMetaChanged(); markMetaChanged();
while (buff.position() < chunkLength) { while (pagesRemaining-- > 0) {
int start = buff.position(); int offset = buff.position();
int pageLength = buff.getInt(); int pageLength = buff.getInt();
if (pageLength <= 0) {
throw DataUtils.newIllegalStateException(
DataUtils.ERROR_FILE_CORRUPT, "Page length {0}", pageLength);
}
buff.getShort(); buff.getShort();
int mapId = DataUtils.readVarInt(buff); int mapId = DataUtils.readVarInt(buff);
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
...@@ -1432,12 +1418,12 @@ public class MVStore { ...@@ -1432,12 +1418,12 @@ public class MVStore {
// pages of maps that are not open or that have been removed // pages of maps that are not open or that have been removed
// later on are not moved (for maps that are not open, the live // later on are not moved (for maps that are not open, the live
// counter is not decremented, so the chunk is not removed) // counter is not decremented, so the chunk is not removed)
buff.position(start + pageLength); buff.position(offset + pageLength);
continue; continue;
} }
buff.position(start); buff.position(offset);
Page page = new Page(map, 0); Page page = new Page(map, 0);
page.read(buff, chunk.id, buff.position(), chunk.length); page.read(buff, chunk.id, buff.position(), chunkLength);
for (int i = 0; i < page.getKeyCount(); i++) { for (int i = 0; i < page.getKeyCount(); i++) {
Object k = page.getKey(i); Object k = page.getKey(i);
Page p = map.getPage(k); Page p = map.getPage(k);
...@@ -1480,7 +1466,7 @@ public class MVStore { ...@@ -1480,7 +1466,7 @@ public class MVStore {
Page p = cache == null ? null : cache.get(pos); Page p = cache == null ? null : cache.get(pos);
if (p == null) { if (p == null) {
Chunk c = getChunk(pos); Chunk c = getChunk(pos);
long filePos = c.start; long filePos = c.block * BLOCK_SIZE;
filePos += DataUtils.getPageOffset(pos); filePos += DataUtils.getPageOffset(pos);
if (filePos < 0) { if (filePos < 0) {
throw DataUtils.newIllegalStateException( throw DataUtils.newIllegalStateException(
...@@ -1719,7 +1705,7 @@ public class MVStore { ...@@ -1719,7 +1705,7 @@ public class MVStore {
public int getStoreVersion() { public int getStoreVersion() {
checkOpen(); checkOpen();
String x = meta.get("setting.storeVersion"); String x = meta.get("setting.storeVersion");
return x == null ? 0 : Integer.parseInt(x); return x == null ? 0 : Integer.parseInt(x, 16);
} }
/** /**
...@@ -1730,7 +1716,7 @@ public class MVStore { ...@@ -1730,7 +1716,7 @@ public class MVStore {
public synchronized void setStoreVersion(int version) { public synchronized void setStoreVersion(int version) {
checkOpen(); checkOpen();
markMetaChanged(); markMetaChanged();
meta.put("setting.storeVersion", Integer.toString(version)); meta.put("setting.storeVersion", Integer.toHexString(version));
} }
/** /**
...@@ -1806,19 +1792,18 @@ public class MVStore { ...@@ -1806,19 +1792,18 @@ public class MVStore {
break; break;
} }
chunks.remove(lastChunkId); chunks.remove(lastChunkId);
int len = MathUtils.roundUpInt(last.length, BLOCK_SIZE) + BLOCK_SIZE; long start = last.block * BLOCK_SIZE;
fileStore.free(last.start, len); int len = last.blocks * BLOCK_SIZE;
fileStore.free(start, len);
// need to overwrite the last page,
// so that old end headers is not used
long pos = start + len - STORE_HEADER_LENGTH;
ByteBuffer header = ByteBuffer.allocate(STORE_HEADER_LENGTH);
write(pos, header);
lastChunkId--; lastChunkId--;
} }
rootChunkStart = last.start; lastChunkBlock = last.block;
writeStoreHeader(); writeStoreHeader();
// need to write the header at the end of the file as well,
// so that the old end header is not used
byte[] bytes = getStoreHeaderBytes();
ByteBuffer header = ByteBuffer.allocate(BLOCK_SIZE);
header.put(bytes);
header.rewind();
write(fileStore.size(), header);
readStoreHeader(); readStoreHeader();
readMeta(); readMeta();
} }
...@@ -1829,8 +1814,8 @@ public class MVStore { ...@@ -1829,8 +1814,8 @@ public class MVStore {
maps.remove(id); maps.remove(id);
} else { } else {
if (loadFromFile) { if (loadFromFile) {
String r = meta.get("root." + id); String r = meta.get("root." + Integer.toHexString(id));
long root = r == null ? 0 : Long.parseLong(r); long root = DataUtils.parseHexLong(r, 0);
m.setRootPos(root, -1); m.setRootPos(root, -1);
} }
} }
...@@ -1839,7 +1824,7 @@ public class MVStore { ...@@ -1839,7 +1824,7 @@ public class MVStore {
// rollback might have rolled back the stored chunk metadata as well // rollback might have rolled back the stored chunk metadata as well
Chunk c = chunks.get(lastChunkId - 1); Chunk c = chunks.get(lastChunkId - 1);
if (c != null) { if (c != null) {
meta.put("chunk." + c.id, c.asString()); meta.put(Chunk.getMetaKey(c.id), c.asString());
} }
currentVersion = version; currentVersion = version;
setWriteVersion(version); setWriteVersion(version);
...@@ -1914,9 +1899,10 @@ public class MVStore { ...@@ -1914,9 +1899,10 @@ public class MVStore {
!meta.containsKey("name." + newName), !meta.containsKey("name." + newName),
"A map named {0} already exists", newName); "A map named {0} already exists", newName);
markMetaChanged(); markMetaChanged();
String x = Integer.toHexString(id);
meta.remove("name." + oldName); meta.remove("name." + oldName);
meta.put("map." + id, map.asString(newName)); meta.put("map." + x, map.asString(newName));
meta.put("name." + newName, Integer.toString(id)); meta.put("name." + newName, x);
} }
/** /**
...@@ -1932,9 +1918,10 @@ public class MVStore { ...@@ -1932,9 +1918,10 @@ public class MVStore {
int id = map.getId(); int id = map.getId();
String name = getMapName(id); String name = getMapName(id);
markMetaChanged(); markMetaChanged();
meta.remove("map." + id); String x = Integer.toHexString(id);
meta.remove("map." + x);
meta.remove("name." + name); meta.remove("name." + name);
meta.remove("root." + id); meta.remove("root." + x);
maps.remove(id); maps.remove(id);
} }
...@@ -1945,7 +1932,7 @@ public class MVStore { ...@@ -1945,7 +1932,7 @@ public class MVStore {
* @return the name, or null if not found * @return the name, or null if not found
*/ */
public synchronized String getMapName(int id) { public synchronized String getMapName(int id) {
String m = meta.get("map." + id); String m = meta.get("map." + Integer.toHexString(id));
return m == null ? null : DataUtils.parseMap(m).get("name"); return m == null ? null : DataUtils.parseMap(m).get("name");
} }
......
...@@ -67,18 +67,21 @@ public class MVStoreTool { ...@@ -67,18 +67,21 @@ public class MVStoreTool {
file = FilePath.get(fileName).open("r"); file = FilePath.get(fileName).open("r");
long fileLength = file.size(); long fileLength = file.size();
pw.println("file " + fileName); pw.println("file " + fileName);
pw.println(" length " + fileLength); pw.println(" length " + Long.toHexString(fileLength));
ByteBuffer block = ByteBuffer.allocate(4096); ByteBuffer block = ByteBuffer.allocate(4096);
for (long pos = 0; pos < fileLength;) { for (long pos = 0; pos < fileLength;) {
block.rewind(); block.rewind();
DataUtils.readFully(file, pos, block); DataUtils.readFully(file, pos, block);
block.rewind(); block.rewind();
if (block.get() != '{') { if (block.get() != '{') {
continue; block.position(MVStore.BLOCK_SIZE - MVStore.STORE_HEADER_LENGTH);
if (block.get() != '{') {
continue;
}
} }
byte headerType = block.get(); byte headerType = block.get();
if (headerType == 'H') { if (headerType == 'H') {
pw.println(" store header at " + pos); pw.println(" store header at " + Long.toHexString(pos));
pw.println(" " + new String(block.array(), "UTF-8").trim()); pw.println(" " + new String(block.array(), "UTF-8").trim());
pos += blockSize; pos += blockSize;
continue; continue;
...@@ -89,29 +92,32 @@ public class MVStoreTool { ...@@ -89,29 +92,32 @@ public class MVStoreTool {
} }
block.position(0); block.position(0);
Chunk c = Chunk.fromHeader(block, pos); Chunk c = Chunk.fromHeader(block, pos);
int chunkLength = c.length; int chunkLength = c.blocks * MVStore.BLOCK_SIZE;
pw.println(" " + c.toString()); pw.println(" " + c.toString());
ByteBuffer chunk = ByteBuffer.allocate(chunkLength); ByteBuffer chunk = ByteBuffer.allocate(chunkLength);
DataUtils.readFully(file, pos, chunk); DataUtils.readFully(file, pos, chunk);
int p = block.position(); int p = block.position();
pos = (pos + chunkLength + blockSize) / blockSize * blockSize; pos += chunkLength;
chunkLength -= p; int remaining = c.pageCount;
while (chunkLength > 0) { while (remaining > 0) {
chunk.position(p); chunk.position(p);
int pageLength = chunk.getInt(); int pageLength = chunk.getInt();
// check value (ignored) // check value (ignored)
chunk.getShort(); chunk.getShort();
long mapId = DataUtils.readVarInt(chunk); int mapId = DataUtils.readVarInt(chunk);
int len = DataUtils.readVarInt(chunk); int len = DataUtils.readVarInt(chunk);
int type = chunk.get(); int type = chunk.get();
boolean compressed = (type & 2) != 0; boolean compressed = (type & 2) != 0;
boolean node = (type & 1) != 0; boolean node = (type & 1) != 0;
pw.println(" map " + mapId + " at " + p + " " + pw.println(
(node ? "node" : "leaf") + " " + " map " + Integer.toHexString(mapId) +
(compressed ? "compressed " : "") + " at " + Long.toHexString(p) + " " +
"len: " + pageLength + " entries: " + len); (node ? " node" : " leaf") +
(compressed ? " compressed" : "") +
" len: " + Integer.toHexString(pageLength) +
" entries: " + Integer.toHexString(len));
p += pageLength; p += pageLength;
chunkLength -= pageLength; remaining--;
if (mapId == 0 && !compressed) { if (mapId == 0 && !compressed) {
String[] keys = new String[len]; String[] keys = new String[len];
for (int i = 0; i < len; i++) { for (int i = 0; i < len; i++) {
...@@ -147,6 +153,12 @@ public class MVStoreTool { ...@@ -147,6 +153,12 @@ public class MVStoreTool {
} }
} }
} }
chunk.position(chunk.limit() - MVStore.STORE_HEADER_LENGTH);
if (chunk.get() == '{' && chunk.get() == 'H') {
pw.println(" store header");
pw.println(" " + new String(chunk.array(), chunk.position() - 2,
MVStore.STORE_HEADER_LENGTH, "UTF-8").trim());
}
} }
} catch (IOException e) { } catch (IOException e) {
pw.println("ERROR: " + e); pw.println("ERROR: " + e);
...@@ -165,8 +177,9 @@ public class MVStoreTool { ...@@ -165,8 +177,9 @@ public class MVStoreTool {
} }
private static String getPosString(long pos) { private static String getPosString(long pos) {
return "pos " + pos + ", chunk " + DataUtils.getPageChunkId(pos) + return "pos " + Long.toHexString(pos) +
", offset " + DataUtils.getPageOffset(pos); ", chunk " + Integer.toHexString(DataUtils.getPageChunkId(pos)) +
", offset " + Integer.toHexString(DataUtils.getPageOffset(pos));
} }
......
...@@ -260,13 +260,13 @@ public class Page { ...@@ -260,13 +260,13 @@ public class Page {
public String toString() { public String toString() {
StringBuilder buff = new StringBuilder(); StringBuilder buff = new StringBuilder();
buff.append("id: ").append(System.identityHashCode(this)).append('\n'); buff.append("id: ").append(System.identityHashCode(this)).append('\n');
buff.append("pos: ").append(pos).append("\n"); buff.append("pos: ").append(Long.toHexString(pos)).append("\n");
for (int i = 0; i <= keyCount; i++) { for (int i = 0; i <= keyCount; i++) {
if (i > 0) { if (i > 0) {
buff.append(" "); buff.append(" ");
} }
if (children != null) { if (children != null) {
buff.append("[" + children[i] + "] "); buff.append("[" + Long.toHexString(children[i]) + "] ");
} }
if (i < keyCount) { if (i < keyCount) {
buff.append(keys[i]); buff.append(keys[i]);
......
...@@ -121,7 +121,8 @@ public class TestConcurrent extends TestMVStore { ...@@ -121,7 +121,8 @@ public class TestConcurrent extends TestMVStore {
chunkCount++; chunkCount++;
} }
} }
assertEquals(1, chunkCount); // the chunk metadata is not yet written
assertEquals(0, chunkCount);
s.close(); s.close();
} }
FileUtils.deleteRecursive("memFS:", false); FileUtils.deleteRecursive("memFS:", false);
......
...@@ -292,12 +292,12 @@ public class TestMVStore extends TestBase { ...@@ -292,12 +292,12 @@ public class TestMVStore extends TestBase {
}). }).
open(); open();
s.setAutoCommitDelay(10); s.setAutoCommitDelay(50);
MVMap<Integer, String> m; MVMap<Integer, String> m;
m = s.openMap("data"); m = s.openMap("data");
s.getFileStore().getFile().close(); s.getFileStore().getFile().close();
m.put(1, "Hello"); m.put(1, "Hello");
for (int i = 0; i < 100; i++) { for (int i = 0; i < 200; i++) {
if (exRef.get() != null) { if (exRef.get() != null) {
break; break;
} }
...@@ -597,7 +597,7 @@ public class TestMVStore extends TestBase { ...@@ -597,7 +597,7 @@ public class TestMVStore extends TestBase {
} }
s.close(); s.close();
int[] expectedReadsForCacheSize = { int[] expectedReadsForCacheSize = {
3407, 2590, 1924, 1440, 1096, 956, 918 3407, 2590, 1924, 1440, 1111, 956, 918
}; };
for (int cacheSize = 0; cacheSize <= 6; cacheSize += 4) { for (int cacheSize = 0; cacheSize <= 6; cacheSize += 4) {
int cacheMB = 1 + 3 * cacheSize; int cacheMB = 1 + 3 * cacheSize;
...@@ -612,8 +612,10 @@ public class TestMVStore extends TestBase { ...@@ -612,8 +612,10 @@ public class TestMVStore extends TestBase {
assertEquals(10240, x.length()); assertEquals(10240, x.length());
} }
} }
assertEquals(expectedReadsForCacheSize[cacheSize], long readCount = s.getFileStore().getReadCount();
s.getFileStore().getReadCount()); int expected = expectedReadsForCacheSize[cacheSize];
assertTrue("reads: " + readCount + " expected: " + expected,
Math.abs(100 - (100 * expected / readCount)) < 5);
s.close(); s.close();
} }
...@@ -648,8 +650,8 @@ public class TestMVStore extends TestBase { ...@@ -648,8 +650,8 @@ public class TestMVStore extends TestBase {
MVStore s = openStore(fileName); MVStore s = openStore(fileName);
long time = System.currentTimeMillis(); long time = System.currentTimeMillis();
assertEquals("1", s.getStoreHeader().get("format")); assertEquals("1", s.getStoreHeader().get("format"));
long creationTime = Long.parseLong(s.getStoreHeader() long creationTime = Long.parseLong(
.get("creationTime")); s.getStoreHeader().get("created"), 16);
assertTrue(Math.abs(time - creationTime) < 100); assertTrue(Math.abs(time - creationTime) < 100);
s.getStoreHeader().put("test", "123"); s.getStoreHeader().put("test", "123");
MVMap<Integer, Integer> map = s.openMap("test"); MVMap<Integer, Integer> map = s.openMap("test");
...@@ -684,7 +686,6 @@ public class TestMVStore extends TestBase { ...@@ -684,7 +686,6 @@ public class TestMVStore extends TestBase {
} }
} }
s.close(); s.close();
FilePath f = FilePath.get(fileName); FilePath f = FilePath.get(fileName);
int blockSize = 4 * 1024; int blockSize = 4 * 1024;
// test corrupt file headers // test corrupt file headers
...@@ -692,7 +693,7 @@ public class TestMVStore extends TestBase { ...@@ -692,7 +693,7 @@ public class TestMVStore extends TestBase {
FileChannel fc = f.open("rw"); FileChannel fc = f.open("rw");
if (i == 0) { if (i == 0) {
// corrupt the last block (the end header) // corrupt the last block (the end header)
fc.truncate(fc.size() - 4096); fc.write(ByteBuffer.allocate(256), fc.size() - 256);
} }
ByteBuffer buff = ByteBuffer.allocate(4 * 1024); ByteBuffer buff = ByteBuffer.allocate(4 * 1024);
fc.read(buff, i); fc.read(buff, i);
...@@ -1137,7 +1138,7 @@ public class TestMVStore extends TestBase { ...@@ -1137,7 +1138,7 @@ public class TestMVStore extends TestBase {
assertEquals(0, m.size()); assertEquals(0, m.size());
s.commit(); s.commit();
// ensure only nodes are read, but not leaves // ensure only nodes are read, but not leaves
assertEquals(42, s.getFileStore().getReadCount()); assertEquals(40, s.getFileStore().getReadCount());
assertEquals(1, s.getFileStore().getWriteCount()); assertEquals(1, s.getFileStore().getWriteCount());
s.close(); s.close();
} }
...@@ -1295,7 +1296,6 @@ public class TestMVStore extends TestBase { ...@@ -1295,7 +1296,6 @@ public class TestMVStore extends TestBase {
data.put("2", "World"); data.put("2", "World");
s.commit(); s.commit();
assertEquals(1, s.getCurrentVersion()); assertEquals(1, s.getCurrentVersion());
assertTrue(m.containsKey("chunk.1"));
assertFalse(m.containsKey("chunk.2")); assertFalse(m.containsKey("chunk.2"));
assertEquals("[data]", s.getMapNames().toString()); assertEquals("[data]", s.getMapNames().toString());
...@@ -1305,20 +1305,18 @@ public class TestMVStore extends TestBase { ...@@ -1305,20 +1305,18 @@ public class TestMVStore extends TestBase {
String id = s.getMetaMap().get("name.data"); String id = s.getMetaMap().get("name.data");
assertEquals("name:data", m.get("map." + id)); assertEquals("name:data", m.get("map." + id));
assertTrue(m.containsKey("chunk.1"));
assertEquals("Hello", data.put("1", "Hallo")); assertEquals("Hello", data.put("1", "Hallo"));
s.commit(); s.commit();
assertEquals("name:data", m.get("map." + id)); assertEquals("name:data", m.get("map." + id));
assertTrue(m.get("root.1").length() > 0); assertTrue(m.get("root.1").length() > 0);
assertTrue(m.containsKey("chunk.1")); assertTrue(m.containsKey("chunk.1"));
assertTrue(m.containsKey("chunk.2"));
assertEquals(2, s.getCurrentVersion()); assertEquals(2, s.getCurrentVersion());
s.rollbackTo(1); s.rollbackTo(1);
assertEquals("Hello", data.get("1")); assertEquals("Hello", data.get("1"));
assertEquals("World", data.get("2")); assertEquals("World", data.get("2"));
assertTrue(m.containsKey("chunk.1")); assertFalse(m.containsKey("chunk.1"));
assertFalse(m.containsKey("chunk.2")); assertFalse(m.containsKey("chunk.2"));
s.close(); s.close();
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论