Commit 619817be authored by Thomas Mueller

MVStore: the file format was changed slightly.

Parent 94d4218f
@@ -20,6 +20,7 @@ Change Log
 <h2>Next Version (unreleased)</h2>
 <ul><li>Issue 545: Unnecessary duplicate code was removed.
 </li><li>The profiler tool can now process files with full thread dumps.
+</li><li>MVStore: the file format was changed slightly.
 </li><li>MVStore mode: the CLOB and BLOB storage was re-implemented and is
 now much faster than with the PageStore (which is still the default storage).
 </li><li>MVStore mode: creating indexes is now much faster
...
@@ -15,7 +15,7 @@ import java.util.HashMap;
  * Chunks are page aligned (each page is usually 4096 bytes).
  * There are at most 67 million (2^26) chunks,
  * each chunk is at most 2 GB large.
- * File format:
+ * Chunk format:
  * 1 byte: 'c'
  * 4 bytes: length
  * 4 bytes: chunk id (an incrementing number)
@@ -26,6 +26,11 @@ import java.util.HashMap;
  */
 public class Chunk {
 
+    /**
+     * The maximum length of a chunk header, in bytes.
+     */
+    static final int MAX_HEADER_LENGTH = 1024;
+
     /**
      * The chunk id.
     */
@@ -93,26 +98,23 @@ public class Chunk {
      * @return the chunk
      */
    static Chunk fromHeader(ByteBuffer buff, long start) {
-        if (buff.get() != 'c') {
+        int pos = buff.position();
+        if (buff.get() != '{') {
            throw DataUtils.newIllegalStateException(
                    DataUtils.ERROR_FILE_CORRUPT,
                    "File corrupt reading chunk at position {0}", start);
        }
-        int length = buff.getInt();
-        int chunkId = buff.getInt();
-        int pageCount = buff.getInt();
-        long metaRootPos = buff.getLong();
-        long maxLength = buff.getLong();
-        long maxLengthLive = buff.getLong();
-        Chunk c = new Chunk(chunkId);
-        c.length = length;
-        c.pageCount = pageCount;
-        c.pageCountLive = pageCount;
-        c.start = start;
-        c.metaRootPos = metaRootPos;
-        c.maxLength = maxLength;
-        c.maxLengthLive = maxLengthLive;
-        return c;
+        byte[] data = new byte[Math.min(buff.remaining(), MAX_HEADER_LENGTH)];
+        // set the position to the start of the first page
+        buff.get(data);
+        for (int i = 0; i < data.length; i++) {
+            if (data[i] == '\n') {
+                buff.position(pos + i + 2);
+                break;
+            }
+        }
+        String s = new String(data, 0, data.length, DataUtils.UTF8);
+        return fromString(s);
    }

    /**
@@ -121,13 +123,10 @@ public class Chunk {
      * @param buff the target buffer
      */
    void writeHeader(WriteBuffer buff) {
-        buff.put((byte) 'c').
-            putInt(length).
-            putInt(id).
-            putInt(pageCount).
-            putLong(metaRootPos).
-            putLong(maxLength).
-            putLong(maxLengthLive);
+        buff.put((byte) '{');
+        buff.put(asString().getBytes(DataUtils.UTF8));
+        buff.put((byte) '}');
+        buff.put((byte) ' ');
    }

    /**
@@ -138,7 +137,7 @@ public class Chunk {
      */
    public static Chunk fromString(String s) {
        HashMap<String, String> map = DataUtils.parseMap(s);
-        int id = Integer.parseInt(map.get("id"));
+        int id = Integer.parseInt(map.get("chunk"));
        Chunk c = new Chunk(id);
        c.start = Long.parseLong(map.get("start"));
        c.length = Integer.parseInt(map.get("length"));
@@ -173,7 +172,7 @@ public class Chunk {
      */
    public String asString() {
        return
-                "id:" + id + "," +
+                "chunk:" + id + "," +
                "length:" + length + "," +
                "maxLength:" + maxLength + "," +
                "maxLengthLive:" + maxLengthLive + "," +
...
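Note on the change above: a chunk header is no longer a fixed binary record but a short UTF-8 text line, '{' followed by the key:value pairs from asString(), a closing '}', space padding, and a terminating newline (the newline is written by MVStore when the header is patched, see further below). The following standalone sketch uses plain Java only; the field names come from asString() above, but the values and the padding width are invented. It mirrors how fromHeader() locates the end of such a header.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class ChunkHeaderSketch {
    public static void main(String[] args) {
        // build a header the way writeHeader() does: '{' + key:value pairs + '}',
        // then space padding and a newline; the values here are invented
        String header = "{chunk:1,length:4096,maxLength:7680,maxLengthLive:7680}";
        byte[] block = (header + "         \n").getBytes(StandardCharsets.UTF_8);
        ByteBuffer buff = ByteBuffer.wrap(block);

        // mirror fromHeader(): check the leading '{', then scan for the newline
        // that ends the (space padded) header and position the buffer after it
        int pos = buff.position();
        if (buff.get() != '{') {
            throw new IllegalStateException("not a chunk header");
        }
        byte[] data = new byte[Math.min(buff.remaining(), 1024)];
        buff.get(data);
        for (int i = 0; i < data.length; i++) {
            if (data[i] == '\n') {
                buff.position(pos + i + 2);
                break;
            }
        }
        String s = new String(data, 0, data.length, StandardCharsets.UTF_8);
        System.out.println("header text: " + s.trim());
        System.out.println("first page starts at: " + buff.position());
    }
}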
@@ -570,7 +570,7 @@ public class DataUtils {
        }
        buff.append(key).append(':');
        String v = value.toString();
-        if (v.indexOf(',') < 0 && v.indexOf('\"') < 0) {
+        if (v.indexOf(',') < 0 && v.indexOf('\"') < 0 && v.indexOf('}') < 0) {
            buff.append(value);
        } else {
            buff.append('\"');
@@ -595,6 +595,9 @@ public class DataUtils {
    public static HashMap<String, String> parseMap(String s) {
        HashMap<String, String> map = New.hashMap();
        for (int i = 0, size = s.length(); i < size;) {
+            if (s.charAt(i) == '}') {
+                break;
+            }
            int startKey = i;
            i = s.indexOf(':', i);
            if (i < 0) {
@@ -607,6 +610,9 @@ public class DataUtils {
                char c = s.charAt(i++);
                if (c == ',') {
                    break;
+                } else if (c == '}') {
+                    i--;
+                    break;
                } else if (c == '\"') {
                    while (i < size) {
                        c = s.charAt(i++);
@@ -641,7 +647,8 @@ public class DataUtils {
    public static int getFletcher32(byte[] bytes, int length) {
        int s1 = 0xffff, s2 = 0xffff;
        for (int i = 0; i < length;) {
-            for (int end = Math.min(i + 718, length); i < end;) {
+            // reduce after 360 words (each word is two bytes)
+            for (int end = Math.min(i + 720, length); i < end;) {
                int x = ((bytes[i++] & 0xff) << 8) | (bytes[i++] & 0xff);
                s2 += s1 += x;
            }
@@ -735,8 +742,8 @@ public class DataUtils {
            Object a = arguments[i];
            if (!(a instanceof Exception)) {
                String s = a == null ? "null" : a.toString();
-                if (s.length() > 100) {
-                    s = s.substring(0, 100);
+                if (s.length() > 1000) {
+                    s = s.substring(0, 1000) + "...";
                }
                arguments[i] = s;
            }
...
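The getFletcher32() change above only widens the inner block from 718 to 720 bytes so that the code matches the new comment (360 two-byte words per reduction). For context, here is a minimal standalone sketch of this Fletcher-32 variant: big-endian 16-bit words, both sums seeded with 0xffff, and a modular reduction after each block. The loop body is taken from the diff; the final reduction and the (s2 << 16) | s1 combination are not visible in this hunk and are assumed from the usual Fletcher-32 definition, so treat the tail as an approximation.

import java.nio.charset.StandardCharsets;

public class Fletcher32Sketch {

    // Fletcher-32 over the first 'length' bytes; 'length' is assumed to be even,
    // mirroring DataUtils.getFletcher32 as far as it is visible in the diff
    static int fletcher32(byte[] bytes, int length) {
        int s1 = 0xffff, s2 = 0xffff;
        for (int i = 0; i < length;) {
            // reduce after 360 words (720 bytes) to keep the running sums small
            for (int end = Math.min(i + 720, length); i < end;) {
                int x = ((bytes[i++] & 0xff) << 8) | (bytes[i++] & 0xff);
                s2 += s1 += x;
            }
            s1 = (s1 & 0xffff) + (s1 >>> 16);
            s2 = (s2 & 0xffff) + (s2 >>> 16);
        }
        // assumed tail: one more reduction, then combine the two 16-bit sums
        s1 = (s1 & 0xffff) + (s1 >>> 16);
        s2 = (s2 & 0xffff) + (s2 >>> 16);
        return (s2 << 16) | s1;
    }

    public static void main(String[] args) {
        byte[] data = "{H:2,blockSize:4096,format:1".getBytes(StandardCharsets.UTF_8);
        // the store header computes the checksum over an even number of bytes
        System.out.println(Integer.toHexString(fletcher32(data, data.length / 2 * 2)));
    }
}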
@@ -30,13 +30,15 @@ import org.h2.util.New;
 /*
 File format:
-header: (blockSize) bytes
-header: (blockSize) bytes
+store header: (blockSize) bytes
+store header: (blockSize) bytes
 [ chunk ] *
 (there are two headers for security at the beginning of the file,
-and there is a header after each chunk)
+and there is a store header at the end of each chunk)
 
-header:
-H:3,...
+store header:
+{H:2,...
 
 Format:
 Current store header:
@@ -63,8 +65,6 @@ pageCountLive -> livePages, maxLength -> max, maxLengthLive -> liveMax,
 metaRootPos -> root (offset))
 +, if different: maxLive:1030,pagesLive:30
 
-compression: support multiple algorithms
-
 TODO:
 Documentation
@@ -148,8 +148,7 @@ MVStore:
     specially for large pages (when using the StreamStore)
 - StreamStore: split blocks similar to rsync crypto, where the split is made
     "if the sum of the past 8196 bytes divides by 4096 with zero remainder"
-- Compression: try using a bloom filter before trying to match
-- DataType: change to reading and writing arrays, not individual entries
+- Compression: try using a bloom filter (64 bit) before trying to match
 
 */
@@ -336,7 +335,6 @@ public class MVStore {
            creationTime = 0;
            creationTime = getTime();
            lastCommitTime = creationTime;
-            storeHeader.put("H", "3");
            storeHeader.put("blockSize", "" + BLOCK_SIZE);
            storeHeader.put("format", "" + FORMAT_WRITE);
            storeHeader.put("creationTime", "" + creationTime);
@@ -625,7 +623,7 @@ public class MVStore {
            HashMap<String, String> m;
            try {
                m = DataUtils.parseMap(s);
-            } catch (IllegalArgumentException e) {
+            } catch (IllegalStateException e) {
                continue;
            }
            String f = m.remove("fletcher");
@@ -664,7 +662,7 @@ public class MVStore {
    }

    private byte[] getStoreHeaderBytes() {
-        StringBuilder buff = new StringBuilder();
+        StringBuilder buff = new StringBuilder("{H:2");
        storeHeader.put("lastMapId", "" + lastMapId);
        storeHeader.put("chunk", "" + lastChunkId);
        storeHeader.put("rootChunk", "" + rootChunkStart);
@@ -673,6 +671,7 @@ public class MVStore {
        byte[] bytes = buff.toString().getBytes(DataUtils.UTF8);
        int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
        DataUtils.appendMap(buff, "fletcher", Integer.toHexString(checksum));
+        buff.append("}\n");
        bytes = buff.toString().getBytes(DataUtils.UTF8);
        if (bytes.length > BLOCK_SIZE) {
            throw DataUtils.newIllegalStateException(
@@ -887,8 +886,11 @@ public class MVStore {
        }
        Chunk c;
        c = new Chunk(++lastChunkId);
+        c.pageCount = Integer.MAX_VALUE;
+        c.pageCountLive = Integer.MAX_VALUE;
        c.maxLength = Long.MAX_VALUE;
        c.maxLengthLive = Long.MAX_VALUE;
+        c.metaRootPos = Long.MAX_VALUE;
        c.start = Long.MAX_VALUE;
        c.length = Integer.MAX_VALUE;
        c.time = time;
@@ -924,6 +926,9 @@ public class MVStore {
        WriteBuffer buff = getWriteBuffer();
        // need to patch the header later
        c.writeHeader(buff);
+        long endHeader = buff.position();
+        c.pageCount = 0;
+        c.pageCountLive = 0;
        c.maxLength = 0;
        c.maxLengthLive = 0;
        for (MVMap<?, ?> m : changed) {
@@ -973,6 +978,10 @@ public class MVStore {
        c.metaRootPos = metaRoot.getPos();
        buff.position(0);
        c.writeHeader(buff);
+        while (buff.position() < endHeader - 1) {
+            buff.put((byte) ' ');
+        }
+        buff.put((byte) '\n');
        rootChunkStart = filePos;
        revertTemp(storeVersion);
@@ -1185,7 +1194,7 @@ public class MVStore {
    }

    private Chunk readChunkHeader(long start) {
-        ByteBuffer buff = fileStore.readFully(start, 40);
+        ByteBuffer buff = fileStore.readFully(start, Chunk.MAX_HEADER_LENGTH);
        return Chunk.fromHeader(buff, start);
    }
...
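Taken together, the MVStore changes above mean the store header block is now a single text line: it starts with the literal "{H:2", is extended with DataUtils.appendMap(), gets a "fletcher" checksum computed over an even-length prefix, and is closed with "}" and a newline. Below is a rough sketch of how such a line is assembled, following getStoreHeaderBytes() above; it assumes org.h2.mvstore.DataUtils is on the classpath, includes only fields visible in this diff, and all field values are invented.

import java.nio.charset.StandardCharsets;
import org.h2.mvstore.DataUtils;

public class StoreHeaderSketch {
    public static void main(String[] args) {
        StringBuilder buff = new StringBuilder("{H:2");
        // a few of the fields written by getStoreHeaderBytes(); values are made up
        DataUtils.appendMap(buff, "blockSize", "4096");
        DataUtils.appendMap(buff, "format", "1");
        DataUtils.appendMap(buff, "creationTime", "1383000000000");
        DataUtils.appendMap(buff, "lastMapId", "7");
        DataUtils.appendMap(buff, "chunk", "3");
        DataUtils.appendMap(buff, "rootChunk", "12288");
        // checksum over an even number of bytes, appended before the closing brace
        byte[] bytes = buff.toString().getBytes(StandardCharsets.UTF_8);
        int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
        DataUtils.appendMap(buff, "fletcher", Integer.toHexString(checksum));
        buff.append("}\n");
        // prints something like {H:2,blockSize:4096,format:1,...,fletcher:xxxxxxxx}
        System.out.print(buff);
    }
}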
@@ -73,30 +73,24 @@ public class MVStoreTool {
            block.rewind();
            DataUtils.readFully(file, pos, block);
            block.rewind();
-            int tag = block.get();
-            if (tag == 'H') {
-                pw.println(" header at " + pos);
+            if (block.get() != '{') {
+                continue;
+            }
+            byte headerType = block.get();
+            if (headerType == 'H') {
+                pw.println(" store header at " + pos);
                pw.println(" " + new String(block.array(), "UTF-8").trim());
                pos += blockSize;
                continue;
            }
-            if (tag != 'c') {
+            if (headerType != 'c') {
                pos += blockSize;
                continue;
            }
-            int chunkLength = block.getInt();
-            int chunkId = block.getInt();
-            int pageCount = block.getInt();
-            long metaRootPos = block.getLong();
-            long maxLength = block.getLong();
-            long maxLengthLive = block.getLong();
-            pw.println(" chunk " + chunkId +
-                    " at " + pos +
-                    " length " + chunkLength +
-                    " pageCount " + pageCount +
-                    " root " + getPosString(metaRootPos) +
-                    " maxLength " + maxLength +
-                    " maxLengthLive " + maxLengthLive);
+            block.position(0);
+            Chunk c = Chunk.fromHeader(block, pos);
+            int chunkLength = c.length;
+            pw.println(" " + c.toString());
            ByteBuffer chunk = ByteBuffer.allocate(chunkLength);
            DataUtils.readFully(file, pos, chunk);
            int p = block.position();
...
@@ -758,11 +758,7 @@ public class Page {
            buff = ByteBuffer.allocate(l);
            compressor.expand(comp, 0, compLen, buff.array(), buff.arrayOffset(), l);
        }
-        DataType keyType = map.getKeyType();
-        for (int i = 0; i < len; i++) {
-            Object k = keyType.read(buff);
-            keys[i] = k;
-        }
+        map.getKeyType().read(buff, keys, len, true);
        if (node) {
            childCount = len + 1;
            children = new long[len + 1];
@@ -780,11 +776,7 @@ public class Page {
            totalCount = total;
        } else {
            values = new Object[len];
-            DataType valueType = map.getValueType();
-            for (int i = 0; i < len; i++) {
-                Object v = valueType.read(buff);
-                values[i] = v;
-            }
+            map.getValueType().read(buff, values, len, false);
            totalCount = len;
        }
        recalculateMemory();
@@ -807,10 +799,7 @@ public class Page {
            putVarInt(len).
            put((byte) type);
        int compressStart = buff.position();
-        DataType keyType = map.getKeyType();
-        for (int i = 0; i < len; i++) {
-            keyType.write(buff, keys[i]);
-        }
+        map.getKeyType().write(buff, keys, len, true);
        if (type == DataUtils.PAGE_TYPE_NODE) {
            for (int i = 0; i <= len; i++) {
                buff.putLong(children[i]);
@@ -819,10 +808,7 @@ public class Page {
                buff.putVarLong(counts[i]);
            }
        } else {
-            DataType valueType = map.getValueType();
-            for (int i = 0; i < len; i++) {
-                valueType.write(buff, values[i]);
-            }
+            map.getValueType().write(buff, values, len, false);
        }
        MVStore store = map.getStore();
        if (store.getCompress()) {
...
@@ -1509,6 +1509,20 @@ public class TransactionStore {
            return Long.signum(comp);
        }

+        @Override
+        public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+            for (int i = 0; i < len; i++) {
+                obj[i] = read(buff);
+            }
+        }
+
+        @Override
+        public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+            for (int i = 0; i < len; i++) {
+                write(buff, obj[i]);
+            }
+        }
+
        @Override
        public void write(WriteBuffer buff, Object obj) {
            VersionedValue v = (VersionedValue) obj;
@@ -1578,6 +1592,20 @@ public class TransactionStore {
            return 0;
        }

+        @Override
+        public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+            for (int i = 0; i < len; i++) {
+                obj[i] = read(buff);
+            }
+        }
+
+        @Override
+        public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+            for (int i = 0; i < len; i++) {
+                write(buff, obj[i]);
+            }
+        }
+
        @Override
        public void write(WriteBuffer buff, Object obj) {
            Object[] array = (Object[]) obj;
...
@@ -144,6 +144,20 @@ public class ValueDataType implements DataType {
        return v == null ? 0 : v.getMemory();
    }

+    @Override
+    public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+        for (int i = 0; i < len; i++) {
+            obj[i] = read(buff);
+        }
+    }
+
+    @Override
+    public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+        for (int i = 0; i < len; i++) {
+            write(buff, obj[i]);
+        }
+    }
+
    @Override
    public Object read(ByteBuffer buff) {
        return readValue(buff);
...
@@ -53,6 +53,20 @@ public class SpatialDataType implements DataType {
        return 40 + dimensions * 4;
    }

+    @Override
+    public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+        for (int i = 0; i < len; i++) {
+            obj[i] = read(buff);
+        }
+    }
+
+    @Override
+    public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+        for (int i = 0; i < len; i++) {
+            write(buff, obj[i]);
+        }
+    }
+
    @Override
    public void write(WriteBuffer buff, Object obj) {
        SpatialKey k = (SpatialKey) obj;
...
@@ -34,13 +34,23 @@ public interface DataType {
    int getMemory(Object obj);

    /**
-     * Write the object.
+     * Write an object.
     *
     * @param buff the target buffer
     * @param obj the value
     */
    void write(WriteBuffer buff, Object obj);

+    /**
+     * Write a list of objects.
+     *
+     * @param buff the target buffer
+     * @param obj the objects
+     * @param len the number of objects to write
+     * @param key whether the objects are keys
+     */
+    void write(WriteBuffer buff, Object[] obj, int len, boolean key);
+
    /**
     * Read an object.
     *
@@ -49,5 +59,15 @@ public interface DataType {
     */
    Object read(ByteBuffer buff);

+    /**
+     * Read a list of objects.
+     *
+     * @param buff the target buffer
+     * @param obj the objects
+     * @param len the number of objects to read
+     * @param key whether the objects are keys
+     */
+    void read(ByteBuffer buff, Object[] obj, int len, boolean key);
+
 }
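The two bulk methods added to the interface above are implemented throughout this commit as plain loops over the existing single-object read and write (see the classes that follow). As a usage sketch, a minimal custom DataType for boxed Integer values could look like this; it assumes the usual org.h2.mvstore and org.h2.mvstore.type packages and the interface's existing compare() method, which is not shown in this diff.

import java.nio.ByteBuffer;
import org.h2.mvstore.DataUtils;
import org.h2.mvstore.WriteBuffer;
import org.h2.mvstore.type.DataType;

// A toy DataType for boxed Integer values, showing the two new bulk methods
// implemented by looping over the single-object read/write, as this commit does.
// Package names and compare() are assumptions not visible in the diff.
public class IntegerDataType implements DataType {

    @Override
    public int compare(Object a, Object b) {
        return ((Integer) a).compareTo((Integer) b);
    }

    @Override
    public int getMemory(Object obj) {
        // rough per-object estimate
        return 24;
    }

    @Override
    public void write(WriteBuffer buff, Object obj) {
        buff.putVarInt((Integer) obj);
    }

    @Override
    public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
        for (int i = 0; i < len; i++) {
            write(buff, obj[i]);
        }
    }

    @Override
    public Object read(ByteBuffer buff) {
        return DataUtils.readVarInt(buff);
    }

    @Override
    public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
        for (int i = 0; i < len; i++) {
            obj[i] = read(buff);
        }
    }
}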
@@ -27,6 +27,20 @@ public class StringDataType implements DataType {
        return 24 + 2 * obj.toString().length();
    }

+    @Override
+    public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+        for (int i = 0; i < len; i++) {
+            obj[i] = read(buff);
+        }
+    }
+
+    @Override
+    public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+        for (int i = 0; i < len; i++) {
+            write(buff, obj[i]);
+        }
+    }
+
    @Override
    public String read(ByteBuffer buff) {
        int len = DataUtils.readVarInt(buff);
...
@@ -59,6 +59,20 @@ public class RowDataType implements DataType {
        return memory;
    }

+    @Override
+    public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+        for (int i = 0; i < len; i++) {
+            obj[i] = read(buff);
+        }
+    }
+
+    @Override
+    public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+        for (int i = 0; i < len; i++) {
+            write(buff, obj[i]);
+        }
+    }
+
    @Override
    public Object[] read(ByteBuffer buff) {
        int len = DataUtils.readVarInt(buff);
...
@@ -79,15 +79,17 @@ public class TestDataUtils extends TestBase {
        DataUtils.appendMap(buff, "b", ",");
        DataUtils.appendMap(buff, "c", "1,2");
        DataUtils.appendMap(buff, "d", "\"test\"");
-        assertEquals(":,a:1,b:\",\",c:\"1,2\",d:\"\\\"test\\\"\"", buff.toString());
+        DataUtils.appendMap(buff, "e", "}");
+        assertEquals(":,a:1,b:\",\",c:\"1,2\",d:\"\\\"test\\\"\",e:\"}\"", buff.toString());
        HashMap<String, String> m = DataUtils.parseMap(buff.toString());
-        assertEquals(5, m.size());
+        assertEquals(6, m.size());
        assertEquals("", m.get(""));
        assertEquals("1", m.get("a"));
        assertEquals(",", m.get("b"));
        assertEquals("1,2", m.get("c"));
        assertEquals("\"test\"", m.get("d"));
+        assertEquals("}", m.get("e"));
    }

    private void testMapRandomized() {
...
@@ -597,7 +597,7 @@ public class TestMVStore extends TestBase {
        }
        s.close();
        int[] expectedReadsForCacheSize = {
-                3406, 2590, 1924, 1440, 1102, 956, 918
+                3407, 2590, 1924, 1440, 1096, 956, 918
        };
        for (int cacheSize = 0; cacheSize <= 6; cacheSize += 4) {
            int cacheMB = 1 + 3 * cacheSize;
@@ -647,7 +647,7 @@ public class TestMVStore extends TestBase {
        String fileName = getBaseDir() + "/testFileHeader.h3";
        MVStore s = openStore(fileName);
        long time = System.currentTimeMillis();
-        assertEquals("3", s.getStoreHeader().get("H"));
+        assertEquals("1", s.getStoreHeader().get("format"));
        long creationTime = Long.parseLong(s.getStoreHeader()
                .get("creationTime"));
        assertTrue(Math.abs(time - creationTime) < 100);
...