提交 831e6937，作者：Thomas Mueller

MVStore: the file format was changed slightly.

上级 b9726376
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -262,7 +262,8 @@ public class Comparison extends Condition {
* @param l the first value
* @param r the second value
* @param compareType the compare type
* @return true if the comparison indicated by the comparison type evaluates to true
* @return true if the comparison indicated by the comparison type evaluates
* to true
*/
static boolean compareNotNull(Database database, Value l, Value r, int compareType) {
boolean result;
......
......@@ -16,6 +16,7 @@ import java.util.Map.Entry;
import java.util.Properties;
import org.h2.constant.ErrorCode;
import org.h2.engine.Constants;
import org.h2.jdbc.JdbcSQLException;
import org.h2.util.SortedProperties;
import org.h2.util.StringUtils;
......@@ -46,7 +47,7 @@ public class DbException extends RuntimeException {
// message: translated message + english
// (otherwise certain applications don't work)
if (translations != null) {
Properties p = SortedProperties.fromLines(new String(translations, "UTF-8"));
Properties p = SortedProperties.fromLines(new String(translations, Constants.UTF8));
for (Entry<Object, Object> e : p.entrySet()) {
String key = (String) e.getKey();
String translation = (String) e.getValue();
......
......@@ -30,7 +30,8 @@ public class Chunk {
/**
* The length of the chunk footer. The longest footer is:
* chunk:ffffffff,block:ffffffffffffffff,version:ffffffffffffffff,fletcher:ffffffff
* chunk:ffffffff,block:ffffffffffffffff,
* version:ffffffffffffffff,fletcher:ffffffff
*/
static final int FOOTER_LENGTH = 128;
......@@ -62,7 +63,7 @@ public class Chunk {
/**
* The sum of the max length of all pages.
*/
public long maxLength;
public long maxLen;
/**
* The sum of the max length of all pages that are in use.
......@@ -98,7 +99,6 @@ public class Chunk {
* The predicted position of the next chunk.
*/
public long next;
public long nextSize;
Chunk(int id) {
this.id = id;
......@@ -136,6 +136,7 @@ public class Chunk {
* Write the chunk header.
*
* @param buff the target buffer
* @param minLength the minimum length
*/
void writeChunkHeader(WriteBuffer buff, int minLength) {
long pos = buff.position();
......@@ -146,6 +147,12 @@ public class Chunk {
buff.put((byte) '\n');
}
/**
 * Build the key under which the metadata of the given chunk is stored
 * in the meta map.
 *
 * @param chunkId the chunk id
 * @return the metadata key ("chunk." followed by the id in hexadecimal)
 */
static String getMetaKey(int chunkId) {
    StringBuilder buff = new StringBuilder("chunk.");
    buff.append(Integer.toHexString(chunkId));
    return buff.toString();
}
......@@ -165,8 +172,8 @@ public class Chunk {
c.pageCount = DataUtils.readHexInt(map, "pages", 0);
c.pageCountLive = DataUtils.readHexInt(map, "livePages", c.pageCount);
c.mapId = DataUtils.readHexInt(map, "map", 0);
c.maxLength = DataUtils.readHexLong(map, "max", 0);
c.maxLenLive = DataUtils.readHexLong(map, "liveMax", c.maxLength);
c.maxLen = DataUtils.readHexLong(map, "max", 0);
c.maxLenLive = DataUtils.readHexLong(map, "liveMax", c.maxLen);
c.metaRootPos = DataUtils.readHexLong(map, "root", 0);
c.time = DataUtils.readHexLong(map, "time", 0);
c.version = DataUtils.readHexLong(map, "version", id);
......@@ -175,7 +182,7 @@ public class Chunk {
}
public int getFillRate() {
return (int) (maxLength == 0 ? 0 : 100 * maxLenLive / maxLength);
return (int) (maxLen == 0 ? 0 : 100 * maxLenLive / maxLen);
}
@Override
......@@ -198,14 +205,14 @@ public class Chunk {
DataUtils.appendMap(buff, "chunk", id);
DataUtils.appendMap(buff, "block", block);
DataUtils.appendMap(buff, "len", len);
if (maxLength != maxLenLive) {
if (maxLen != maxLenLive) {
DataUtils.appendMap(buff, "liveMax", maxLenLive);
}
if (pageCount != pageCountLive) {
DataUtils.appendMap(buff, "livePages", pageCountLive);
}
DataUtils.appendMap(buff, "map", mapId);
DataUtils.appendMap(buff, "max", maxLength);
DataUtils.appendMap(buff, "max", maxLen);
if (next != 0) {
DataUtils.appendMap(buff, "next", next);
}
......
......@@ -130,6 +130,11 @@ public class DataUtils {
*/
public static final int PAGE_MEMORY_CHILD = 16;
/**
* The marker size of a very large page.
*/
public static final int PAGE_LARGE = 2 * 1024 * 1024;
/**
* The UTF-8 character encoding format.
*/
......@@ -480,7 +485,7 @@ public class DataUtils {
/**
* Get the maximum length for the given code.
* For the code 31, Integer.MAX_VALUE is returned.
* For the code 31, PAGE_LARGE is returned.
*
* @param pos the position
* @return the maximum length
......@@ -488,7 +493,7 @@ public class DataUtils {
public static int getPageMaxLength(long pos) {
int code = (int) ((pos >> 1) & 31);
if (code == 31) {
return Integer.MAX_VALUE;
return PAGE_LARGE;
}
return (2 + (code & 1)) << ((code >> 1) + 4);
}
......
......@@ -65,18 +65,18 @@ public class MVStoreTool {
int blockSize = MVStore.BLOCK_SIZE;
try {
file = FilePath.get(fileName).open("r");
long fileLength = file.size();
pw.println("file " + fileName);
pw.println(" length " + Long.toHexString(fileLength));
long fileSize = file.size();
int len = Long.toHexString(fileSize).length();
ByteBuffer block = ByteBuffer.allocate(4096);
for (long pos = 0; pos < fileLength;) {
for (long pos = 0; pos < fileSize;) {
block.rewind();
DataUtils.readFully(file, pos, block);
block.rewind();
int headerType = block.get();
if (headerType == 'H') {
pw.println(" store header at " + Long.toHexString(pos));
pw.println(" " + new String(block.array(), "UTF-8").trim());
pw.printf("%0" + len + "x fileHeader %s%n",
pos,
new String(block.array(), DataUtils.LATIN).trim());
pos += blockSize;
continue;
}
......@@ -87,7 +87,7 @@ public class MVStoreTool {
block.position(0);
Chunk c = Chunk.readChunkHeader(block, pos);
int length = c.len * MVStore.BLOCK_SIZE;
pw.println(" " + c.toString());
pw.printf("%n%0" + len + "x chunkHeader %s%n", pos, c.toString());
ByteBuffer chunk = ByteBuffer.allocate(length);
DataUtils.readFully(file, pos, chunk);
int p = block.position();
......@@ -95,62 +95,94 @@ public class MVStoreTool {
int remaining = c.pageCount;
while (remaining > 0) {
chunk.position(p);
int pageLength = chunk.getInt();
int pageSize = chunk.getInt();
// check value (ignored)
chunk.getShort();
int mapId = DataUtils.readVarInt(chunk);
int len = DataUtils.readVarInt(chunk);
int entries = DataUtils.readVarInt(chunk);
int type = chunk.get();
boolean compressed = (type & 2) != 0;
boolean node = (type & 1) != 0;
pw.println(
" map " + Integer.toHexString(mapId) +
" at " + Long.toHexString(p) + " " +
(node ? " node" : " leaf") +
(compressed ? " compressed" : "") +
" len: " + Integer.toHexString(pageLength) +
" entries: " + Integer.toHexString(len));
p += pageLength;
pw.printf(
"+%0" + len + "x %s, map %x, %d entries, %d bytes%n",
p,
(node ? "node" : "leaf") +
(compressed ? " compressed" : ""),
mapId,
node ? entries + 1 : entries,
pageSize);
p += pageSize;
remaining--;
if (mapId == 0 && !compressed) {
String[] keys = new String[len];
for (int i = 0; i < len; i++) {
String k = StringDataType.INSTANCE.read(chunk);
keys[i] = k;
if (compressed) {
continue;
}
String[] keys = new String[entries];
long[] children = null;
long[] counts = null;
if (node) {
long[] children = new long[len + 1];
for (int i = 0; i <= len; i++) {
children = new long[entries + 1];
for (int i = 0; i <= entries; i++) {
children[i] = chunk.getLong();
}
long[] counts = new long[len + 1];
for (int i = 0; i <= len; i++) {
counts = new long[entries + 1];
for (int i = 0; i <= entries; i++) {
long s = DataUtils.readVarLong(chunk);
counts[i] = s;
}
for (int i = 0; i < len; i++) {
pw.println(" < " + keys[i] + ": " +
counts[i] + " -> " + getPosString(children[i]));
}
pw.println(" >= : " +
counts[len] + " -> " + getPosString(children[len]));
if (mapId == 0) {
for (int i = 0; i < entries; i++) {
String k = StringDataType.INSTANCE.read(chunk);
keys[i] = k;
}
if (node) {
// meta map node
for (int i = 0; i < entries; i++) {
long cp = children[i];
pw.printf(" %d children < %s @ chunk %x +%0" + len + "x%n",
counts[i],
keys[i],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
}
long cp = children[entries];
pw.printf(" %d children >= %s @ chunk %x +%0" + len + "x%n",
counts[entries],
keys[entries],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
} else {
// meta map leaf
String[] values = new String[len];
for (int i = 0; i < len; i++) {
String[] values = new String[entries];
for (int i = 0; i < entries; i++) {
String v = StringDataType.INSTANCE.read(chunk);
values[i] = v;
}
for (int i = 0; i < len; i++) {
pw.println(" " + keys[i] + "=" + values[i]);
for (int i = 0; i < entries; i++) {
pw.println(" " + keys[i] + " = " + values[i]);
}
}
} else {
if (node) {
for (int i = 0; i <= entries; i++) {
long cp = children[i];
pw.printf(" %d children @ chunk %x +%0" + len + "x%n",
counts[i],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
}
}
}
}
chunk.position(chunk.limit() - Chunk.FOOTER_LENGTH);
pw.println(" chunk footer");
pw.println(" " + new String(chunk.array(), chunk.position(), Chunk.FOOTER_LENGTH, "UTF-8").trim());
int footerPos = chunk.limit() - Chunk.FOOTER_LENGTH;
chunk.position(footerPos);
pw.printf(
"+%0" + len + "x chunkFooter %s%n",
footerPos,
new String(chunk.array(), chunk.position(),
Chunk.FOOTER_LENGTH, DataUtils.LATIN).trim());
}
pw.printf("%n%0" + len + "x eof%n", fileSize);
} catch (IOException e) {
pw.println("ERROR: " + e);
e.printStackTrace(pw);
......@@ -163,15 +195,7 @@ public class MVStoreTool {
}
}
}
pw.println();
pw.flush();
}
/**
 * Format a page position for display: the raw position plus the
 * chunk id and offset encoded within it, all in hexadecimal.
 *
 * @param pos the page position
 * @return a human-readable description of the position
 */
private static String getPosString(long pos) {
    StringBuilder buff = new StringBuilder();
    buff.append("pos ").append(Long.toHexString(pos));
    buff.append(", chunk ").append(
            Integer.toHexString(DataUtils.getPageChunkId(pos)));
    buff.append(", offset ").append(
            Integer.toHexString(DataUtils.getPageOffset(pos)));
    return buff.toString();
}
}
......@@ -178,7 +178,7 @@ public class Page {
long pos, long filePos, long fileSize) {
ByteBuffer buff;
int maxLength = DataUtils.getPageMaxLength(pos);
if (maxLength == Integer.MAX_VALUE) {
if (maxLength == DataUtils.PAGE_LARGE) {
buff = fileStore.readFully(filePos, 128);
maxLength = buff.getInt();
// read the first bytes again
......@@ -758,7 +758,6 @@ public class Page {
buff = ByteBuffer.allocate(l);
compressor.expand(comp, 0, compLen, buff.array(), buff.arrayOffset(), l);
}
map.getKeyType().read(buff, keys, len, true);
if (node) {
childCount = len + 1;
children = new long[len + 1];
......@@ -774,7 +773,9 @@ public class Page {
counts[i] = s;
}
totalCount = total;
} else {
}
map.getKeyType().read(buff, keys, len, true);
if (!node) {
values = new Object[len];
map.getValueType().read(buff, values, len, false);
totalCount = len;
......@@ -799,7 +800,6 @@ public class Page {
putVarInt(len).
put((byte) type);
int compressStart = buff.position();
map.getKeyType().write(buff, keys, len, true);
if (type == DataUtils.PAGE_TYPE_NODE) {
for (int i = 0; i <= len; i++) {
buff.putLong(children[i]);
......@@ -807,7 +807,9 @@ public class Page {
for (int i = 0; i <= len; i++) {
buff.putVarLong(counts[i]);
}
} else {
}
map.getKeyType().write(buff, keys, len, true);
if (type == DataUtils.PAGE_TYPE_LEAF) {
map.getValueType().write(buff, values, len, false);
}
MVStore store = map.getStore();
......@@ -840,7 +842,7 @@ public class Page {
pos = DataUtils.getPagePos(chunkId, start, pageLength, type);
store.cachePage(pos, this, getMemory());
long max = DataUtils.getPageMaxLength(pos);
chunk.maxLength += max;
chunk.maxLen += max;
chunk.maxLenLive += max;
chunk.pageCount++;
chunk.pageCountLive++;
......
......@@ -393,7 +393,7 @@ public class WebApp {
try {
tool.runTool(argList);
out.flush();
String o = new String(outBuff.toByteArray(), "UTF-8");
String o = new String(outBuff.toByteArray(), Constants.UTF8);
String result = PageParser.escapeHtml(o);
session.put("toolResult", result);
} catch (Exception e) {
......
......@@ -450,7 +450,7 @@ public class WebServer implements Service {
trace("translation: "+language);
byte[] trans = getFile("_text_"+language+".prop");
trace(" "+new String(trans));
text = SortedProperties.fromLines(new String(trans, "UTF-8"));
text = SortedProperties.fromLines(new String(trans, Constants.UTF8));
// remove starting # (if not translated yet)
for (Entry<Object, Object> entry : text.entrySet()) {
String value = (String) entry.getValue();
......
......@@ -197,7 +197,7 @@ public class Recover extends Tool implements DataHandler {
* INTERNAL
*/
public static Reader readClob(String fileName) throws IOException {
return new BufferedReader(new InputStreamReader(readBlob(fileName), "UTF-8"));
return new BufferedReader(new InputStreamReader(readBlob(fileName), Constants.UTF8));
}
/**
......@@ -273,7 +273,7 @@ public class Recover extends Tool implements DataHandler {
*/
public static Reader readClobMap(Connection conn, long lobId, long precision) throws Exception {
InputStream in = readBlobMap(conn, lobId, precision);
return new BufferedReader(new InputStreamReader(in, "UTF-8"));
return new BufferedReader(new InputStreamReader(in, Constants.UTF8));
}
private void trace(String message) {
......
......@@ -25,6 +25,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import org.h2.constant.ErrorCode;
import org.h2.constant.SysProperties;
import org.h2.engine.Constants;
import org.h2.message.DbException;
import org.h2.store.fs.FileUtils;
......@@ -312,7 +313,7 @@ public class SourceCompiler {
copyInThread(p.getInputStream(), buff);
copyInThread(p.getErrorStream(), buff);
p.waitFor();
String err = new String(buff.toByteArray(), "UTF-8");
String err = new String(buff.toByteArray(), Constants.UTF8);
throwSyntaxError(err);
return p.exitValue();
} catch (Exception e) {
......@@ -343,7 +344,7 @@ public class SourceCompiler {
"-d", COMPILE_DIR,
"-encoding", "UTF-8",
javaFile.getAbsolutePath() });
String err = new String(buff.toByteArray(), "UTF-8");
String err = new String(buff.toByteArray(), Constants.UTF8);
throwSyntaxError(err);
} catch (Exception e) {
throw DbException.convert(e);
......
......@@ -633,7 +633,7 @@ public class Transfer {
if (magic != LOB_MAGIC) {
throw DbException.get(ErrorCode.CONNECTION_BROKEN_1, "magic=" + magic);
}
byte[] small = new String(buff).getBytes("UTF-8");
byte[] small = new String(buff).getBytes(Constants.UTF8);
return ValueLobDb.createSmallLob(Value.CLOB, small, length);
}
Value v = session.getDataHandler().getLobStorage().createClob(new DataReader(in), length);
......
......@@ -252,7 +252,7 @@ public class TestDataUtils extends TestBase {
assertEquals(max, DataUtils.parseHexLong(hex));
assertEquals(Chunk.MAX_ID, DataUtils.getPageChunkId(max));
assertEquals(Integer.MAX_VALUE, DataUtils.getPageOffset(max));
assertEquals(Integer.MAX_VALUE, DataUtils.getPageMaxLength(max));
assertEquals(DataUtils.PAGE_LARGE, DataUtils.getPageMaxLength(max));
assertEquals(DataUtils.PAGE_TYPE_NODE, DataUtils.getPageType(max));
long overflow = DataUtils.getPagePos(Chunk.MAX_ID + 1,
......
......@@ -14,6 +14,7 @@ import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicReference;
import org.h2.mvstore.Chunk;
import org.h2.mvstore.Cursor;
import org.h2.mvstore.DataUtils;
import org.h2.mvstore.FileStore;
......@@ -48,6 +49,8 @@ public class TestMVStore extends TestBase {
public void test() throws Exception {
FileUtils.deleteRecursive(getBaseDir(), true);
FileUtils.createDirectories(getBaseDir());
testFileFormatExample();
testMaxChunkLength();
testCacheInfo();
testRollback();
testVersionsToKeep();
......@@ -100,6 +103,37 @@ public class TestMVStore extends TestBase {
testLargerThan2G();
}
private void testFileFormatExample() {
    // Produce a small store file whose layout can be inspected
    // manually with MVStoreTool.dump (see the commented line below).
    String fileName = getBaseDir() + "/testFileFormatExample.h3";
    MVStore store = MVStore.open(fileName);
    MVMap<Integer, String> data = store.openMap("data");
    int key = 0;
    while (key < 400) {
        data.put(key, "Hello");
        key++;
    }
    store.commit();
    // repeatedly overwrite the same key, then commit once more
    for (int i = 0; i < 100; i++) {
        data.put(0, "Hi");
    }
    store.commit();
    store.close();
    // MVStoreTool.dump(fileName);
}
private void testMaxChunkLength() {
    // Store one very large value and one small value, each followed
    // by a commit, then inspect the first chunk's metadata.
    String fileName = getBaseDir() + "/testMaxChunkLength.h3";
    MVStore store = new MVStore.Builder().fileName(fileName).open();
    MVMap<Integer, byte[]> data = store.openMap("data");
    data.put(0, new byte[2 * 1024 * 1024]);
    store.commit();
    data.put(1, new byte[10 * 1024]);
    store.commit();
    MVMap<String, String> meta = store.getMetaMap();
    Chunk c = Chunk.fromString(meta.get("chunk.1"));
    // the recorded max lengths must be real sizes, not the
    // Integer.MAX_VALUE marker
    assertTrue(c.maxLen < Integer.MAX_VALUE);
    assertTrue(c.maxLenLive < Integer.MAX_VALUE);
    store.close();
}
private void testCacheInfo() {
String fileName = getBaseDir() + "/testCloseMap.h3";
MVStore s = new MVStore.Builder().fileName(fileName).cacheSize(2).open();
......
......@@ -751,3 +751,5 @@ sameorigin nobuffer francois hikari duske phromros thailand kritchai mendonca
maginatics jdbclint lint lsm unmappable adams douglas definer invoker
fmrn fmxxx fmday fml syyyy tzd nov iyy iyyy fmc fmb fmxx tzr btc yyfxyy scc syear
overwrote though randomize readability datagram rsync mongodb divides crypto
predicted prediction wojtek hops jurczyk cbtree predict vast assumption upside
adjusted lastly sgtatham
Markdown 格式
0%
您将 0 人添加到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论