Commit 831e6937 authored by Thomas Mueller

MVStore: the file format was changed slightly.

Parent b9726376
Three source diffs could not be displayed because they are too large.
......@@ -262,7 +262,8 @@ public class Comparison extends Condition {
* @param l the first value
* @param r the second value
* @param compareType the compare type
* @return true if the comparison indicated by the comparison type evaluates to true
* @return true if the comparison indicated by the comparison type evaluates
* to true
*/
static boolean compareNotNull(Database database, Value l, Value r, int compareType) {
boolean result;
......
......@@ -16,6 +16,7 @@ import java.util.Map.Entry;
import java.util.Properties;
import org.h2.constant.ErrorCode;
import org.h2.engine.Constants;
import org.h2.jdbc.JdbcSQLException;
import org.h2.util.SortedProperties;
import org.h2.util.StringUtils;
......@@ -46,7 +47,7 @@ public class DbException extends RuntimeException {
// message: translated message + english
// (otherwise certain applications don't work)
if (translations != null) {
Properties p = SortedProperties.fromLines(new String(translations, "UTF-8"));
Properties p = SortedProperties.fromLines(new String(translations, Constants.UTF8));
for (Entry<Object, Object> e : p.entrySet()) {
String key = (String) e.getKey();
String translation = (String) e.getValue();
......
......@@ -30,7 +30,8 @@ public class Chunk {
/**
* The length of the chunk footer. The longest footer is:
* chunk:ffffffff,block:ffffffffffffffff,version:ffffffffffffffff,fletcher:ffffffff
* chunk:ffffffff,block:ffffffffffffffff,
* version:ffffffffffffffff,fletcher:ffffffff
*/
static final int FOOTER_LENGTH = 128;
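For orientation (not part of this commit): the longest footer listed in the comment above is 80 characters, so the fixed FOOTER_LENGTH of 128 bytes leaves room to spare and lets the store read the footer from a fixed offset before the end of a chunk. A minimal sketch that checks this, assuming the field widths shown in the comment:

// Sketch: the worst-case footer from the comment above is 80 characters,
// comfortably below FOOTER_LENGTH = 128.
public class FooterLengthSketch {
    public static void main(String[] args) {
        String longest = "chunk:ffffffff"
                + ",block:ffffffffffffffff"
                + ",version:ffffffffffffffff"
                + ",fletcher:ffffffff";
        System.out.println(longest.length()); // 80
    }
}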
......@@ -62,7 +63,7 @@ public class Chunk {
/**
* The sum of the max length of all pages.
*/
public long maxLength;
public long maxLen;
/**
* The sum of the max length of all pages that are in use.
......@@ -98,7 +99,6 @@ public class Chunk {
* The predicted position of the next chunk.
*/
public long next;
public long nextSize;
Chunk(int id) {
this.id = id;
......@@ -136,6 +136,7 @@ public class Chunk {
* Write the chunk header.
*
* @param buff the target buffer
* @param minLength the minimum length
*/
void writeChunkHeader(WriteBuffer buff, int minLength) {
long pos = buff.position();
......@@ -146,6 +147,12 @@ public class Chunk {
buff.put((byte) '\n');
}
/**
* Get the metadata key for the given chunk id.
*
* @param chunkId the chunk id
* @return the metadata key
*/
static String getMetaKey(int chunkId) {
return "chunk." + Integer.toHexString(chunkId);
}
......@@ -165,8 +172,8 @@ public class Chunk {
c.pageCount = DataUtils.readHexInt(map, "pages", 0);
c.pageCountLive = DataUtils.readHexInt(map, "livePages", c.pageCount);
c.mapId = DataUtils.readHexInt(map, "map", 0);
c.maxLength = DataUtils.readHexLong(map, "max", 0);
c.maxLenLive = DataUtils.readHexLong(map, "liveMax", c.maxLength);
c.maxLen = DataUtils.readHexLong(map, "max", 0);
c.maxLenLive = DataUtils.readHexLong(map, "liveMax", c.maxLen);
c.metaRootPos = DataUtils.readHexLong(map, "root", 0);
c.time = DataUtils.readHexLong(map, "time", 0);
c.version = DataUtils.readHexLong(map, "version", id);
......@@ -175,7 +182,7 @@ public class Chunk {
}
public int getFillRate() {
return (int) (maxLength == 0 ? 0 : 100 * maxLenLive / maxLength);
return (int) (maxLen == 0 ? 0 : 100 * maxLenLive / maxLen);
}
@Override
......@@ -198,14 +205,14 @@ public class Chunk {
DataUtils.appendMap(buff, "chunk", id);
DataUtils.appendMap(buff, "block", block);
DataUtils.appendMap(buff, "len", len);
if (maxLength != maxLenLive) {
if (maxLen != maxLenLive) {
DataUtils.appendMap(buff, "liveMax", maxLenLive);
}
if (pageCount != pageCountLive) {
DataUtils.appendMap(buff, "livePages", pageCountLive);
}
DataUtils.appendMap(buff, "map", mapId);
DataUtils.appendMap(buff, "max", maxLength);
DataUtils.appendMap(buff, "max", maxLen);
if (next != 0) {
DataUtils.appendMap(buff, "next", next);
}
......
......@@ -130,6 +130,11 @@ public class DataUtils {
*/
public static final int PAGE_MEMORY_CHILD = 16;
/**
* The marker size of a very large page.
*/
public static final int PAGE_LARGE = 2 * 1024 * 1024;
/**
* The UTF-8 character encoding format.
*/
......@@ -480,7 +485,7 @@ public class DataUtils {
/**
* Get the maximum length for the given code.
* For the code 31, Integer.MAX_VALUE is returned.
* For the code 31, PAGE_LARGE is returned.
*
* @param pos the position
* @return the maximum length
......@@ -488,7 +493,7 @@ public class DataUtils {
public static int getPageMaxLength(long pos) {
int code = (int) ((pos >> 1) & 31);
if (code == 31) {
return Integer.MAX_VALUE;
return PAGE_LARGE;
}
return (2 + (code & 1)) << ((code >> 1) + 4);
}
......
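For context, a sketch of the length-code table this change touches, using the formula from getPageMaxLength above: codes 0 to 30 encode page sizes from 32 bytes up to 1 MiB (alternating factors 2 and 3, doubling every second code), and code 31 now returns the PAGE_LARGE marker (2 MiB) instead of Integer.MAX_VALUE; the real length of such a page is read from the page itself (see Page.read further below).

// Sketch (not part of the commit): enumerate the page max-length codes.
public class PageLengthCodeSketch {
    public static void main(String[] args) {
        for (int code = 0; code <= 30; code++) {
            // same formula as DataUtils.getPageMaxLength
            int maxLength = (2 + (code & 1)) << ((code >> 1) + 4);
            System.out.println(code + " -> " + maxLength); // 32, 48, 64, 96, ...
        }
        System.out.println("31 -> PAGE_LARGE (" + (2 * 1024 * 1024) + ")");
    }
}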
......@@ -29,20 +29,6 @@ import org.h2.util.New;
/*
File format:
store header: (blockSize) bytes
store header: (blockSize) bytes
[ chunk ] *
(there are two headers for security at the beginning of the file,
and there is a store header at the end of each chunk)
H:2,block:0,blockSize:1000,chunk:0,created:143fd8e5767,format:1,fletcher:a3acedfb
chunk:1,block:2,len:1,map:6,max:1c0,pages:2,root:4000004c8c,time:20a,version:1
chunk:2,block:3,fletcher:ca8cb347
maybe split chunk metadata into static and variable
TODO:
Documentation
......@@ -66,10 +52,10 @@ TransactionStore:
MVStore:
- page format: for nodes, maybe store child pointers first,
so we can dump them in the MVStoreTool even if the dataType is unknown
- ensure data is overwritten eventually if the system doesn't have a
real-time clock (Raspberry Pi) and if there are few writes per startup
- when opening, verify the footer of the chunk (also when following next pointers)
- test max length sum with length code 31 (which is Integer.MAX_VALUE)
- maybe change the length code to have lower gaps
- test chunk id rollover
- document and review the file format
......@@ -129,6 +115,9 @@ MVStore:
"if the sum of the past 8196 bytes divides by 4096 with zero remainder"
- Compression: try using a bloom filter (64 bit) before trying to match
- LIRS cache: maybe remove 'mask' field, and dynamically grow the arrays
- chunk metadata: maybe split into static and variable,
or use a small page size for metadata
- data type "string": maybe use prefix compression for keys
*/
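For orientation (not part of the commit): the store header and the chunk metadata both use the comma-separated key:value form shown in the comment above, with numeric values written in hexadecimal. MVStore parses these lines with DataUtils.parseMap and DataUtils.readHexLong; the stand-in below is a simplified sketch that ignores any quoting rules.

import java.util.HashMap;
import java.util.Map;

// Simplified stand-in for DataUtils.parseMap: split a header line of the
// form "key:value,key:value"; values are hex numbers or plain strings.
public class HeaderParseSketch {
    static Map<String, String> parse(String s) {
        Map<String, String> map = new HashMap<>();
        for (String pair : s.trim().split(",")) {
            int idx = pair.indexOf(':');
            map.put(pair.substring(0, idx), pair.substring(idx + 1));
        }
        return map;
    }
    public static void main(String[] args) {
        Map<String, String> h = parse("H:2,block:0,blockSize:1000,chunk:0,"
                + "created:143fd8e5767,format:1,fletcher:a3acedfb");
        System.out.println(Long.parseLong(h.get("blockSize"), 16)); // 4096
    }
}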
......@@ -521,7 +510,7 @@ public class MVStore {
boolean validHeader = false;
// we don't know yet which chunk and version are the newest
long newestVersion = -1;
long newestChunkBlock = -1;
long chunkBlock = -1;
// read the first two blocks
ByteBuffer fileHeaderBlocks = fileStore.readFully(0, 2 * BLOCK_SIZE);
byte[] buff = new byte[BLOCK_SIZE];
......@@ -550,7 +539,7 @@ public class MVStore {
if (version > newestVersion) {
newestVersion = version;
fileHeader.putAll(m);
newestChunkBlock = DataUtils.readHexLong(m, "block", 0);
chunkBlock = DataUtils.readHexLong(m, "block", 0);
creationTime = DataUtils.readHexLong(m, "created", 0);
validHeader = true;
}
......@@ -579,78 +568,60 @@ public class MVStore {
format, FORMAT_READ);
}
lastStoredVersion = -1;
chunks.clear();
// read the chunk footer of the last block of the file
ByteBuffer lastBlock = fileStore.readFully(
fileStore.size() - Chunk.FOOTER_LENGTH, Chunk.FOOTER_LENGTH);
buff = new byte[Chunk.FOOTER_LENGTH];
lastBlock.get(buff);
// the following can fail for various reasons
try {
String s = new String(buff, DataUtils.LATIN).trim();
HashMap<String, String> m = DataUtils.parseMap(s);
int check = DataUtils.readHexInt(m, "fletcher", 0);
m.remove("fletcher");
s = s.substring(0, s.lastIndexOf("fletcher") - 1);
byte[] bytes = s.getBytes(DataUtils.LATIN);
int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
if (check == checksum) {
int chunk = DataUtils.readHexInt(m, "chunk", 0);
long version = DataUtils.readHexLong(m, "version", chunk);
if (version > newestVersion) {
newestVersion = version;
newestChunkBlock = DataUtils.readHexLong(m, "block", 0);
validHeader = true;
Chunk footer = readChunkFooter(fileStore.size());
if (footer != null) {
if (footer.version > newestVersion) {
newestVersion = footer.version;
chunkBlock = footer.block;
}
}
} catch (Exception e) {
// ignore
if (chunkBlock <= 0) {
// no chunk
return;
}
// follow the chain of next chunks
long testChunkBlock = newestChunkBlock;
// read the chunk header and footer,
// and follow the chain of next chunks
lastChunk = null;
while (true) {
Chunk header;
try {
header = readChunkHeader(testChunkBlock);
header = readChunkHeader(chunkBlock);
} catch (Exception e) {
// ignore the exception, but exit the loop
// invalid chunk header: ignore, but stop
break;
}
if (header.version < newestVersion) {
// we have reached the end
break;
}
newestChunkBlock = testChunkBlock;
footer = readChunkFooter((chunkBlock + header.len) * BLOCK_SIZE);
if (footer == null || footer.id != header.id) {
// invalid chunk footer, or the wrong one
break;
}
lastChunk = header;
newestVersion = header.version;
if (header.next == 0 || header.next >= fileStore.size() / BLOCK_SIZE) {
// no (valid) next
break;
}
testChunkBlock = header.next;
chunkBlock = header.next;
}
if (newestChunkBlock > 0) {
readMeta(newestChunkBlock);
}
}
private void readMeta(long chunkBlock) {
chunks.clear();
Chunk header = readChunkHeader(chunkBlock);
if (header.block == Long.MAX_VALUE) {
throw DataUtils.newIllegalStateException(
DataUtils.ERROR_FILE_CORRUPT,
"Chunk {0} is invalid", header.id);
if (lastChunk == null) {
// no valid chunk
return;
}
lastChunk = header;
lastMapId = header.mapId;
currentVersion = header.version;
lastMapId = lastChunk.mapId;
currentVersion = lastChunk.version;
setWriteVersion(currentVersion);
chunks.put(header.id, header);
meta.setRootPos(header.metaRootPos, -1);
// we can load the chunk in any order,
// because loading chunk metadata
// might recursively load another chunk
chunks.put(lastChunk.id, lastChunk);
meta.setRootPos(lastChunk.metaRootPos, -1);
// load the chunk metadata: we can load in any order,
// because loading chunk metadata might recursively load another chunk
for (Iterator<String> it = meta.keyIterator("chunk."); it.hasNext();) {
String s = it.next();
if (!s.startsWith("chunk.")) {
......@@ -679,6 +650,40 @@ public class MVStore {
}
}
/**
* Try to read a chunk footer.
*
* @param end the end of the chunk
* @return the chunk, or null if not successful
*/
private Chunk readChunkFooter(long end) {
// read the chunk footer of the last block of the file
ByteBuffer lastBlock = fileStore.readFully(
end - Chunk.FOOTER_LENGTH, Chunk.FOOTER_LENGTH);
byte[] buff = new byte[Chunk.FOOTER_LENGTH];
lastBlock.get(buff);
// the following can fail for various reasons
try {
String s = new String(buff, DataUtils.LATIN).trim();
HashMap<String, String> m = DataUtils.parseMap(s);
int check = DataUtils.readHexInt(m, "fletcher", 0);
m.remove("fletcher");
s = s.substring(0, s.lastIndexOf("fletcher") - 1);
byte[] bytes = s.getBytes(DataUtils.LATIN);
int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
if (check == checksum) {
int chunk = DataUtils.readHexInt(m, "chunk", 0);
Chunk c = new Chunk(chunk);
c.version = DataUtils.readHexLong(m, "version", 0);
c.block = DataUtils.readHexLong(m, "block", 0);
return c;
}
} catch (Exception e) {
// ignore
}
return null;
}
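/**
 * Sketch (assumption, not part of the commit): a generic Fletcher-32 over
 * 16-bit words, to illustrate why the checksum above is computed over an
 * even number of bytes (bytes.length / 2 * 2). H2's DataUtils.getFletcher32
 * may differ in details such as byte order and when the modulo is applied.
 */
private static int fletcher32Sketch(byte[] bytes, int length) {
    int s1 = 0xffff, s2 = 0xffff;
    for (int i = 0; i < length; i += 2) {
        int x = ((bytes[i] & 0xff) << 8) | (bytes[i + 1] & 0xff);
        s1 = (s1 + x) % 0xffff;
        s2 = (s2 + s1) % 0xffff;
    }
    return (s2 << 16) | s1;
}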
private void writeFileHeader() {
StringBuilder buff = new StringBuilder();
if (lastChunk != null) {
......@@ -904,7 +909,7 @@ public class MVStore {
c.pageCount = Integer.MAX_VALUE;
c.pageCountLive = Integer.MAX_VALUE;
c.maxLength = Long.MAX_VALUE;
c.maxLen = Long.MAX_VALUE;
c.maxLenLive = Long.MAX_VALUE;
c.metaRootPos = Long.MAX_VALUE;
c.block = Long.MAX_VALUE;
......@@ -950,7 +955,7 @@ public class MVStore {
int headerLength = buff.position();
c.pageCount = 0;
c.pageCountLive = 0;
c.maxLength = 0;
c.maxLen = 0;
c.maxLenLive = 0;
for (MVMap<?, ?> m : changed) {
Page p = m.getRoot();
......@@ -1007,7 +1012,6 @@ public class MVStore {
// calculate and set the likely next position
if (reuseSpace) {
int predictBlocks = c.len;
c.nextSize = predictBlocks;
long predictedNextStart = fileStore.allocate(predictBlocks * BLOCK_SIZE);
fileStore.free(predictedNextStart, predictBlocks * BLOCK_SIZE);
c.next = predictedNextStart / BLOCK_SIZE;
......@@ -1417,7 +1421,7 @@ public class MVStore {
long maxLengthSum = 0;
long maxLengthLiveSum = 0;
for (Chunk c : chunks.values()) {
maxLengthSum += c.maxLength;
maxLengthSum += c.maxLen;
maxLengthLiveSum += c.maxLenLive;
}
// the fill rate of all chunks combined
......@@ -1459,7 +1463,7 @@ public class MVStore {
long saved = 0;
Chunk move = null;
for (Chunk c : old) {
long save = c.maxLength - c.maxLenLive;
long save = c.maxLen - c.maxLenLive;
if (move != null) {
if (saved > minSaving) {
break;
......
......@@ -65,18 +65,18 @@ public class MVStoreTool {
int blockSize = MVStore.BLOCK_SIZE;
try {
file = FilePath.get(fileName).open("r");
long fileLength = file.size();
pw.println("file " + fileName);
pw.println(" length " + Long.toHexString(fileLength));
long fileSize = file.size();
int len = Long.toHexString(fileSize).length();
ByteBuffer block = ByteBuffer.allocate(4096);
for (long pos = 0; pos < fileLength;) {
for (long pos = 0; pos < fileSize;) {
block.rewind();
DataUtils.readFully(file, pos, block);
block.rewind();
int headerType = block.get();
if (headerType == 'H') {
pw.println(" store header at " + Long.toHexString(pos));
pw.println(" " + new String(block.array(), "UTF-8").trim());
pw.printf("%0" + len + "x fileHeader %s%n",
pos,
new String(block.array(), DataUtils.LATIN).trim());
pos += blockSize;
continue;
}
......@@ -87,7 +87,7 @@ public class MVStoreTool {
block.position(0);
Chunk c = Chunk.readChunkHeader(block, pos);
int length = c.len * MVStore.BLOCK_SIZE;
pw.println(" " + c.toString());
pw.printf("%n%0" + len + "x chunkHeader %s%n", pos, c.toString());
ByteBuffer chunk = ByteBuffer.allocate(length);
DataUtils.readFully(file, pos, chunk);
int p = block.position();
......@@ -95,62 +95,94 @@ public class MVStoreTool {
int remaining = c.pageCount;
while (remaining > 0) {
chunk.position(p);
int pageLength = chunk.getInt();
int pageSize = chunk.getInt();
// check value (ignored)
chunk.getShort();
int mapId = DataUtils.readVarInt(chunk);
int len = DataUtils.readVarInt(chunk);
int entries = DataUtils.readVarInt(chunk);
int type = chunk.get();
boolean compressed = (type & 2) != 0;
boolean node = (type & 1) != 0;
pw.println(
" map " + Integer.toHexString(mapId) +
" at " + Long.toHexString(p) + " " +
(node ? " node" : " leaf") +
(compressed ? " compressed" : "") +
" len: " + Integer.toHexString(pageLength) +
" entries: " + Integer.toHexString(len));
p += pageLength;
pw.printf(
"+%0" + len + "x %s, map %x, %d entries, %d bytes%n",
p,
(node ? "node" : "leaf") +
(compressed ? " compressed" : ""),
mapId,
node ? entries + 1 : entries,
pageSize);
p += pageSize;
remaining--;
if (mapId == 0 && !compressed) {
String[] keys = new String[len];
for (int i = 0; i < len; i++) {
String k = StringDataType.INSTANCE.read(chunk);
keys[i] = k;
if (compressed) {
continue;
}
String[] keys = new String[entries];
long[] children = null;
long[] counts = null;
if (node) {
long[] children = new long[len + 1];
for (int i = 0; i <= len; i++) {
children = new long[entries + 1];
for (int i = 0; i <= entries; i++) {
children[i] = chunk.getLong();
}
long[] counts = new long[len + 1];
for (int i = 0; i <= len; i++) {
counts = new long[entries + 1];
for (int i = 0; i <= entries; i++) {
long s = DataUtils.readVarLong(chunk);
counts[i] = s;
}
for (int i = 0; i < len; i++) {
pw.println(" < " + keys[i] + ": " +
counts[i] + " -> " + getPosString(children[i]));
}
pw.println(" >= : " +
counts[len] + " -> " + getPosString(children[len]));
if (mapId == 0) {
for (int i = 0; i < entries; i++) {
String k = StringDataType.INSTANCE.read(chunk);
keys[i] = k;
}
if (node) {
// meta map node
for (int i = 0; i < entries; i++) {
long cp = children[i];
pw.printf(" %d children < %s @ chunk %x +%0" + len + "x%n",
counts[i],
keys[i],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
}
long cp = children[entries];
pw.printf(" %d children >= %s @ chunk %x +%0" + len + "x%n",
counts[entries],
keys[entries],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
} else {
// meta map leaf
String[] values = new String[len];
for (int i = 0; i < len; i++) {
String[] values = new String[entries];
for (int i = 0; i < entries; i++) {
String v = StringDataType.INSTANCE.read(chunk);
values[i] = v;
}
for (int i = 0; i < len; i++) {
pw.println(" " + keys[i] + "=" + values[i]);
for (int i = 0; i < entries; i++) {
pw.println(" " + keys[i] + " = " + values[i]);
}
}
} else {
if (node) {
for (int i = 0; i <= entries; i++) {
long cp = children[i];
pw.printf(" %d children @ chunk %x +%0" + len + "x%n",
counts[i],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
}
}
}
}
chunk.position(chunk.limit() - Chunk.FOOTER_LENGTH);
pw.println(" chunk footer");
pw.println(" " + new String(chunk.array(), chunk.position(), Chunk.FOOTER_LENGTH, "UTF-8").trim());
int footerPos = chunk.limit() - Chunk.FOOTER_LENGTH;
chunk.position(footerPos);
pw.printf(
"+%0" + len + "x chunkFooter %s%n",
footerPos,
new String(chunk.array(), chunk.position(),
Chunk.FOOTER_LENGTH, DataUtils.LATIN).trim());
}
pw.printf("%n%0" + len + "x eof%n", fileSize);
} catch (IOException e) {
pw.println("ERROR: " + e);
e.printStackTrace(pw);
......@@ -163,15 +195,7 @@ public class MVStoreTool {
}
}
}
pw.println();
pw.flush();
}
private static String getPosString(long pos) {
return "pos " + Long.toHexString(pos) +
", chunk " + Integer.toHexString(DataUtils.getPageChunkId(pos)) +
", offset " + Integer.toHexString(DataUtils.getPageOffset(pos));
}
}
......@@ -178,7 +178,7 @@ public class Page {
long pos, long filePos, long fileSize) {
ByteBuffer buff;
int maxLength = DataUtils.getPageMaxLength(pos);
if (maxLength == Integer.MAX_VALUE) {
if (maxLength == DataUtils.PAGE_LARGE) {
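// the length code 31 is only a marker for a very large page; the real
// length is stored as an int at the start of the page, so read a
// fixed-size prefix first and then re-read with the actual length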
buff = fileStore.readFully(filePos, 128);
maxLength = buff.getInt();
// read the first bytes again
......@@ -758,7 +758,6 @@ public class Page {
buff = ByteBuffer.allocate(l);
compressor.expand(comp, 0, compLen, buff.array(), buff.arrayOffset(), l);
}
map.getKeyType().read(buff, keys, len, true);
if (node) {
childCount = len + 1;
children = new long[len + 1];
......@@ -774,7 +773,9 @@ public class Page {
counts[i] = s;
}
totalCount = total;
} else {
}
map.getKeyType().read(buff, keys, len, true);
if (!node) {
values = new Object[len];
map.getValueType().read(buff, values, len, false);
totalCount = len;
......@@ -799,7 +800,6 @@ public class Page {
putVarInt(len).
put((byte) type);
int compressStart = buff.position();
map.getKeyType().write(buff, keys, len, true);
if (type == DataUtils.PAGE_TYPE_NODE) {
for (int i = 0; i <= len; i++) {
buff.putLong(children[i]);
......@@ -807,7 +807,9 @@ public class Page {
for (int i = 0; i <= len; i++) {
buff.putVarLong(counts[i]);
}
} else {
}
map.getKeyType().write(buff, keys, len, true);
if (type == DataUtils.PAGE_TYPE_LEAF) {
map.getValueType().write(buff, values, len, false);
}
MVStore store = map.getStore();
......@@ -840,7 +842,7 @@ public class Page {
pos = DataUtils.getPagePos(chunkId, start, pageLength, type);
store.cachePage(pos, this, getMemory());
long max = DataUtils.getPageMaxLength(pos);
chunk.maxLength += max;
chunk.maxLen += max;
chunk.maxLenLive += max;
chunk.pageCount++;
chunk.pageCountLive++;
......
......@@ -393,7 +393,7 @@ public class WebApp {
try {
tool.runTool(argList);
out.flush();
String o = new String(outBuff.toByteArray(), "UTF-8");
String o = new String(outBuff.toByteArray(), Constants.UTF8);
String result = PageParser.escapeHtml(o);
session.put("toolResult", result);
} catch (Exception e) {
......
......@@ -450,7 +450,7 @@ public class WebServer implements Service {
trace("translation: "+language);
byte[] trans = getFile("_text_"+language+".prop");
trace(" "+new String(trans));
text = SortedProperties.fromLines(new String(trans, "UTF-8"));
text = SortedProperties.fromLines(new String(trans, Constants.UTF8));
// remove starting # (if not translated yet)
for (Entry<Object, Object> entry : text.entrySet()) {
String value = (String) entry.getValue();
......
......@@ -197,7 +197,7 @@ public class Recover extends Tool implements DataHandler {
* INTERNAL
*/
public static Reader readClob(String fileName) throws IOException {
return new BufferedReader(new InputStreamReader(readBlob(fileName), "UTF-8"));
return new BufferedReader(new InputStreamReader(readBlob(fileName), Constants.UTF8));
}
/**
......@@ -273,7 +273,7 @@ public class Recover extends Tool implements DataHandler {
*/
public static Reader readClobMap(Connection conn, long lobId, long precision) throws Exception {
InputStream in = readBlobMap(conn, lobId, precision);
return new BufferedReader(new InputStreamReader(in, "UTF-8"));
return new BufferedReader(new InputStreamReader(in, Constants.UTF8));
}
private void trace(String message) {
......
......@@ -25,6 +25,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import org.h2.constant.ErrorCode;
import org.h2.constant.SysProperties;
import org.h2.engine.Constants;
import org.h2.message.DbException;
import org.h2.store.fs.FileUtils;
......@@ -312,7 +313,7 @@ public class SourceCompiler {
copyInThread(p.getInputStream(), buff);
copyInThread(p.getErrorStream(), buff);
p.waitFor();
String err = new String(buff.toByteArray(), "UTF-8");
String err = new String(buff.toByteArray(), Constants.UTF8);
throwSyntaxError(err);
return p.exitValue();
} catch (Exception e) {
......@@ -343,7 +344,7 @@ public class SourceCompiler {
"-d", COMPILE_DIR,
"-encoding", "UTF-8",
javaFile.getAbsolutePath() });
String err = new String(buff.toByteArray(), "UTF-8");
String err = new String(buff.toByteArray(), Constants.UTF8);
throwSyntaxError(err);
} catch (Exception e) {
throw DbException.convert(e);
......
......@@ -633,7 +633,7 @@ public class Transfer {
if (magic != LOB_MAGIC) {
throw DbException.get(ErrorCode.CONNECTION_BROKEN_1, "magic=" + magic);
}
byte[] small = new String(buff).getBytes("UTF-8");
byte[] small = new String(buff).getBytes(Constants.UTF8);
return ValueLobDb.createSmallLob(Value.CLOB, small, length);
}
Value v = session.getDataHandler().getLobStorage().createClob(new DataReader(in), length);
......
......@@ -252,7 +252,7 @@ public class TestDataUtils extends TestBase {
assertEquals(max, DataUtils.parseHexLong(hex));
assertEquals(Chunk.MAX_ID, DataUtils.getPageChunkId(max));
assertEquals(Integer.MAX_VALUE, DataUtils.getPageOffset(max));
assertEquals(Integer.MAX_VALUE, DataUtils.getPageMaxLength(max));
assertEquals(DataUtils.PAGE_LARGE, DataUtils.getPageMaxLength(max));
assertEquals(DataUtils.PAGE_TYPE_NODE, DataUtils.getPageType(max));
long overflow = DataUtils.getPagePos(Chunk.MAX_ID + 1,
......
......@@ -14,6 +14,7 @@ import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicReference;
import org.h2.mvstore.Chunk;
import org.h2.mvstore.Cursor;
import org.h2.mvstore.DataUtils;
import org.h2.mvstore.FileStore;
......@@ -48,6 +49,8 @@ public class TestMVStore extends TestBase {
public void test() throws Exception {
FileUtils.deleteRecursive(getBaseDir(), true);
FileUtils.createDirectories(getBaseDir());
testFileFormatExample();
testMaxChunkLength();
testCacheInfo();
testRollback();
testVersionsToKeep();
......@@ -100,6 +103,37 @@ public class TestMVStore extends TestBase {
testLargerThan2G();
}
private void testFileFormatExample() {
String fileName = getBaseDir() + "/testFileFormatExample.h3";
MVStore s = MVStore.open(fileName);
MVMap<Integer, String> map = s.openMap("data");
for (int i = 0; i < 400; i++) {
map.put(i, "Hello");
}
s.commit();
for (int i = 0; i < 100; i++) {
map.put(0, "Hi");
}
s.commit();
s.close();
// MVStoreTool.dump(fileName);
}
private void testMaxChunkLength() {
String fileName = getBaseDir() + "/testMaxChunkLength.h3";
MVStore s = new MVStore.Builder().fileName(fileName).open();
MVMap<Integer, byte[]> map = s.openMap("data");
map.put(0, new byte[2 * 1024 * 1024]);
s.commit();
map.put(1, new byte[10 * 1024]);
s.commit();
MVMap<String, String> meta = s.getMetaMap();
Chunk c = Chunk.fromString(meta.get("chunk.1"));
assertTrue(c.maxLen < Integer.MAX_VALUE);
assertTrue(c.maxLenLive < Integer.MAX_VALUE);
s.close();
}
private void testCacheInfo() {
String fileName = getBaseDir() + "/testCloseMap.h3";
MVStore s = new MVStore.Builder().fileName(fileName).cacheSize(2).open();
......
......@@ -751,3 +751,5 @@ sameorigin nobuffer francois hikari duske phromros thailand kritchai mendonca
maginatics jdbclint lint lsm unmappable adams douglas definer invoker
fmrn fmxxx fmday fml syyyy tzd nov iyy iyyy fmc fmb fmxx tzr btc yyfxyy scc syear
overwrote though randomize readability datagram rsync mongodb divides crypto
predicted prediction wojtek hops jurczyk cbtree predict vast assumption upside
adjusted lastly sgtatham