Commit 831e6937 authored by Thomas Mueller

MVStore: the file format was changed slightly.

Parent b9726376
......@@ -18,11 +18,11 @@ Change Log
<h1>Change Log</h1>
<h2>Next Version (unreleased)</h2>
<ul><li>Referential integrity constraints sometimes used the wrong index,
such that updating a row in the referenced table incorrectly failed with
a constraint violation.
</li><li>The Polish translation was completed and corrected by Wojtek Jurczyk. Thanks a lot!
</li><li>Issue 545: Unnecessary duplicate code was removed.
</li><li>The profiler tool can now process files with full thread dumps.
</li><li>MVStore: the file format was changed slightly.
</li><li>MVStore mode: the CLOB and BLOB storage was re-implemented and is
......
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -329,7 +329,7 @@ public class FunctionAlias extends SchemaObjectBase {
public boolean isBufferResultSetToLocalTemp() {
return bufferResultSetToLocalTemp;
}
/**
* There may be multiple Java methods that match a function name.
* Each method must have a different number of parameters however.
......
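A minimal sketch (not part of this patch) of the rule described in the comment above: two Java methods may share one alias name as long as their parameter counts differ, and the call is resolved by argument count. The class, method and alias names here are made up for illustration.
public class AliasExample {
    // chosen when the alias is called with two arguments
    public static int add(int a, int b) {
        return a + b;
    }
    // chosen when the alias is called with three arguments
    public static int add(int a, int b, int c) {
        return a + b + c;
    }
}
// SQL side (assumed usage):
//   CREATE ALIAS ADD_NUMBERS FOR "AliasExample.add";
//   SELECT ADD_NUMBERS(1, 2), ADD_NUMBERS(1, 2, 3);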
......@@ -262,7 +262,8 @@ public class Comparison extends Condition {
* @param l the first value
* @param r the second value
* @param compareType the compare type
* @return true if the comparison indicated by the comparison type evaluates to true
* @return true if the comparison indicated by the comparison type evaluates
* to true
*/
static boolean compareNotNull(Database database, Value l, Value r, int compareType) {
boolean result;
......
......@@ -16,6 +16,7 @@ import java.util.Map.Entry;
import java.util.Properties;
import org.h2.constant.ErrorCode;
import org.h2.engine.Constants;
import org.h2.jdbc.JdbcSQLException;
import org.h2.util.SortedProperties;
import org.h2.util.StringUtils;
......@@ -46,7 +47,7 @@ public class DbException extends RuntimeException {
// message: translated message + english
// (otherwise certain applications don't work)
if (translations != null) {
Properties p = SortedProperties.fromLines(new String(translations, "UTF-8"));
Properties p = SortedProperties.fromLines(new String(translations, Constants.UTF8));
for (Entry<Object, Object> e : p.entrySet()) {
String key = (String) e.getKey();
String translation = (String) e.getValue();
......
......@@ -17,7 +17,7 @@ import java.util.HashMap;
* each chunk is at most 2 GB large.
*/
public class Chunk {
/**
* The maximum chunk id.
*/
......@@ -30,7 +30,8 @@ public class Chunk {
/**
* The length of the chunk footer. The longest footer is:
* chunk:ffffffff,block:ffffffffffffffff,version:ffffffffffffffff,fletcher:ffffffff
* chunk:ffffffff,block:ffffffffffffffff,
* version:ffffffffffffffff,fletcher:ffffffff
*/
static final int FOOTER_LENGTH = 128;
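For reference, a small sketch (not part of the patch) of how a footer in this format is assembled; it mirrors the getFooterBytes() method further down in this file, which appends key-value pairs via DataUtils.appendMap, with the complete footer occupying FOOTER_LENGTH (128) bytes. The values below are placeholders.
StringBuilder footer = new StringBuilder();
DataUtils.appendMap(footer, "chunk", 1);      // chunk id
DataUtils.appendMap(footer, "block", 2);      // start block of the chunk
DataUtils.appendMap(footer, "version", 1);    // chunk version
// a Fletcher checksum is appended as the final "fletcher" entry, giving a
// comma-separated key:value list such as
// chunk:1,block:2,version:1,fletcher:xxxxxxxx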
......@@ -62,7 +63,7 @@ public class Chunk {
/**
* The sum of the max length of all pages.
*/
public long maxLength;
public long maxLen;
/**
* The sum of the max length of all pages that are in use.
......@@ -93,12 +94,11 @@ public class Chunk {
* The last used map id.
*/
public int mapId;
/**
* The predicted position of the next chunk.
*/
public long next;
public long nextSize;
Chunk(int id) {
this.id = id;
......@@ -136,6 +136,7 @@ public class Chunk {
* Write the chunk header.
*
* @param buff the target buffer
* @param minLength the minimum length
*/
void writeChunkHeader(WriteBuffer buff, int minLength) {
long pos = buff.position();
......@@ -145,7 +146,13 @@ public class Chunk {
}
buff.put((byte) '\n');
}
/**
* Get the metadata key for the given chunk id.
*
* @param chunkId the chunk id
* @return the metadata key
*/
static String getMetaKey(int chunkId) {
return "chunk." + Integer.toHexString(chunkId);
}
......@@ -165,8 +172,8 @@ public class Chunk {
c.pageCount = DataUtils.readHexInt(map, "pages", 0);
c.pageCountLive = DataUtils.readHexInt(map, "livePages", c.pageCount);
c.mapId = DataUtils.readHexInt(map, "map", 0);
c.maxLength = DataUtils.readHexLong(map, "max", 0);
c.maxLenLive = DataUtils.readHexLong(map, "liveMax", c.maxLength);
c.maxLen = DataUtils.readHexLong(map, "max", 0);
c.maxLenLive = DataUtils.readHexLong(map, "liveMax", c.maxLen);
c.metaRootPos = DataUtils.readHexLong(map, "root", 0);
c.time = DataUtils.readHexLong(map, "time", 0);
c.version = DataUtils.readHexLong(map, "version", id);
......@@ -175,7 +182,7 @@ public class Chunk {
}
public int getFillRate() {
return (int) (maxLength == 0 ? 0 : 100 * maxLenLive / maxLength);
return (int) (maxLen == 0 ? 0 : 100 * maxLenLive / maxLen);
}
@Override
......@@ -198,14 +205,14 @@ public class Chunk {
DataUtils.appendMap(buff, "chunk", id);
DataUtils.appendMap(buff, "block", block);
DataUtils.appendMap(buff, "len", len);
if (maxLength != maxLenLive) {
if (maxLen != maxLenLive) {
DataUtils.appendMap(buff, "liveMax", maxLenLive);
}
if (pageCount != pageCountLive) {
DataUtils.appendMap(buff, "livePages", pageCountLive);
}
DataUtils.appendMap(buff, "map", mapId);
DataUtils.appendMap(buff, "max", maxLength);
DataUtils.appendMap(buff, "max", maxLen);
if (next != 0) {
DataUtils.appendMap(buff, "next", next);
}
......@@ -215,7 +222,7 @@ public class Chunk {
DataUtils.appendMap(buff, "version", version);
return buff.toString();
}
byte[] getFooterBytes() {
StringBuilder buff = new StringBuilder();
DataUtils.appendMap(buff, "chunk", id);
......
......@@ -130,11 +130,16 @@ public class DataUtils {
*/
public static final int PAGE_MEMORY_CHILD = 16;
/**
* The marker size of a very large page.
*/
public static final int PAGE_LARGE = 2 * 1024 * 1024;
/**
* The UTF-8 character encoding format.
*/
public static final Charset UTF8 = Charset.forName("UTF-8");
/**
* The ISO Latin character encoding format.
*/
......@@ -480,7 +485,7 @@ public class DataUtils {
/**
* Get the maximum length for the given code.
* For the code 31, Integer.MAX_VALUE is returned.
* For the code 31, PAGE_LARGE is returned.
*
* @param pos the position
* @return the maximum length
......@@ -488,7 +493,7 @@ public class DataUtils {
public static int getPageMaxLength(long pos) {
int code = (int) ((pos >> 1) & 31);
if (code == 31) {
return Integer.MAX_VALUE;
return PAGE_LARGE;
}
return (2 + (code & 1)) << ((code >> 1) + 4);
}
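A throwaway sketch (not from the patch) that simply evaluates the formula above for every length code, to show what the 5-bit code maps to; code 31 now yields PAGE_LARGE instead of Integer.MAX_VALUE.
public class PageMaxLengthDemo {
    public static void main(String[] args) {
        for (int code = 0; code <= 31; code++) {
            int max = (code == 31)
                    ? 2 * 1024 * 1024                        // DataUtils.PAGE_LARGE
                    : (2 + (code & 1)) << ((code >> 1) + 4); // same formula as above
            System.out.println("code " + code + " -> max page length " + max);
        }
    }
}
// prints: code 0 -> 32, code 1 -> 48, code 2 -> 64, ... code 30 -> 1048576,
// code 31 -> 2097152 (PAGE_LARGE)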
......@@ -559,7 +564,7 @@ public class DataUtils {
}
return buff;
}
/**
* Append a key-value pair to the string builder. Keys may not contain a
* colon. Values that contain a comma or a double quote are enclosed in
......@@ -856,7 +861,7 @@ public class DataUtils {
"Error parsing the value {0}", v, e);
}
}
/**
* Parse an unsigned, hex long.
*
......@@ -869,7 +874,7 @@ public class DataUtils {
if (x.length() == 16) {
// avoid problems with overflow
// in Java 8, this special case is not needed
return (Long.parseLong(x.substring(0, 8), 16) << 32) |
Long.parseLong(x.substring(8, 16), 16);
}
return Long.parseLong(x, 16);
......@@ -878,7 +883,7 @@ public class DataUtils {
"Error parsing the value {0}", x, e);
}
}
/**
* Parse an unsigned, hex long.
*
......@@ -896,7 +901,7 @@ public class DataUtils {
"Error parsing the value {0}", x, e);
}
}
/**
* Read a hex int value from a map.
*
......
......@@ -65,18 +65,18 @@ public class MVStoreTool {
int blockSize = MVStore.BLOCK_SIZE;
try {
file = FilePath.get(fileName).open("r");
long fileLength = file.size();
pw.println("file " + fileName);
pw.println(" length " + Long.toHexString(fileLength));
long fileSize = file.size();
int len = Long.toHexString(fileSize).length();
ByteBuffer block = ByteBuffer.allocate(4096);
for (long pos = 0; pos < fileLength;) {
for (long pos = 0; pos < fileSize;) {
block.rewind();
DataUtils.readFully(file, pos, block);
block.rewind();
int headerType = block.get();
if (headerType == 'H') {
pw.println(" store header at " + Long.toHexString(pos));
pw.println(" " + new String(block.array(), "UTF-8").trim());
pw.printf("%0" + len + "x fileHeader %s%n",
pos,
new String(block.array(), DataUtils.LATIN).trim());
pos += blockSize;
continue;
}
......@@ -87,7 +87,7 @@ public class MVStoreTool {
block.position(0);
Chunk c = Chunk.readChunkHeader(block, pos);
int length = c.len * MVStore.BLOCK_SIZE;
pw.println(" " + c.toString());
pw.printf("%n%0" + len + "x chunkHeader %s%n", pos, c.toString());
ByteBuffer chunk = ByteBuffer.allocate(length);
DataUtils.readFully(file, pos, chunk);
int p = block.position();
......@@ -95,62 +95,94 @@ public class MVStoreTool {
int remaining = c.pageCount;
while (remaining > 0) {
chunk.position(p);
int pageLength = chunk.getInt();
int pageSize = chunk.getInt();
// check value (ignored)
chunk.getShort();
int mapId = DataUtils.readVarInt(chunk);
int len = DataUtils.readVarInt(chunk);
int entries = DataUtils.readVarInt(chunk);
int type = chunk.get();
boolean compressed = (type & 2) != 0;
boolean node = (type & 1) != 0;
pw.println(
" map " + Integer.toHexString(mapId) +
" at " + Long.toHexString(p) + " " +
(node ? " node" : " leaf") +
(compressed ? " compressed" : "") +
" len: " + Integer.toHexString(pageLength) +
" entries: " + Integer.toHexString(len));
p += pageLength;
pw.printf(
"+%0" + len + "x %s, map %x, %d entries, %d bytes%n",
p,
(node ? "node" : "leaf") +
(compressed ? " compressed" : ""),
mapId,
node ? entries + 1 : entries,
pageSize);
p += pageSize;
remaining--;
if (mapId == 0 && !compressed) {
String[] keys = new String[len];
for (int i = 0; i < len; i++) {
if (compressed) {
continue;
}
String[] keys = new String[entries];
long[] children = null;
long[] counts = null;
if (node) {
children = new long[entries + 1];
for (int i = 0; i <= entries; i++) {
children[i] = chunk.getLong();
}
counts = new long[entries + 1];
for (int i = 0; i <= entries; i++) {
long s = DataUtils.readVarLong(chunk);
counts[i] = s;
}
}
if (mapId == 0) {
for (int i = 0; i < entries; i++) {
String k = StringDataType.INSTANCE.read(chunk);
keys[i] = k;
}
if (node) {
long[] children = new long[len + 1];
for (int i = 0; i <= len; i++) {
children[i] = chunk.getLong();
// meta map node
for (int i = 0; i < entries; i++) {
long cp = children[i];
pw.printf(" %d children < %s @ chunk %x +%0" + len + "x%n",
counts[i],
keys[i],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
}
long[] counts = new long[len + 1];
for (int i = 0; i <= len; i++) {
long s = DataUtils.readVarLong(chunk);
counts[i] = s;
}
for (int i = 0; i < len; i++) {
pw.println(" < " + keys[i] + ": " +
counts[i] + " -> " + getPosString(children[i]));
}
pw.println(" >= : " +
counts[len] + " -> " + getPosString(children[len]));
long cp = children[entries];
pw.printf(" %d children >= %s @ chunk %x +%0" + len + "x%n",
counts[entries],
keys[entries],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
} else {
// meta map leaf
String[] values = new String[len];
for (int i = 0; i < len; i++) {
String[] values = new String[entries];
for (int i = 0; i < entries; i++) {
String v = StringDataType.INSTANCE.read(chunk);
values[i] = v;
}
for (int i = 0; i < len; i++) {
pw.println(" " + keys[i] + "=" + values[i]);
for (int i = 0; i < entries; i++) {
pw.println(" " + keys[i] + " = " + values[i]);
}
}
}
} else {
if (node) {
for (int i = 0; i <= entries; i++) {
long cp = children[i];
pw.printf(" %d children @ chunk %x +%0" + len + "x%n",
counts[i],
DataUtils.getPageChunkId(cp),
DataUtils.getPageOffset(cp));
}
}
}
}
chunk.position(chunk.limit() - Chunk.FOOTER_LENGTH);
pw.println(" chunk footer");
pw.println(" " + new String(chunk.array(), chunk.position(), Chunk.FOOTER_LENGTH, "UTF-8").trim());
int footerPos = chunk.limit() - Chunk.FOOTER_LENGTH;
chunk.position(footerPos);
pw.printf(
"+%0" + len + "x chunkFooter %s%n",
footerPos,
new String(chunk.array(), chunk.position(),
Chunk.FOOTER_LENGTH, DataUtils.LATIN).trim());
}
pw.printf("%n%0" + len + "x eof%n", fileSize);
} catch (IOException e) {
pw.println("ERROR: " + e);
e.printStackTrace(pw);
......@@ -163,15 +195,7 @@ public class MVStoreTool {
}
}
}
pw.println();
pw.flush();
}
private static String getPosString(long pos) {
return "pos " + Long.toHexString(pos) +
", chunk " + Integer.toHexString(DataUtils.getPageChunkId(pos)) +
", offset " + Integer.toHexString(DataUtils.getPageOffset(pos));
}
}
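A quick usage sketch for producing the reworked dump output above; it relies only on the MVStoreTool.dump(fileName) call that the new testFileFormatExample() test further down references (commented out), and the file name is a placeholder.
// prints the file header, chunk headers, per-page summaries and chunk footers
// in the new printf-based layout
MVStoreTool.dump("data/test.h3");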
......@@ -178,7 +178,7 @@ public class Page {
long pos, long filePos, long fileSize) {
ByteBuffer buff;
int maxLength = DataUtils.getPageMaxLength(pos);
if (maxLength == Integer.MAX_VALUE) {
if (maxLength == DataUtils.PAGE_LARGE) {
buff = fileStore.readFully(filePos, 128);
maxLength = buff.getInt();
// read the first bytes again
......@@ -758,7 +758,6 @@ public class Page {
buff = ByteBuffer.allocate(l);
compressor.expand(comp, 0, compLen, buff.array(), buff.arrayOffset(), l);
}
map.getKeyType().read(buff, keys, len, true);
if (node) {
childCount = len + 1;
children = new long[len + 1];
......@@ -774,7 +773,9 @@ public class Page {
counts[i] = s;
}
totalCount = total;
} else {
}
map.getKeyType().read(buff, keys, len, true);
if (!node) {
values = new Object[len];
map.getValueType().read(buff, values, len, false);
totalCount = len;
......@@ -799,7 +800,6 @@ public class Page {
putVarInt(len).
put((byte) type);
int compressStart = buff.position();
map.getKeyType().write(buff, keys, len, true);
if (type == DataUtils.PAGE_TYPE_NODE) {
for (int i = 0; i <= len; i++) {
buff.putLong(children[i]);
......@@ -807,7 +807,9 @@ public class Page {
for (int i = 0; i <= len; i++) {
buff.putVarLong(counts[i]);
}
} else {
}
map.getKeyType().write(buff, keys, len, true);
if (type == DataUtils.PAGE_TYPE_LEAF) {
map.getValueType().write(buff, values, len, false);
}
MVStore store = map.getStore();
......@@ -840,7 +842,7 @@ public class Page {
pos = DataUtils.getPagePos(chunkId, start, pageLength, type);
store.cachePage(pos, this, getMemory());
long max = DataUtils.getPageMaxLength(pos);
chunk.maxLength += max;
chunk.maxLen += max;
chunk.maxLenLive += max;
chunk.pageCount++;
chunk.pageCountLive++;
......
......@@ -322,7 +322,7 @@ public class MVTableEngine implements TableEngine {
}
} catch (IllegalStateException e) {
throw DbException.get(ErrorCode.IO_EXCEPTION_1, e, "Closing");
}
}
}
/**
......
......@@ -1508,14 +1508,14 @@ public class TransactionStore {
}
return Long.signum(comp);
}
@Override
public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
obj[i] = read(buff);
}
}
@Override
public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
......@@ -1591,14 +1591,14 @@ public class TransactionStore {
}
return 0;
}
@Override
public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
obj[i] = read(buff);
}
}
@Override
public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
......
......@@ -143,14 +143,14 @@ public class ValueDataType implements DataType {
private static int getMemory(Value v) {
return v == null ? 0 : v.getMemory();
}
@Override
public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
obj[i] = read(buff);
}
}
@Override
public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
......
......@@ -52,14 +52,14 @@ public class SpatialDataType implements DataType {
public int getMemory(Object obj) {
return 40 + dimensions * 4;
}
@Override
public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
obj[i] = read(buff);
}
}
@Override
public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
......
......@@ -40,7 +40,7 @@ public interface DataType {
* @param obj the value
*/
void write(WriteBuffer buff, Object obj);
/**
* Write a list of objects.
*
......@@ -58,7 +58,7 @@ public interface DataType {
* @return the object
*/
Object read(ByteBuffer buff);
/**
* Read a list of objects.
*
......
......@@ -116,7 +116,7 @@ public class ObjectDataType implements DataType {
obj[i] = read(buff);
}
}
@Override
public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
......@@ -128,7 +128,7 @@ public class ObjectDataType implements DataType {
public void write(WriteBuffer buff, Object obj) {
last.write(buff, obj);
}
private AutoDetectDataType newType(int typeId) {
switch (typeId) {
case TYPE_NULL:
......@@ -270,7 +270,7 @@ public class ObjectDataType implements DataType {
/**
* Switch the last remembered type to match the type of the given object.
*
*
* @param obj the object
* @return the auto-detected type used
*/
......@@ -285,7 +285,7 @@ public class ObjectDataType implements DataType {
/**
* Check whether this object is a BigInteger.
*
*
* @param obj the object
* @return true if yes
*/
......@@ -295,7 +295,7 @@ public class ObjectDataType implements DataType {
/**
* Check whether this object is a BigDecimal.
*
*
* @param obj the object
* @return true if yes
*/
......@@ -305,7 +305,7 @@ public class ObjectDataType implements DataType {
/**
* Check whether this object is a date.
*
*
* @param obj the object
* @return true if yes
*/
......@@ -315,7 +315,7 @@ public class ObjectDataType implements DataType {
/**
* Check whether this object is an array.
*
*
* @param obj the object
* @return true if yes
*/
......@@ -325,7 +325,7 @@ public class ObjectDataType implements DataType {
/**
* Get the class id, or null if not found.
*
*
* @param clazz the class
* @return the class id or null
*/
......@@ -342,7 +342,7 @@ public class ObjectDataType implements DataType {
/**
* Serialize the object to a byte array.
*
*
* @param obj the object to serialize
* @return the byte array
*/
......@@ -360,7 +360,7 @@ public class ObjectDataType implements DataType {
/**
* De-serialize the byte array to an object.
*
*
* @param data the byte array
* @return the object
*/
......@@ -382,7 +382,7 @@ public class ObjectDataType implements DataType {
* is returned. If the contents and lengths are the same, 0 is returned.
* <p>
* This method interprets bytes as unsigned.
*
*
* @param data1 the first byte array (must not be null)
* @param data2 the second byte array (must not be null)
* @return the result of the comparison (-1, 1 or 0)
......@@ -442,7 +442,7 @@ public class ObjectDataType implements DataType {
public void write(WriteBuffer buff, Object o) {
getType(o).write(buff, o);
}
@Override
public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
......@@ -458,7 +458,7 @@ public class ObjectDataType implements DataType {
/**
* Get the type for the given object.
*
*
* @param o the object
* @return the type
*/
......@@ -468,7 +468,7 @@ public class ObjectDataType implements DataType {
/**
* Read an object from the buffer.
*
*
* @param buff the buffer
* @param tag the first byte of the object (usually the type)
* @return the read object
......
......@@ -26,14 +26,14 @@ public class StringDataType implements DataType {
public int getMemory(Object obj) {
return 24 + 2 * obj.toString().length();
}
@Override
public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
obj[i] = read(buff);
}
}
@Override
public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
......
......@@ -150,7 +150,7 @@
90126=Baza danych nie jest trwała
90127=Wynik nie jest uaktualnialny. Kwerenda musi wybrać wszystkie kolumny z klucza unikalnego. Tylko jedna tabela może zostać wybrana.
90128=Wynik nie jest typu SCROLLABLE i nie może być zresetowany. Być może powinieneś użyć conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ..).
90129=Transakcja {0} nie znaleziona
90130=Ta metoda jest niedozwolona dla sparametryzowanych kwerend (prepared statement); użyj zwykłej kwerendy
90131=Jednoczesna zmiana w tabeli {0}: inna transakcja zaktualizowała lub usunęła ten sam wiersz
90132=Agregacja {0} nie znaleziona
......
......@@ -393,7 +393,7 @@ public class WebApp {
try {
tool.runTool(argList);
out.flush();
String o = new String(outBuff.toByteArray(), "UTF-8");
String o = new String(outBuff.toByteArray(), Constants.UTF8);
String result = PageParser.escapeHtml(o);
session.put("toolResult", result);
} catch (Exception e) {
......
......@@ -450,7 +450,7 @@ public class WebServer implements Service {
trace("translation: "+language);
byte[] trans = getFile("_text_"+language+".prop");
trace(" "+new String(trans));
text = SortedProperties.fromLines(new String(trans, "UTF-8"));
text = SortedProperties.fromLines(new String(trans, Constants.UTF8));
// remove starting # (if not translated yet)
for (Entry<Object, Object> entry : text.entrySet()) {
String value = (String) entry.getValue();
......
......@@ -197,7 +197,7 @@ public class Recover extends Tool implements DataHandler {
* INTERNAL
*/
public static Reader readClob(String fileName) throws IOException {
return new BufferedReader(new InputStreamReader(readBlob(fileName), "UTF-8"));
return new BufferedReader(new InputStreamReader(readBlob(fileName), Constants.UTF8));
}
/**
......@@ -273,7 +273,7 @@ public class Recover extends Tool implements DataHandler {
*/
public static Reader readClobMap(Connection conn, long lobId, long precision) throws Exception {
InputStream in = readBlobMap(conn, lobId, precision);
return new BufferedReader(new InputStreamReader(in, "UTF-8"));
return new BufferedReader(new InputStreamReader(in, Constants.UTF8));
}
private void trace(String message) {
......
......@@ -25,6 +25,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import org.h2.constant.ErrorCode;
import org.h2.constant.SysProperties;
import org.h2.engine.Constants;
import org.h2.message.DbException;
import org.h2.store.fs.FileUtils;
......@@ -312,7 +313,7 @@ public class SourceCompiler {
copyInThread(p.getInputStream(), buff);
copyInThread(p.getErrorStream(), buff);
p.waitFor();
String err = new String(buff.toByteArray(), "UTF-8");
String err = new String(buff.toByteArray(), Constants.UTF8);
throwSyntaxError(err);
return p.exitValue();
} catch (Exception e) {
......@@ -343,7 +344,7 @@ public class SourceCompiler {
"-d", COMPILE_DIR,
"-encoding", "UTF-8",
javaFile.getAbsolutePath() });
String err = new String(buff.toByteArray(), "UTF-8");
String err = new String(buff.toByteArray(), Constants.UTF8);
throwSyntaxError(err);
} catch (Exception e) {
throw DbException.convert(e);
......
......@@ -633,7 +633,7 @@ public class Transfer {
if (magic != LOB_MAGIC) {
throw DbException.get(ErrorCode.CONNECTION_BROKEN_1, "magic=" + magic);
}
byte[] small = new String(buff).getBytes("UTF-8");
byte[] small = new String(buff).getBytes(Constants.UTF8);
return ValueLobDb.createSmallLob(Value.CLOB, small, length);
}
Value v = session.getDataHandler().getLobStorage().createClob(new DataReader(in), length);
......
......@@ -103,7 +103,7 @@ public class TestCases extends TestBase {
testBinaryCollation();
deleteDb("cases");
}
private void testReferenceableIndexUsage() throws SQLException {
Connection conn = getConnection("cases");
Statement stat = conn.createStatement();
......
......@@ -404,10 +404,10 @@ public class TestOptimizations extends TestBase {
assertTrue(resultSet.next());
resultSet = stat.executeQuery("SELECT x FROM testValues WHERE x IN ('FOO','bar')");
assertTrue(resultSet.next());
conn.close();
}
private void testNestedInSelect() throws SQLException {
deleteDb("optimizations");
Connection conn = getConnection("optimizations");
......
......@@ -58,14 +58,14 @@ public class RowDataType implements DataType {
}
return memory;
}
@Override
public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
obj[i] = read(buff);
}
}
@Override
public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
......
......@@ -226,7 +226,7 @@ public class TestDataUtils extends TestBase {
// 1000... xor 0 = 1000...
assertEquals((short) (1 << 15), DataUtils.getCheckValue(1 << 31));
}
private void testParse() {
for (long i = -1; i != 0; i >>>= 1) {
String x = Long.toHexString(i);
......@@ -246,16 +246,16 @@ public class TestDataUtils extends TestBase {
assertEquals(0, DataUtils.PAGE_TYPE_LEAF);
assertEquals(1, DataUtils.PAGE_TYPE_NODE);
long max = DataUtils.getPagePos(Chunk.MAX_ID, Integer.MAX_VALUE,
Integer.MAX_VALUE, DataUtils.PAGE_TYPE_NODE);
String hex = Long.toHexString(max);
assertEquals(max, DataUtils.parseHexLong(hex));
assertEquals(Chunk.MAX_ID, DataUtils.getPageChunkId(max));
assertEquals(Integer.MAX_VALUE, DataUtils.getPageOffset(max));
assertEquals(Integer.MAX_VALUE, DataUtils.getPageMaxLength(max));
assertEquals(DataUtils.PAGE_LARGE, DataUtils.getPageMaxLength(max));
assertEquals(DataUtils.PAGE_TYPE_NODE, DataUtils.getPageType(max));
long overflow = DataUtils.getPagePos(Chunk.MAX_ID + 1,
Integer.MAX_VALUE, Integer.MAX_VALUE, DataUtils.PAGE_TYPE_NODE);
assertTrue(Chunk.MAX_ID + 1 != DataUtils.getPageChunkId(overflow));
......
......@@ -14,6 +14,7 @@ import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicReference;
import org.h2.mvstore.Chunk;
import org.h2.mvstore.Cursor;
import org.h2.mvstore.DataUtils;
import org.h2.mvstore.FileStore;
......@@ -48,6 +49,8 @@ public class TestMVStore extends TestBase {
public void test() throws Exception {
FileUtils.deleteRecursive(getBaseDir(), true);
FileUtils.createDirectories(getBaseDir());
testFileFormatExample();
testMaxChunkLength();
testCacheInfo();
testRollback();
testVersionsToKeep();
......@@ -99,6 +102,37 @@ public class TestMVStore extends TestBase {
// longer running tests
testLargerThan2G();
}
private void testFileFormatExample() {
String fileName = getBaseDir() + "/testFileFormatExample.h3";
MVStore s = MVStore.open(fileName);
MVMap<Integer, String> map = s.openMap("data");
for (int i = 0; i < 400; i++) {
map.put(i, "Hello");
}
s.commit();
for (int i = 0; i < 100; i++) {
map.put(0, "Hi");
}
s.commit();
s.close();
// MVStoreTool.dump(fileName);
}
private void testMaxChunkLength() {
String fileName = getBaseDir() + "/testMaxChunkLength.h3";
MVStore s = new MVStore.Builder().fileName(fileName).open();
MVMap<Integer, byte[]> map = s.openMap("data");
map.put(0, new byte[2 * 1024 * 1024]);
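// a 2 MB value forces a page with length code 31; since this patch that code
// is accounted as PAGE_LARGE (2 MB) rather than Integer.MAX_VALUE, which is
// what the assertions below verify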
s.commit();
map.put(1, new byte[10 * 1024]);
s.commit();
MVMap<String, String> meta = s.getMetaMap();
Chunk c = Chunk.fromString(meta.get("chunk.1"));
assertTrue(c.maxLen < Integer.MAX_VALUE);
assertTrue(c.maxLenLive < Integer.MAX_VALUE);
s.close();
}
private void testCacheInfo() {
String fileName = getBaseDir() + "/testCloseMap.h3";
......
......@@ -72,7 +72,7 @@ public class TestExit extends TestBase {
}
deleteDb("exit");
}
private String getURL(int action) {
String url = "";
switch (action) {
......
......@@ -590,7 +590,7 @@ public class TestFileSystem extends TestBase {
}
}
}
private void testPositionedReadWrite(String fsBase) throws IOException {
FileUtils.deleteRecursive(fsBase + "/testFile", false);
FileUtils.delete(fsBase + "/testFile");
......
......@@ -751,3 +751,5 @@ sameorigin nobuffer francois hikari duske phromros thailand kritchai mendonca
maginatics jdbclint lint lsm unmappable adams douglas definer invoker
fmrn fmxxx fmday fml syyyy tzd nov iyy iyyy fmc fmb fmxx tzr btc yyfxyy scc syear
overwrote though randomize readability datagram rsync mongodb divides crypto
predicted prediction wojtek hops jurczyk cbtree predict vast assumption upside
adjusted lastly sgtatham