Commit 10fa11b8 authored by Thomas Mueller

Limit line length to 80 characters (comments) and 100 (code)

Parent 9ade2f38
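The limits in the commit message are enforced by the project's source checker; the CheckTextFiles hunk near the end of this diff changes that check so the line-length failure applies only to .java files. As a rough, hypothetical sketch of such a check (the class name, constants, and the comment heuristic below are invented for illustration and are not H2's actual CheckTextFiles code):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

// Hypothetical line-length checker; the limits come from the commit message.
public class LineLengthCheck {

    private static final int MAX_COMMENT_LINE = 80; // comment lines
    private static final int MAX_CODE_LINE = 100;   // code lines

    public static void check(Path javaFile) throws IOException {
        List<String> lines = Files.readAllLines(javaFile, StandardCharsets.UTF_8);
        for (int i = 0; i < lines.size(); i++) {
            String line = lines.get(i);
            String trimmed = line.trim();
            // crude heuristic: treat lines starting with comment markers as comments
            boolean comment = trimmed.startsWith("//") || trimmed.startsWith("*")
                    || trimmed.startsWith("/*");
            int max = comment ? MAX_COMMENT_LINE : MAX_CODE_LINE;
            if (line.length() > max) {
                System.out.println(javaFile + ":" + (i + 1)
                        + " line too long: " + line.length());
            }
        }
    }
}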
@@ -138,7 +138,8 @@ public class RuleFixed implements Rule {
  }
  break;
  case AZ_UNDERSCORE:
- if (s.length() > 0 && (Character.isLetter(s.charAt(0)) || s.charAt(0) == '_')) {
+ if (s.length() > 0 &&
+ (Character.isLetter(s.charAt(0)) || s.charAt(0) == '_')) {
  s = s.substring(1);
  }
  if (s.length() == 0) {
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.expression;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.expression;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.expression;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.expression;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.index;
...
@@ -284,7 +284,8 @@ public class DataUtils {
  * @param len the number of characters
  * @return the byte buffer
  */
- public static ByteBuffer writeStringData(ByteBuffer buff, String s, int len) {
+ public static ByteBuffer writeStringData(ByteBuffer buff,
+ String s, int len) {
  buff = DataUtils.ensureCapacity(buff, 3 * len);
  for (int i = 0; i < len; i++) {
  int c = s.charAt(i);
@@ -345,7 +346,8 @@ public class DataUtils {
  * @param out the output stream
  * @param x the value
  */
- public static void writeVarLong(OutputStream out, long x) throws IOException {
+ public static void writeVarLong(OutputStream out, long x)
+ throws IOException {
  while ((x & ~0x7f) != 0) {
  out.write((byte) (0x80 | (x & 0x7f)));
  x >>>= 7;
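For orientation, the writeVarLong body above writes the value in groups of 7 bits, lowest group first, setting the high bit of every byte except the last. A matching decoder can be sketched as follows; this is a generic illustration of that encoding, not a copy of H2's own reader:

import java.io.IOException;
import java.io.InputStream;

// Illustrative decoder for the 7-bits-per-byte, continuation-bit encoding
// produced by the writeVarLong loop shown above.
final class VarLongExample {
    static long readVarLong(InputStream in) throws IOException {
        long x = 0;
        for (int shift = 0; shift < 64; shift += 7) {
            int b = in.read();
            if (b < 0) {
                throw new IOException("unexpected end of stream");
            }
            x |= ((long) (b & 0x7f)) << shift;
            if ((b & 0x80) == 0) {
                break; // high bit clear: this was the last byte
            }
        }
        return x;
    }
}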
@@ -418,7 +420,8 @@ public class DataUtils {
  }
  throw newIllegalStateException(
  ERROR_READING_FAILED,
- "Reading from {0} failed; file length {1} read length {2} at {3}",
+ "Reading from {0} failed; file length {1} " +
+ "read length {2} at {3}",
  file, size, dst.remaining(), pos, e);
  }
  }
@@ -534,7 +537,8 @@ public class DataUtils {
  * @param type the page type (1 for node, 0 for leaf)
  * @return the position
  */
- public static long getPagePos(int chunkId, int offset, int length, int type) {
+ public static long getPagePos(int chunkId, int offset,
+ int length, int type) {
  long pos = (long) chunkId << 38;
  pos |= (long) offset << 6;
  pos |= encodeLength(length) << 1;
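The getPagePos body above packs the page position into a single long: the chunk id above bit 38, the offset starting at bit 6, a length code starting at bit 1, and the type in bit 0. A sketch of reading the fields back, with the masks inferred from those shifts (the 5-bit width of the length code is an assumption based on the gap between bit 1 and bit 6, not taken from DataUtils):

// Field extraction inferred from the shifts in getPagePos above; illustrative only.
final class PagePosExample {
    static int chunkId(long pos) {
        return (int) (pos >>> 38);
    }
    static int offset(long pos) {
        return (int) (pos >> 6); // keeps bits 6..37 after the int cast
    }
    static int lengthCode(long pos) {
        return (int) ((pos >> 1) & 31); // assumed 5-bit code
    }
    static int type(long pos) {
        return (int) (pos & 1);
    }
}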
@@ -712,8 +716,8 @@ public class DataUtils {
  * @param message the message
  * @return the exception
  */
- public static UnsupportedOperationException newUnsupportedOperationException(
- String message) {
+ public static UnsupportedOperationException
+ newUnsupportedOperationException(String message) {
  return new UnsupportedOperationException(formatMessage(0, message));
  }
...
@@ -148,11 +148,13 @@ public class FileStore {
  }
  } catch (OverlappingFileLockException e) {
  throw DataUtils.newIllegalStateException(
- DataUtils.ERROR_FILE_LOCKED, "The file is locked: {0}", fileName, e);
+ DataUtils.ERROR_FILE_LOCKED,
+ "The file is locked: {0}", fileName, e);
  }
  if (fileLock == null) {
  throw DataUtils.newIllegalStateException(
- DataUtils.ERROR_FILE_LOCKED, "The file is locked: {0}", fileName);
+ DataUtils.ERROR_FILE_LOCKED,
+ "The file is locked: {0}", fileName);
  }
  fileSize = file.size();
  } catch (IOException e) {
...
@@ -930,7 +930,8 @@ public class MVMap<K, V> extends AbstractMap<K, V>
  protected void waitUntilWritten(long version) {
  if (readOnly) {
  throw DataUtils.newIllegalStateException(
- DataUtils.ERROR_INTERNAL, "Waiting for writes to a read-only map");
+ DataUtils.ERROR_INTERNAL,
+ "Waiting for writes to a read-only map");
  }
  while (currentWriteVersion == version) {
  Thread.yield();
@@ -997,7 +998,8 @@ public class MVMap<K, V> extends AbstractMap<K, V>
  public MVMap<K, V> openVersion(long version) {
  if (readOnly) {
  throw DataUtils.newUnsupportedOperationException(
- "This map is read-only; need to call the method on the writable map");
+ "This map is read-only; need to call " +
+ "the method on the writable map");
  }
  DataUtils.checkArgument(version >= createVersion,
  "Unknown version {0}; this map was created in version is {1}",
...
@@ -164,7 +164,8 @@ public class MVStore {
  * is the unsaved version, the value is the map of chunks. The maps contains
  * the number of freed entries per chunk. Access is synchronized.
  */
- private final ConcurrentHashMap<Long, HashMap<Integer, Chunk>> freedPageSpace =
+ private final ConcurrentHashMap<Long,
+ HashMap<Integer, Chunk>> freedPageSpace =
  new ConcurrentHashMap<Long, HashMap<Integer, Chunk>>();
  /**
@@ -508,9 +509,11 @@ public class MVStore {
  fileHeaderBlocks.get(buff);
  // the following can fail for various reasons
  try {
- String s = new String(buff, 0, BLOCK_SIZE, DataUtils.LATIN).trim();
+ String s = new String(buff, 0, BLOCK_SIZE,
+ DataUtils.LATIN).trim();
  HashMap<String, String> m = DataUtils.parseMap(s);
- int blockSize = DataUtils.readHexInt(m, "blockSize", BLOCK_SIZE);
+ int blockSize = DataUtils.readHexInt(
+ m, "blockSize", BLOCK_SIZE);
  if (blockSize != BLOCK_SIZE) {
  throw DataUtils.newIllegalStateException(
  DataUtils.ERROR_UNSUPPORTED_FORMAT,
@@ -521,7 +524,8 @@ public class MVStore {
  m.remove("fletcher");
  s = s.substring(0, s.lastIndexOf("fletcher") - 1);
  byte[] bytes = s.getBytes(DataUtils.LATIN);
- int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
+ int checksum = DataUtils.getFletcher32(bytes,
+ bytes.length / 2 * 2);
  if (check != checksum) {
  continue;
  }
@@ -546,7 +550,8 @@ public class MVStore {
  if (format > FORMAT_WRITE && !fileStore.isReadOnly()) {
  throw DataUtils.newIllegalStateException(
  DataUtils.ERROR_UNSUPPORTED_FORMAT,
- "The write format {0} is larger than the supported format {1}, " +
+ "The write format {0} is larger " +
+ "than the supported format {1}, " +
  "and the file was not opened in read-only mode",
  format, FORMAT_WRITE);
  }
@@ -554,7 +559,8 @@ public class MVStore {
  if (format > FORMAT_READ) {
  throw DataUtils.newIllegalStateException(
  DataUtils.ERROR_UNSUPPORTED_FORMAT,
- "The read format {0} is larger than the supported format {1}",
+ "The read format {0} is larger " +
+ "than the supported format {1}",
  format, FORMAT_READ);
  }
  lastStoredVersion = -1;
@@ -610,7 +616,8 @@ public class MVStore {
  }
  lastChunk = header;
  newestVersion = header.version;
- if (header.next == 0 || header.next >= fileStore.size() / BLOCK_SIZE) {
+ if (header.next == 0 ||
+ header.next >= fileStore.size() / BLOCK_SIZE) {
  // no (valid) next
  break;
  }
@@ -1018,7 +1025,8 @@ public class MVStore {
  // calculate and set the likely next position
  if (reuseSpace) {
  int predictBlocks = c.len;
- long predictedNextStart = fileStore.allocate(predictBlocks * BLOCK_SIZE);
+ long predictedNextStart = fileStore.allocate(
+ predictBlocks * BLOCK_SIZE);
  fileStore.free(predictedNextStart, predictBlocks * BLOCK_SIZE);
  c.next = predictedNextStart / BLOCK_SIZE;
  } else {
@@ -1045,7 +1053,8 @@ public class MVStore {
  // the last prediction did not matched
  needHeader = true;
  } else {
- long headerVersion = DataUtils.readHexLong(fileHeader, "version", 0);
+ long headerVersion = DataUtils.readHexLong(
+ fileHeader, "version", 0);
  if (lastChunk.version - headerVersion > 20) {
  // we write after at least 20 entries
  needHeader = true;
@@ -1531,7 +1540,8 @@ public class MVStore {
  int pageLength = buff.getInt();
  if (pageLength <= 0) {
  throw DataUtils.newIllegalStateException(
- DataUtils.ERROR_FILE_CORRUPT, "Page length {0}", pageLength);
+ DataUtils.ERROR_FILE_CORRUPT,
+ "Page length {0}", pageLength);
  }
  buff.getShort();
  int mapId = DataUtils.readVarInt(buff);
@@ -1789,7 +1799,8 @@ public class MVStore {
  if (oldMeta == null) {
  return false;
  }
- for (Iterator<String> it = oldMeta.keyIterator("chunk."); it.hasNext();) {
+ for (Iterator<String> it = oldMeta.keyIterator("chunk.");
+ it.hasNext();) {
  String chunkKey = it.next();
  if (!chunkKey.startsWith("chunk.")) {
  break;
@@ -1968,7 +1979,8 @@ public class MVStore {
  }
  private void revertTemp(long storeVersion) {
- for (Iterator<Long> it = freedPageSpace.keySet().iterator(); it.hasNext();) {
+ for (Iterator<Long> it = freedPageSpace.keySet().iterator();
+ it.hasNext();) {
  long v = it.next();
  if (v > storeVersion) {
  continue;
...
@@ -87,7 +87,8 @@ public class MVStoreTool {
  block.position(0);
  Chunk c = Chunk.readChunkHeader(block, pos);
  int length = c.len * MVStore.BLOCK_SIZE;
- pw.printf("%n%0" + len + "x chunkHeader %s%n", pos, c.toString());
+ pw.printf("%n%0" + len + "x chunkHeader %s%n",
+ pos, c.toString());
  ByteBuffer chunk = ByteBuffer.allocate(length);
  DataUtils.readFully(file, pos, chunk);
  int p = block.position();
@@ -104,7 +105,8 @@ public class MVStoreTool {
  boolean compressed = (type & 2) != 0;
  boolean node = (type & 1) != 0;
  pw.printf(
- "+%0" + len + "x %s, map %x, %d entries, %d bytes%n",
+ "+%0" + len +
+ "x %s, map %x, %d entries, %d bytes%n",
  p,
  (node ? "node" : "leaf") +
  (compressed ? " compressed" : ""),
@@ -138,14 +140,17 @@ public class MVStoreTool {
  // meta map node
  for (int i = 0; i < entries; i++) {
  long cp = children[i];
- pw.printf(" %d children < %s @ chunk %x +%0" + len + "x%n",
+ pw.printf(" %d children < %s @ " +
+ "chunk %x +%0" +
+ len + "x%n",
  counts[i],
  keys[i],
  DataUtils.getPageChunkId(cp),
  DataUtils.getPageOffset(cp));
  }
  long cp = children[entries];
- pw.printf(" %d children >= %s @ chunk %x +%0" + len + "x%n",
+ pw.printf(" %d children >= %s @ chunk %x +%0" +
+ len + "x%n",
  counts[entries],
  keys[entries],
  DataUtils.getPageChunkId(cp),
@@ -158,14 +163,16 @@ public class MVStoreTool {
  values[i] = v;
  }
  for (int i = 0; i < entries; i++) {
- pw.println(" " + keys[i] + " = " + values[i]);
+ pw.println(" " + keys[i] +
+ " = " + values[i]);
  }
  }
  } else {
  if (node) {
  for (int i = 0; i <= entries; i++) {
  long cp = children[i];
- pw.printf(" %d children @ chunk %x +%0" + len + "x%n",
+ pw.printf(" %d children @ chunk %x +%0" +
+ len + "x%n",
  counts[i],
  DataUtils.getPageChunkId(cp),
  DataUtils.getPageOffset(cp));
...
@@ -34,7 +34,8 @@ public class OffHeapStore extends FileStore {
  public ByteBuffer readFully(long pos, int len) {
  Entry<Long, ByteBuffer> memEntry = memory.floorEntry(pos);
  if (memEntry == null) {
- throw DataUtils.newIllegalStateException(DataUtils.ERROR_READING_FAILED,
+ throw DataUtils.newIllegalStateException(
+ DataUtils.ERROR_READING_FAILED,
  "Could not read from position {0}", pos);
  }
  readCount++;
@@ -53,7 +54,8 @@ public class OffHeapStore extends FileStore {
  if (buff == null) {
  // nothing was written (just allocated)
  } else if (buff.remaining() != length) {
- throw DataUtils.newIllegalStateException(DataUtils.ERROR_READING_FAILED,
+ throw DataUtils.newIllegalStateException(
+ DataUtils.ERROR_READING_FAILED,
  "Partial remove is not supported at position {0}", pos);
  }
  }
@@ -117,8 +119,10 @@ public class OffHeapStore extends FileStore {
  }
  ByteBuffer buff = memory.get(pos);
  if (buff.capacity() > size) {
- throw DataUtils.newIllegalStateException(DataUtils.ERROR_READING_FAILED,
- "Could not truncate to {0}; partial truncate is not supported", pos);
+ throw DataUtils.newIllegalStateException(
+ DataUtils.ERROR_READING_FAILED,
+ "Could not truncate to {0}; " +
+ "partial truncate is not supported", pos);
  }
  it.remove();
  }
...
@@ -138,10 +138,10 @@ public class Page {
  * @param memory the memory used in bytes
  * @return the page
  */
- public static Page create(MVMap<?, ?> map, long version,
- int keyCount, Object[] keys, Object[] values,
- int childCount, long[] children, Page[] childrenPages, long[] counts,
- long totalCount, int sharedFlags, int memory) {
+ public static Page create(MVMap<?, ?> map, long version, int keyCount,
+ Object[] keys, Object[] values, int childCount, long[] children,
+ Page[] childrenPages, long[] counts, long totalCount,
+ int sharedFlags, int memory) {
  Page p = new Page(map, version);
  // the position is 0
  p.keyCount = keyCount;
@@ -479,7 +479,8 @@ public class Page {
  if (c != childrenPages[index] || c.getPos() != children[index]) {
  if ((sharedFlags & SHARED_CHILDREN) != 0) {
  children = Arrays.copyOf(children, children.length);
- childrenPages = Arrays.copyOf(childrenPages, childrenPages.length);
+ childrenPages = Arrays.copyOf(childrenPages,
+ childrenPages.length);
  sharedFlags &= ~SHARED_CHILDREN;
  }
  children[index] = c.getPos();
@@ -590,8 +591,10 @@ public class Page {
  public void insertLeaf(int index, Object key, Object value) {
  if (((sharedFlags & SHARED_KEYS) == 0) && keys.length > keyCount + 1) {
  if (index < keyCount) {
- System.arraycopy(keys, index, keys, index + 1, keyCount - index);
- System.arraycopy(values, index, values, index + 1, keyCount - index);
+ System.arraycopy(keys, index, keys, index + 1,
+ keyCount - index);
+ System.arraycopy(values, index, values, index + 1,
+ keyCount - index);
  }
  } else {
  int len = keyCount + 6;
@@ -633,7 +636,8 @@ public class Page {
  children = newChildren;
  Page[] newChildrenPages = new Page[childCount + 1];
- DataUtils.copyWithGap(childrenPages, newChildrenPages, childCount, index);
+ DataUtils.copyWithGap(childrenPages, newChildrenPages, childCount,
+ index);
  newChildrenPages[index] = childPage;
  childrenPages = newChildrenPages;
@@ -659,9 +663,11 @@ public class Page {
  int keyIndex = index >= keyCount ? index - 1 : index;
  Object old = keys[keyIndex];
  addMemory(-map.getKeyType().getMemory(old));
- if ((sharedFlags & SHARED_KEYS) == 0 && keys.length > keyCount - 4) {
+ if ((sharedFlags & SHARED_KEYS) == 0 &&
+ keys.length > keyCount - 4) {
  if (keyIndex < keyCount - 1) {
- System.arraycopy(keys, keyIndex + 1, keys, keyIndex, keyCount - keyIndex - 1);
+ System.arraycopy(keys, keyIndex + 1, keys, keyIndex, keyCount -
+ keyIndex - 1);
  }
  keys[keyCount - 1] = null;
  } else {
@@ -674,9 +680,11 @@ public class Page {
  if (values != null) {
  old = values[index];
  addMemory(-map.getValueType().getMemory(old));
- if ((sharedFlags & SHARED_VALUES) == 0 && values.length > keyCount - 4) {
+ if ((sharedFlags & SHARED_VALUES) == 0 &&
+ values.length > keyCount - 4) {
  if (index < keyCount - 1) {
- System.arraycopy(values, index + 1, values, index, keyCount - index - 1);
+ System.arraycopy(values, index + 1, values, index,
+ keyCount - index - 1);
  }
  values[keyCount - 1] = null;
  } else {
@@ -776,7 +784,8 @@ public class Page {
  buff.get(comp);
  int l = compLen + lenAdd;
  buff = ByteBuffer.allocate(l);
- compressor.expand(comp, 0, compLen, buff.array(), buff.arrayOffset(), l);
+ compressor.expand(comp, 0, compLen, buff.array(),
+ buff.arrayOffset(), l);
  }
  map.getKeyType().read(buff, keys, len, true);
  if (!node) {
...
@@ -164,7 +164,8 @@ public class StreamStore {
  return eof;
  }
- private static byte[] read(InputStream in, byte[] target) throws IOException {
+ private static byte[] read(InputStream in, byte[] target)
+ throws IOException {
  int copied = 0;
  int remaining = target.length;
  while (remaining > 0) {
@@ -470,7 +471,8 @@ public class StreamStore {
  }
  default:
  throw DataUtils.newIllegalArgumentException(
- "Unsupported id {0}", Arrays.toString(idBuffer.array()));
+ "Unsupported id {0}",
+ Arrays.toString(idBuffer.array()));
  }
  }
  return null;
...
@@ -94,7 +94,8 @@ public class CacheLongKeyLIRS<V> {
  this.stackMoveDistance = stackMoveDistance;
  segments = new Segment[segmentCount];
  clear();
- this.segmentShift = Integer.numberOfTrailingZeros(segments[0].entries.length);
+ this.segmentShift = Integer.numberOfTrailingZeros(
+ segments[0].entries.length);
  }
  /**
@@ -929,11 +930,13 @@ public class CacheLongKeyLIRS<V> {
  ArrayList<Long> keys = new ArrayList<Long>();
  if (cold) {
  Entry<V> start = nonResident ? queue2 : queue;
- for (Entry<V> e = start.queueNext; e != start; e = e.queueNext) {
+ for (Entry<V> e = start.queueNext; e != start;
+ e = e.queueNext) {
  keys.add(e.key);
  }
  } else {
- for (Entry<V> e = stack.stackNext; e != stack; e = e.stackNext) {
+ for (Entry<V> e = stack.stackNext; e != stack;
+ e = e.stackNext) {
  keys.add(e.key);
  }
  }
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.mvstore.db;
...
@@ -177,7 +177,9 @@ public class MVTable extends TableBase {
  if (checkDeadlock) {
  ArrayList<Session> sessions = checkDeadlock(session, null, null);
  if (sessions != null) {
- throw DbException.get(ErrorCode.DEADLOCK_1, getDeadlockDetails(sessions));
+ throw DbException.get(
+ ErrorCode.DEADLOCK_1,
+ getDeadlockDetails(sessions));
  }
  } else {
  // check for deadlocks from now on
@@ -188,7 +190,8 @@ public class MVTable extends TableBase {
  // try at least one more time
  max = now + session.getLockTimeout();
  } else if (now >= max) {
- traceLock(session, exclusive, "timeout after " + session.getLockTimeout());
+ traceLock(session, exclusive,
+ "timeout after " + session.getLockTimeout());
  throw DbException.get(ErrorCode.LOCK_TIMEOUT_1, getName());
  }
  try {
@@ -299,7 +302,8 @@ public class MVTable extends TableBase {
  private void traceLock(Session session, boolean exclusive, String s) {
  if (traceLock.isDebugEnabled()) {
  traceLock.debug("{0} {1} {2} {3}", session.getId(),
- exclusive ? "exclusive write lock" : "shared read lock", s, getName());
+ exclusive ? "exclusive write lock" : "shared read lock",
+ s, getName());
  }
  }
@@ -612,10 +616,13 @@ public class MVTable extends TableBase {
  if (de.getErrorCode() == ErrorCode.DUPLICATE_KEY_1) {
  for (int j = 0; j < indexes.size(); j++) {
  Index index = indexes.get(j);
- if (index.getIndexType().isUnique() && index instanceof MultiVersionIndex) {
+ if (index.getIndexType().isUnique() &&
+ index instanceof MultiVersionIndex) {
  MultiVersionIndex mv = (MultiVersionIndex) index;
  if (mv.isUncommittedFromOtherSession(session, row)) {
- throw DbException.get(ErrorCode.CONCURRENT_UPDATE_1, index.getName());
+ throw DbException.get(
+ ErrorCode.CONCURRENT_UPDATE_1,
+ index.getName());
  }
  }
  }
@@ -697,7 +704,8 @@ public class MVTable extends TableBase {
  }
  database.getMvStore().removeTable(this);
  super.removeChildrenAndResources(session);
- // go backwards because database.removeIndex will call table.removeIndex
+ // go backwards because database.removeIndex will
+ // call table.removeIndex
  while (indexes.size() > 1) {
  Index index = indexes.get(1);
  if (index.getName() != null) {
@@ -708,7 +716,8 @@ public class MVTable extends TableBase {
  for (SchemaObject obj : database.getAllSchemaObjects(DbObject.INDEX)) {
  Index index = (Index) obj;
  if (index.getTable() == this) {
- DbException.throwInternalError("index not dropped: " + index.getName());
+ DbException.throwInternalError(
+ "index not dropped: " + index.getName());
  }
  }
  }
...
@@ -73,7 +73,8 @@ public class MVTableEngine implements TableEngine {
  if (key != null) {
  char[] password = new char[key.length / 2];
  for (int i = 0; i < password.length; i++) {
- password[i] = (char) (((key[i + i] & 255) << 16) | ((key[i + i + 1]) & 255));
+ password[i] = (char) (((key[i + i] & 255) << 16) |
+ ((key[i + i + 1]) & 255));
  }
  builder.encryptionKey(password);
  }
@@ -96,12 +97,18 @@ public class MVTableEngine implements TableEngine {
  int errorCode = DataUtils.getErrorCode(e.getMessage());
  if (errorCode == DataUtils.ERROR_FILE_CORRUPT) {
  if (key != null) {
- throw DbException.get(ErrorCode.FILE_ENCRYPTION_ERROR_1, e, fileName);
+ throw DbException.get(
+ ErrorCode.FILE_ENCRYPTION_ERROR_1,
+ e, fileName);
  }
  } else if (errorCode == DataUtils.ERROR_FILE_LOCKED) {
- throw DbException.get(ErrorCode.DATABASE_ALREADY_OPEN_1, e, fileName);
+ throw DbException.get(
+ ErrorCode.DATABASE_ALREADY_OPEN_1,
+ e, fileName);
  }
- throw DbException.get(ErrorCode.FILE_CORRUPTED_1, e, fileName);
+ throw DbException.get(
+ ErrorCode.FILE_CORRUPTED_1,
+ e, fileName);
  }
  }
  db.setMvStore(store);
...
@@ -59,7 +59,8 @@ public class TransactionStore {
  /**
  * The map of maps.
  */
- private HashMap<Integer, MVMap<Object, VersionedValue>> maps = New.hashMap();
+ private HashMap<Integer, MVMap<Object, VersionedValue>> maps =
+ New.hashMap();
  private final DataType dataType;
@@ -192,7 +193,8 @@ public class TransactionStore {
  status = (Integer) data[0];
  name = (String) data[1];
  }
- Transaction t = new Transaction(this, transactionId, status, name, logId);
+ Transaction t = new Transaction(this, transactionId, status,
+ name, logId);
  list.add(t);
  key = undoLog.ceilingKey(getOperationId(transactionId + 1, 0));
  }
@@ -227,7 +229,8 @@ public class TransactionStore {
  * @param t the transaction
  */
  synchronized void storeTransaction(Transaction t) {
- if (t.getStatus() == Transaction.STATUS_PREPARED || t.getName() != null) {
+ if (t.getStatus() == Transaction.STATUS_PREPARED ||
+ t.getName() != null) {
  Object[] v = { t.getStatus(), t.getName() };
  preparedTransactions.put(t.getId(), v);
  }
@@ -251,7 +254,8 @@ public class TransactionStore {
  if (undoLog.containsKey(undoKey)) {
  throw DataUtils.newIllegalStateException(
  DataUtils.ERROR_TRANSACTION_STILL_OPEN,
- "An old transaction with the same id is still open: {0}",
+ "An old transaction with the same id " +
+ "is still open: {0}",
  t.getId());
  }
  }
@@ -303,7 +307,8 @@ public class TransactionStore {
  if (op == null) {
  // partially committed: load next
  undoKey = undoLog.ceilingKey(undoKey);
- if (undoKey == null || getTransactionId(undoKey) != t.getId()) {
+ if (undoKey == null ||
+ getTransactionId(undoKey) != t.getId()) {
  break;
  }
  logId = getLogId(undoKey) - 1;
@@ -460,7 +465,8 @@ public class TransactionStore {
  if (op == null) {
  // partially rolled back: load previous
  undoKey = undoLog.floorKey(undoKey);
- if (undoKey == null || getTransactionId(undoKey) != t.getId()) {
+ if (undoKey == null ||
+ getTransactionId(undoKey) != t.getId()) {
  break;
  }
  logId = getLogId(undoKey) + 1;
@@ -513,7 +519,8 @@ public class TransactionStore {
  if (op == null) {
  // partially rolled back: load previous
  undoKey = undoLog.floorKey(undoKey);
- if (undoKey == null || getTransactionId(undoKey) != t.getId()) {
+ if (undoKey == null ||
+ getTransactionId(undoKey) != t.getId()) {
  break;
  }
  logId = getLogId(undoKey);
@@ -528,7 +535,8 @@ public class TransactionStore {
  current.mapName = m.getName();
  current.key = op[1];
  VersionedValue oldValue = (VersionedValue) op[2];
- current.value = oldValue == null ? null : oldValue.value;
+ current.value = oldValue == null ?
+ null : oldValue.value;
  return;
  }
  }
@@ -712,7 +720,8 @@ public class TransactionStore {
  public <K, V> TransactionMap<K, V> openMap(String name,
  DataType keyType, DataType valueType) {
  checkNotClosed();
- MVMap<K, VersionedValue> map = store.openMap(name, keyType, valueType);
+ MVMap<K, VersionedValue> map = store.openMap(name, keyType,
+ valueType);
  int mapId = map.getId();
  return new TransactionMap<K, V>(this, map, mapId);
  }
@@ -725,7 +734,8 @@ public class TransactionStore {
  * @param map the base map
  * @return the transactional map
  */
- public <K, V> TransactionMap<K, V> openMap(MVMap<K, VersionedValue> map) {
+ public <K, V> TransactionMap<K, V> openMap(
+ MVMap<K, VersionedValue> map) {
  checkNotClosed();
  int mapId = map.getId();
  return new TransactionMap<K, V>(this, map, mapId);
@@ -1521,14 +1531,16 @@ public class TransactionStore {
  }
  @Override
- public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+ public void read(ByteBuffer buff, Object[] obj,
+ int len, boolean key) {
  for (int i = 0; i < len; i++) {
  obj[i] = read(buff);
  }
  }
  @Override
- public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+ public void write(WriteBuffer buff, Object[] obj,
+ int len, boolean key) {
  for (int i = 0; i < len; i++) {
  write(buff, obj[i]);
  }
@@ -1604,14 +1616,16 @@ public class TransactionStore {
  }
  @Override
- public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+ public void read(ByteBuffer buff, Object[] obj,
+ int len, boolean key) {
  for (int i = 0; i < len; i++) {
  obj[i] = read(buff);
  }
  }
  @Override
- public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+ public void write(WriteBuffer buff, Object[] obj,
+ int len, boolean key) {
  for (int i = 0; i < len; i++) {
  write(buff, obj[i]);
  }
...
@@ -385,8 +385,10 @@ public class ValueDataType implements DataType {
  while (rs.next()) {
  buff.put((byte) 1);
  for (int i = 0; i < columnCount; i++) {
- int t = org.h2.value.DataType.getValueTypeFromResultSet(meta, i + 1);
- Value val = org.h2.value.DataType.readValue(null, rs, i + 1, t);
+ int t = org.h2.value.DataType.
+ getValueTypeFromResultSet(meta, i + 1);
+ Value val = org.h2.value.DataType.readValue(
+ null, rs, i + 1, t);
  writeValue(buff, val);
  }
  }
...
@@ -63,7 +63,8 @@ public class MVRTreeMap<V> extends MVMap<SpatialKey, V> {
  public RTreeCursor findIntersectingKeys(SpatialKey x) {
  return new RTreeCursor(root, x) {
  @Override
- protected boolean check(boolean leaf, SpatialKey key, SpatialKey test) {
+ protected boolean check(boolean leaf, SpatialKey key,
+ SpatialKey test) {
  return keyType.isOverlap(key, test);
  }
  };
...
@@ -278,7 +278,8 @@ public class SpatialDataType implements DataType {
  float min = boundsInner.min(bestDim);
  float max = boundsInner.max(bestDim);
  int firstIndex = -1, lastIndex = -1;
- for (int i = 0; i < list.size() && (firstIndex < 0 || lastIndex < 0); i++) {
+ for (int i = 0; i < list.size() &&
+ (firstIndex < 0 || lastIndex < 0); i++) {
  SpatialKey o = (SpatialKey) list.get(i);
  if (firstIndex < 0 && o.max(bestDim) == min) {
  firstIndex = i;
...
@@ -432,7 +432,8 @@ public class ObjectDataType implements DataType {
  }
  @Override
- public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
+ public void write(WriteBuffer buff, Object[] obj,
+ int len, boolean key) {
  for (int i = 0; i < len; i++) {
  write(buff, obj[i]);
  }
@@ -444,7 +445,8 @@ public class ObjectDataType implements DataType {
  }
  @Override
- public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
+ public void read(ByteBuffer buff, Object[] obj,
+ int len, boolean key) {
  for (int i = 0; i < len; i++) {
  obj[i] = read(buff);
  }
...
@@ -357,7 +357,8 @@ public class DateTimeUtils {
  if (hour < 0 || hour > 23) {
  throw e;
  }
- return getTimeTry(true, tz, year, month, day, hour, minute, second, millis);
+ return getTimeTry(true, tz, year, month, day, hour, minute,
+ second, millis);
  } else if (message.indexOf("DAY_OF_MONTH") > 0) {
  int maxDay;
  if (month == 2) {
@@ -372,9 +373,11 @@ public class DateTimeUtils {
  // using the timezone Brasilia and others,
  // for example for 2042-10-12 00:00:00.
  hour += 6;
- return getTimeTry(true, tz, year, month, day, hour, minute, second, millis);
+ return getTimeTry(true, tz, year, month, day, hour, minute,
+ second, millis);
  } else {
- return getTimeTry(true, tz, year, month, day, hour, minute, second, millis);
+ return getTimeTry(true, tz, year, month, day, hour, minute,
+ second, millis);
  }
  }
  }
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.util;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.util;
...
@@ -117,7 +117,8 @@ public class CreateScriptFile {
  } else {
  out = FileUtils.newOutputStream(fileName, false);
  out = new BufferedOutputStream(out, Constants.IO_BUFFER_SIZE);
- out = CompressTool.wrapOutputStream(out, compressionAlgorithm, "script.sql");
+ out = CompressTool.wrapOutputStream(out,
+ compressionAlgorithm, "script.sql");
  }
  return new PrintWriter(new OutputStreamWriter(out, charset));
  } catch (Exception e) {
@@ -145,7 +146,8 @@ public class CreateScriptFile {
  byte[] key = SHA256.getKeyPasswordHash("script", password.toCharArray());
  FileStore store = FileStore.open(null, fileName, "rw", cipher, key);
  store.init();
- in = new FileStoreInputStream(store, null, compressionAlgorithm != null, false);
+ in = new FileStoreInputStream(store, null,
+ compressionAlgorithm != null, false);
  in = new BufferedInputStream(in, Constants.IO_BUFFER_SIZE_COMPRESS);
  } else {
  in = FileUtils.newInputStream(fileName);
...
@@ -56,7 +56,9 @@ public class CsvSample {
  ResultSetMetaData meta = rs.getMetaData();
  while (rs.next()) {
  for (int i = 0; i < meta.getColumnCount(); i++) {
- System.out.println(meta.getColumnLabel(i + 1) + ": " + rs.getString(i + 1));
+ System.out.println(
+ meta.getColumnLabel(i + 1) + ": " +
+ rs.getString(i + 1));
  }
  System.out.println();
  }
...
@@ -397,7 +397,8 @@ public class SQLInjection {
  stat.execute("CREATE CONSTANT HASH_ITERATIONS VALUE 100");
  stat.execute("CREATE CONSTANT HASH_ALGORITHM VALUE 'SHA256'");
  stat.execute("UPDATE USERS2 SET " +
- "HASH=HASH(HASH_ALGORITHM, STRINGTOUTF8('abc' || SALT), HASH_ITERATIONS) " +
+ "HASH=HASH(HASH_ALGORITHM, " +
+ "STRINGTOUTF8('abc' || SALT), HASH_ITERATIONS) " +
  "WHERE ID=1");
  String user = input("user?");
  String password = input("password?");
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.bench;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.db;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.db;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.db;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.db;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.store;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.test.unit;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.android;
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: H2 Group
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: H2 Group
  */
  package org.h2.android;
...
@@ -190,8 +190,8 @@ public class CheckTextFiles {
  lastWasWhitespace = false;
  line++;
  int lineLength = i - startLinePos;
- if (lineLength > MAX_SOURCE_LINE_SIZE) {
- if (file.getName().endsWith(".java")) {
+ if (file.getName().endsWith(".java")) {
+ if (lineLength > MAX_SOURCE_LINE_SIZE) {
  fail(file, "line too long: " + lineLength, line);
  }
  }
...
@@ -944,7 +944,8 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
  */
  void setMaxMemory(long maxMemory) {
  if (maxMemory <= 0) {
- throw new IllegalArgumentException("Max memory must be larger than 0");
+ throw new IllegalArgumentException(
+ "Max memory must be larger than 0");
  }
  this.maxMemory = maxMemory;
  }
@@ -957,7 +958,8 @@ public class CacheLIRS<K, V> extends AbstractMap<K, V> {
  */
  void setAverageMemory(int averageMemory) {
  if (averageMemory <= 0) {
- throw new IllegalArgumentException("Average memory must be larger than 0");
+ throw new IllegalArgumentException(
+ "Average memory must be larger than 0");
  }
  this.averageMemory = averageMemory;
  }
...
  /*
- * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, Version
- * 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html). Initial Developer: James Moger
+ * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
+ * Version 1.0, and under the Eclipse Public License, Version 1.0
+ * (http://h2database.com/html/license.html).
+ * Initial Developer: James Moger
  */
  package org.h2.jaqu;
...