Commit d26f68eb, authored by StuMc, committed by GitHub

Merge branch 'master' into Issue#589

......@@ -17,4 +17,5 @@
.checkstyle
/temp/
/h2web/
.pmd
\ No newline at end of file
.pmd
docs/html/testOutput.html
......@@ -12,8 +12,13 @@
<licenses>
<license>
<name>MPL 2.0 or EPL 1.0</name>
<url>http://h2database.com/html/license.html</url>
<name>MPL 2.0</name>
<url>https://www.mozilla.org/en-US/MPL/2.0/</url>
<distribution>repo</distribution>
</license>
<license>
<name>EPL 1.0</name>
<url>https://opensource.org/licenses/eclipse-1.0.php</url>
<distribution>repo</distribution>
</license>
</licenses>
......
......@@ -21,6 +21,22 @@ Change Log
<h2>Next Version (unreleased)</h2>
<ul>
<li>Issue #570: MySQL compatibility for ALTER TABLE .. DROP INDEX
</li>
<li>Issue #537: Include the COLUMN name in message "Numeric value out of range"
</li>
<li>Issue #600: ROW_NUMBER() behaviour change in H2 1.4.195
</li>
<li>Fix a number of race conditions found by vmlens.com; thank you to vmlens for giving us a license.
</li>
<li>PR #597: Support more types in getObject
</li>
<li>Issue #591: Generated SQL from WITH-CTEs does not include a table identifier
</li>
<li>PR #593: Make it possible to create a cluster without using temporary files.
</li>
<li>PR #592: "Connection is broken: "unexpected status 16777216" [90067-192]" message when using older h2 releases as client
</li>
<li>Issue #585: MySQL mode DELETE statements compatibility
</li>
<li>PR #586: remove extra tx preparation
......
......@@ -4,7 +4,7 @@
08000=Error opening database: {0}
21S02=Column count does not match
22001=Value too long for column {0}: {1}
22003=Numeric value out of range: {0}
22003=Numeric value out of range: {0} {1}
22007=Cannot parse {0} constant {1}
22012=Division by zero: {0}
22018=Data conversion error converting {0}
......
......@@ -33,7 +33,7 @@ public class Driver implements java.sql.Driver, JdbcDriverBackwardsCompat {
private static final Driver INSTANCE = new Driver();
private static final String DEFAULT_URL = "jdbc:default:connection";
private static final ThreadLocal<Connection> DEFAULT_CONNECTION =
new ThreadLocal<Connection>();
new ThreadLocal<>();
private static volatile boolean registered;
......
......@@ -106,6 +106,12 @@ public class ErrorCode {
*/
public static final int NUMERIC_VALUE_OUT_OF_RANGE_1 = 22003;
/**
* The error with code <code>22004</code> is thrown when a value is out of
* range when converting to another column's data type.
*/
public static final int NUMERIC_VALUE_OUT_OF_RANGE_2 = 22004;
/**
* The error with code <code>22007</code> is thrown when
* a text can not be converted to a date, time, or timestamp constant.
......
......@@ -65,6 +65,7 @@ import org.h2.command.ddl.DropUserDataType;
import org.h2.command.ddl.DropView;
import org.h2.command.ddl.GrantRevoke;
import org.h2.command.ddl.PrepareProcedure;
import org.h2.command.ddl.SchemaCommand;
import org.h2.command.ddl.SetComment;
import org.h2.command.ddl.TruncateTable;
import org.h2.command.dml.AlterSequence;
......@@ -4404,7 +4405,7 @@ public class Parser {
}
} else if (dataType.type == Value.ENUM) {
if (readIf("(")) {
java.util.List<String> enumeratorList = new ArrayList<String>();
java.util.List<String> enumeratorList = new ArrayList<>();
original += '(';
String enumerator0 = readString();
enumeratorList.add(enumerator0);
......@@ -5050,7 +5051,7 @@ public class Parser {
}
private Prepared parseWith() {
List<TableView> viewsCreated = new ArrayList<TableView>();
List<TableView> viewsCreated = new ArrayList<>();
readIf("RECURSIVE");
do {
viewsCreated.add(parseSingleCommonTableExpression());
......@@ -5154,6 +5155,7 @@ public class Parser {
Query withQuery = parseSelect();
read(")");
columnTemplateList = createQueryColumnTemplateList(cols, withQuery, querySQLOutput);
} finally {
session.removeLocalTempTable(recursiveTable);
}
......@@ -5942,9 +5944,18 @@ public class Parser {
return commandIfTableExists(schema, tableName, ifTableExists, command);
} else if (readIf("INDEX")) {
// MySQL compatibility
String indexName = readIdentifierWithSchema();
DropIndex command = new DropIndex(session, getSchema());
command.setIndexName(indexName);
String indexOrConstraintName = readIdentifierWithSchema();
final SchemaCommand command;
if (schema.findIndex(session, indexOrConstraintName) != null) {
DropIndex dropIndexCommand = new DropIndex(session, getSchema());
dropIndexCommand.setIndexName(indexOrConstraintName);
command = dropIndexCommand;
} else {
AlterTableDropConstraint dropCommand = new AlterTableDropConstraint(
session, getSchema(), false/*ifExists*/);
dropCommand.setConstraintName(indexOrConstraintName);
command = dropCommand;
}
return commandIfTableExists(schema, tableName, ifTableExists, command);
} else if (readIf("PRIMARY")) {
read("KEY");
......
......@@ -106,7 +106,7 @@ public class Explain extends Prepared {
total += e.getValue();
}
if (total > 0) {
statistics = new TreeMap<String, Integer>(statistics);
statistics = new TreeMap<>(statistics);
StringBuilder buff = new StringBuilder();
if (statistics.size() > 1) {
buff.append("total: ").append(total).append('\n');
......
......@@ -327,7 +327,7 @@ public class Insert extends Prepared implements ResultTarget {
throw de;
}
ArrayList<String> variableNames = new ArrayList<String>(
ArrayList<String> variableNames = new ArrayList<>(
duplicateKeyAssignmentMap.size());
for (int i = 0; i < columns.length; i++) {
String key = table.getSchema().getName() + "." +
......
......@@ -327,7 +327,7 @@ public class Select extends Query {
}
HashMap<Expression, Object> values = groups.get(key);
if (values == null) {
values = new HashMap<Expression, Object>();
values = new HashMap<>();
groups.put(key, values);
}
currentGroup = values;
......@@ -1451,8 +1451,9 @@ public class Select extends Query {
protected Value[] fetchNextRow() {
while ((sampleSize <= 0 || rowNumber < sampleSize) &&
topTableFilter.next()) {
setCurrentRowNumber(++rowNumber);
setCurrentRowNumber(rowNumber + 1);
if (isConditionMet()) {
++rowNumber;
Value[] row = new Value[columnCount];
for (int i = 0; i < columnCount; i++) {
Expression expr = expressions.get(i);
......
......@@ -16,6 +16,8 @@ import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.h2.api.DatabaseEventListener;
import org.h2.api.ErrorCode;
import org.h2.api.JavaObjectSerializer;
......@@ -115,7 +117,7 @@ public class Database implements DataHandler {
private final Set<Session> userSessions =
Collections.synchronizedSet(new HashSet<Session>());
private Session exclusiveSession;
private final AtomicReference<Session> exclusiveSession = new AtomicReference<>();
private final BitField objectIds = new BitField();
private final Object lobSyncObject = new Object();
......@@ -135,8 +137,8 @@ public class Database implements DataHandler {
private Trace trace;
private final int fileLockMethod;
private Role publicRole;
private long modificationDataId;
private long modificationMetaId;
private final AtomicLong modificationDataId = new AtomicLong();
private final AtomicLong modificationMetaId = new AtomicLong();
private CompareMode compareMode;
private String cluster = Constants.CLUSTERING_DISABLED;
private boolean readOnly;
......@@ -389,7 +391,7 @@ public class Database implements DataHandler {
}
public long getModificationDataId() {
return modificationDataId;
return modificationDataId.get();
}
/**
......@@ -466,18 +468,18 @@ public class Database implements DataHandler {
}
public long getNextModificationDataId() {
return ++modificationDataId;
return modificationDataId.incrementAndGet();
}
public long getModificationMetaId() {
return modificationMetaId;
return modificationMetaId.get();
}
public long getNextModificationMetaId() {
// if the meta data has been modified, the data is modified as well
// (because MetaTable returns modificationDataId)
modificationDataId++;
return modificationMetaId++;
modificationDataId.incrementAndGet();
return modificationMetaId.incrementAndGet() - 1;
}
public int getPowerOffCount() {
......@@ -1148,7 +1150,7 @@ public class Database implements DataHandler {
if (closing) {
return null;
}
if (exclusiveSession != null) {
if (exclusiveSession.get() != null) {
throw DbException.get(ErrorCode.DATABASE_IS_IN_EXCLUSIVE_MODE);
}
Session session = new Session(this, user, ++nextSessionId);
......@@ -1168,9 +1170,7 @@ public class Database implements DataHandler {
*/
public synchronized void removeSession(Session session) {
if (session != null) {
if (exclusiveSession == session) {
exclusiveSession = null;
}
exclusiveSession.compareAndSet(session, null);
userSessions.remove(session);
if (session != systemSession && session != lobSession) {
trace.info("disconnecting session #{0}", session.getId());
......@@ -1984,7 +1984,7 @@ public class Database implements DataHandler {
mvStore.getStore().setRetentionTime(value);
}
}
public void setAllowBuiltinAliasOverride(boolean b) {
allowBuiltinAliasOverride = b;
}
......@@ -1992,7 +1992,7 @@ public class Database implements DataHandler {
public boolean isAllowBuiltinAliasOverride() {
return allowBuiltinAliasOverride;
}
/**
* Check if flush-on-each-commit is enabled.
*
......@@ -2409,7 +2409,7 @@ public class Database implements DataHandler {
}
public Session getExclusiveSession() {
return exclusiveSession;
return exclusiveSession.get();
}
/**
......@@ -2419,10 +2419,10 @@ public class Database implements DataHandler {
* @param closeOthers whether other sessions are closed
*/
public void setExclusiveSession(Session session, boolean closeOthers) {
this.exclusiveSession = session;
if (closeOthers) {
closeAllSessionsException(session);
}
this.exclusiveSession.set(session);
if (closeOthers) {
closeAllSessionsException(session);
}
}
@Override
......
......@@ -22,7 +22,7 @@ class DatabaseCloser extends Thread {
private int delayInMillis;
DatabaseCloser(Database db, int delayInMillis, boolean shutdownHook) {
this.databaseRef = new WeakReference<Database>(db);
this.databaseRef = new WeakReference<>(db);
this.delayInMillis = delayInMillis;
this.shutdownHook = shutdownHook;
trace = db.getTrace(Trace.DATABASE);
......
......@@ -277,7 +277,7 @@ public class Mode {
mode.prohibitEmptyInPredicate = true;
mode.padFixedLengthStrings = true;
// Enumerate all H2 types NOT supported by PostgreSQL:
Set<String> disallowedTypes = new java.util.HashSet<String>();
Set<String> disallowedTypes = new java.util.HashSet<>();
disallowedTypes.add("NUMBER");
disallowedTypes.add("IDENTITY");
disallowedTypes.add("TINYINT");
......
......@@ -28,7 +28,7 @@ public class QueryStatisticsData {
};
private final HashMap<String, QueryEntry> map =
new HashMap<String, QueryEntry>();
new HashMap<>();
private int maxQueryEntries;
......@@ -43,7 +43,7 @@ public class QueryStatisticsData {
public synchronized List<QueryEntry> getQueries() {
// return a copy of the map so we don't have to
// worry about external synchronization
ArrayList<QueryEntry> list = new ArrayList<QueryEntry>();
ArrayList<QueryEntry> list = new ArrayList<>();
list.addAll(map.values());
// only return the newest 100 entries
Collections.sort(list, QUERY_ENTRY_COMPARATOR);
......@@ -71,12 +71,12 @@ public class QueryStatisticsData {
// Test against 1.5 x max-size so we don't do this too often
if (map.size() > maxQueryEntries * 1.5f) {
// Sort the entries by age
ArrayList<QueryEntry> list = new ArrayList<QueryEntry>();
ArrayList<QueryEntry> list = new ArrayList<>();
list.addAll(map.values());
Collections.sort(list, QUERY_ENTRY_COMPARATOR);
// Create a set of the oldest 1/3 of the entries
HashSet<QueryEntry> oldestSet =
new HashSet<QueryEntry>(list.subList(0, list.size() / 3));
new HashSet<>(list.subList(0, list.size() / 3));
// Loop over the map using the set and remove
// the oldest 1/3 of the entries.
for (Iterator<Entry<String, QueryEntry>> it =
......
......@@ -261,7 +261,7 @@ public class Session extends SessionWithState {
@Override
public ArrayList<String> getClusterServers() {
return new ArrayList<String>();
return new ArrayList<>();
}
public boolean setCommitOrRollbackDisabled(boolean x) {
......@@ -1679,12 +1679,12 @@ public class Session extends SessionWithState {
if (v.getTableId() == LobStorageFrontend.TABLE_RESULT ||
v.getTableId() == LobStorageFrontend.TABLE_TEMP) {
if (temporaryResultLobs == null) {
temporaryResultLobs = new LinkedList<TimeoutValue>();
temporaryResultLobs = new LinkedList<>();
}
temporaryResultLobs.add(new TimeoutValue(v));
} else {
if (temporaryLobs == null) {
temporaryLobs = new ArrayList<Value>();
temporaryLobs = new ArrayList<>();
}
temporaryLobs.add(v);
}
......
......@@ -100,7 +100,7 @@ public class SessionRemote extends SessionWithState implements DataHandler {
@Override
public ArrayList<String> getClusterServers() {
ArrayList<String> serverList = new ArrayList<String>();
ArrayList<String> serverList = new ArrayList<>();
for (int i = 0; i < transferList.size(); i++) {
Transfer transfer = transferList.get(i);
serverList.add(transfer.getSocket().getInetAddress().
......
......@@ -43,7 +43,7 @@ public class ConditionInConstantSet extends Condition {
ArrayList<Expression> valueList) {
this.left = left;
this.valueList = valueList;
this.valueSet = new TreeSet<Value>(new Comparator<Value>() {
this.valueSet = new TreeSet<>(new Comparator<Value>() {
@Override
public int compare(Value o1, Value o2) {
return session.getDatabase().compare(o1, o2);
......
......@@ -23,6 +23,7 @@ import org.h2.table.TableFilter;
import org.h2.value.Value;
import org.h2.value.ValueBoolean;
import org.h2.value.ValueEnum;
import org.h2.value.ValueNull;
/**
* A expression that represents a column of a table or view.
......@@ -188,7 +189,7 @@ public class ExpressionColumn extends Expression {
columnResolver.getValue(column);
throw DbException.get(ErrorCode.MUST_GROUP_BY_COLUMN_1, getSQL());
}
if (column.getEnumerators() != null) {
if (column.getEnumerators() != null && value != ValueNull.INSTANCE) {
return ValueEnum.get(column.getEnumerators(), value.getInt());
}
return value;
......
......@@ -52,7 +52,7 @@ final class FullTextSettings {
*/
private final SoftHashMap<Connection,
SoftHashMap<String, PreparedStatement>> cache =
new SoftHashMap<Connection, SoftHashMap<String, PreparedStatement>>();
new SoftHashMap<>();
/**
* The whitespace characters.
......@@ -209,7 +209,7 @@ final class FullTextSettings {
throws SQLException {
SoftHashMap<String, PreparedStatement> c = cache.get(conn);
if (c == null) {
c = new SoftHashMap<String, PreparedStatement>();
c = new SoftHashMap<>();
cache.put(conn, c);
}
PreparedStatement prep = c.get(sql);
......
......@@ -149,7 +149,7 @@ public class IndexCondition {
* @return the value list
*/
public Value[] getCurrentValueList(Session session) {
HashSet<Value> valueSet = new HashSet<Value>();
HashSet<Value> valueSet = new HashSet<>();
for (Expression e : expressionList) {
Value v = e.getValue(session);
v = column.convert(v);
......
......@@ -324,7 +324,7 @@ public class IndexCursor implements Cursor {
if (v != ValueNull.INSTANCE) {
v = inColumn.convert(v);
if (inResultTested == null) {
inResultTested = new HashSet<Value>();
inResultTested = new HashSet<>();
}
if (inResultTested.add(v)) {
find(v);
......
......@@ -236,7 +236,7 @@ public class PageDataLeaf extends PageData {
writeData();
// free up the space used by the row
Row r = rows[0];
rowRef = new SoftReference<Row>(r);
rowRef = new SoftReference<>(r);
rows[0] = null;
Data all = index.getPageStore().createData();
all.checkCapacity(data.length());
......@@ -353,7 +353,7 @@ public class PageDataLeaf extends PageData {
}
r.setKey(keys[at]);
if (firstOverflowPageId != 0) {
rowRef = new SoftReference<Row>(r);
rowRef = new SoftReference<>(r);
} else {
rows[at] = r;
memoryChange(true, r);
......
......@@ -150,7 +150,7 @@ public class JdbcConnection extends TraceObject implements Connection,
this.rollback = clone.rollback;
this.watcher = null;
if (clone.clientInfo != null) {
this.clientInfo = new HashMap<String, String>(clone.clientInfo);
this.clientInfo = new HashMap<>(clone.clientInfo);
}
}
......@@ -361,14 +361,14 @@ public class JdbcConnection extends TraceObject implements Connection,
}
CloseWatcher.unregister(watcher);
session.cancel();
if (executingStatement != null) {
try {
executingStatement.cancel();
} catch (NullPointerException e) {
// ignore
}
}
synchronized (session) {
if (executingStatement != null) {
try {
executingStatement.cancel();
} catch (NullPointerException e) {
// ignore
}
}
try {
if (!session.isClosed()) {
try {
......@@ -440,7 +440,9 @@ public class JdbcConnection extends TraceObject implements Connection,
if (autoCommit && !session.getAutoCommit()) {
commit();
}
session.setAutoCommit(autoCommit);
synchronized (session) {
session.setAutoCommit(autoCommit);
}
} catch (Exception e) {
throw logAndConvert(e);
}
......@@ -1592,7 +1594,9 @@ public class JdbcConnection extends TraceObject implements Connection,
try {
Value v = session.getDataHandler().getLobStorage().createBlob(
new ByteArrayInputStream(Utils.EMPTY_BYTES), 0);
session.addTemporaryLob(v);
synchronized (session) {
session.addTemporaryLob(v);
}
return new JdbcBlob(this, v, id);
} finally {
afterWriting();
......@@ -1733,7 +1737,7 @@ public class JdbcConnection extends TraceObject implements Connection,
if (clientInfoNameRegEx != null && clientInfoNameRegEx.matcher(name).matches()) {
if (clientInfo == null) {
clientInfo = new HashMap<String, String>();
clientInfo = new HashMap<>();
}
clientInfo.put(name, value);
} else {
......@@ -1775,7 +1779,7 @@ public class JdbcConnection extends TraceObject implements Connection,
}
checkClosed();
if (clientInfo == null) {
clientInfo = new HashMap<String, String>();
clientInfo = new HashMap<>();
} else {
clientInfo.clear();
}
......
......@@ -8,6 +8,7 @@ package org.h2.jdbc;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
......@@ -3771,6 +3772,8 @@ public class JdbcResultSet extends TraceObject implements ResultSet, JdbcResultS
}
if (type == BigDecimal.class) {
return type.cast(value.getBigDecimal());
} else if (type == BigInteger.class) {
return type.cast(BigInteger.valueOf(value.getLong()));
} else if (type == String.class) {
return type.cast(value.getString());
} else if (type == Boolean.class) {
......@@ -3793,6 +3796,12 @@ public class JdbcResultSet extends TraceObject implements ResultSet, JdbcResultS
return type.cast(value.getTime());
} else if (type == Timestamp.class) {
return type.cast(value.getTimestamp());
} else if (type == java.util.Date.class) {
return type.cast(new java.util.Date(value.getTimestamp().getTime()));
} else if (type == Calendar.class) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(value.getTimestamp());
return type.cast(calendar);
} else if (type == UUID.class) {
return type.cast(value.getObject());
} else if (type == byte[].class) {
......@@ -3800,6 +3809,12 @@ public class JdbcResultSet extends TraceObject implements ResultSet, JdbcResultS
} else if (type == java.sql.Array.class) {
int id = getNextId(TraceObject.ARRAY);
return type.cast(value == ValueNull.INSTANCE ? null : new JdbcArray(conn, value, id));
} else if (type == Blob.class) {
int id = getNextId(TraceObject.ARRAY);
return type.cast(value == ValueNull.INSTANCE ? null : new JdbcBlob(conn, value, id));
} else if (type == Clob.class) {
int id = getNextId(TraceObject.ARRAY);
return type.cast(value == ValueNull.INSTANCE ? null : new JdbcClob(conn, value, id));
} else if (type == TimestampWithTimeZone.class) {
return type.cast(value.getObject());
} else if (DataType.isGeometryClass(type)) {
......
......@@ -34,7 +34,7 @@ public class JdbcStatement extends TraceObject implements Statement, JdbcStateme
protected final int resultSetType;
protected final int resultSetConcurrency;
protected final boolean closedByResultSet;
private CommandInterface executingCommand;
private volatile CommandInterface executingCommand;
private int lastExecutedCommandType;
private ArrayList<String> batchCommands;
private boolean escapeProcessing = true;
......
......@@ -55,7 +55,7 @@ public class DatabaseInfo implements DatabaseInfoMBean {
throws JMException {
name = name.replace(':', '_');
path = path.replace(':', '_');
Hashtable<String, String> map = new Hashtable<String, String>();
Hashtable<String, String> map = new Hashtable<>();
map.put("name", name);
map.put("path", path);
return new ObjectName("org.h2", map);
......@@ -229,7 +229,7 @@ public class DatabaseInfo implements DatabaseInfoMBean {
public String listSettings() {
StringBuilder buff = new StringBuilder();
for (Map.Entry<String, String> e :
new TreeMap<String, String>(
new TreeMap<>(
database.getSettings().getSettings()).entrySet()) {
buff.append(e.getKey()).append(" = ").append(e.getValue()).append('\n');
}
......
......@@ -8,6 +8,7 @@ package org.h2.message;
import java.math.BigDecimal;
import java.sql.SQLException;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.h2.util.StringUtils;
/**
......@@ -91,7 +92,12 @@ public class TraceObject {
protected static final int ARRAY = 16;
private static final int LAST = ARRAY + 1;
private static final int[] ID = new int[LAST];
private static final AtomicInteger[] ID = new AtomicInteger[LAST];
static {
for (int i=0; i<LAST; i++) {
ID[i] = new AtomicInteger(-1);
}
}
private static final String[] PREFIX = { "call", "conn", "dbMeta", "prep",
"rs", "rsMeta", "sp", "ex", "stat", "blob", "clob", "pMeta", "ds",
"xads", "xares", "xid", "ar" };
......@@ -138,7 +144,7 @@ public class TraceObject {
* @return the new trace object id
*/
protected static int getNextId(int type) {
return ID[type]++;
return ID[type].incrementAndGet();
}
/**
......
......@@ -82,7 +82,7 @@ public class TraceSystem implements TraceWriter {
private int maxFileSize = DEFAULT_MAX_FILE_SIZE;
private String fileName;
private final AtomicReferenceArray<Trace> traces =
new AtomicReferenceArray<Trace>(Trace.MODULE_NAMES.length);
new AtomicReferenceArray<>(Trace.MODULE_NAMES.length);
private SimpleDateFormat dateFormat;
private Writer fileWriter;
private PrintWriter printWriter;
......
......@@ -22,7 +22,7 @@ import org.h2.util.New;
/**
* Utility methods
*/
public class DataUtils {
public final class DataUtils {
/**
* An error occurred while reading from the file.
......@@ -758,8 +758,8 @@ public class DataUtils {
int size = arguments.length;
if (size > 0) {
Object o = arguments[size - 1];
if (o instanceof Exception) {
e.initCause((Exception) o);
if (o instanceof Throwable) {
e.initCause((Throwable) o);
}
}
return e;
......@@ -776,6 +776,7 @@ public class DataUtils {
public static String formatMessage(int errorCode, String message,
Object... arguments) {
// convert arguments to strings, to avoid locale specific formatting
arguments = arguments.clone();
for (int i = 0; i < arguments.length; i++) {
Object a = arguments[i];
if (!(a instanceof Exception)) {
......@@ -936,10 +937,10 @@ public class DataUtils {
* @param <K> the key type
* @param <V> the value type
*/
public static class MapEntry<K, V> implements Map.Entry<K, V> {
public static final class MapEntry<K, V> implements Map.Entry<K, V> {
private final K key;
private V value;
private final V value;
public MapEntry(K key, V value) {
this.key = key;
......
......@@ -355,6 +355,10 @@ public class FileStore {
return freeSpace.getFirstFree();
}
long getFileLengthInUse() {
return freeSpace.getLastFree();
}
/**
* Mark the file as empty.
*/
......
......@@ -170,6 +170,15 @@ public class FreeSpaceBitSet {
return getPos(set.nextClearBit(0));
}
/**
* Get the start position of the trailing (unbounded) free space region,
* i.e. the first position after the last used block.
*
* @return the position.
*/
public long getLastFree() {
return getPos(set.previousSetBit(set.size()-1) + 1);
}
@Override
public String toString() {
StringBuilder buff = new StringBuilder();
......
......@@ -57,7 +57,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
private final DataType valueType;
private ConcurrentArrayList<Page> oldRoots =
new ConcurrentArrayList<Page>();
new ConcurrentArrayList<>();
/**
......@@ -818,7 +818,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
* @return the cursor
*/
public Cursor<K, V> cursor(K from) {
return new Cursor<K, V>(this, root, from);
return new Cursor<>(this, root, from);
}
@Override
......@@ -829,7 +829,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
@Override
public Iterator<Entry<K, V>> iterator() {
final Cursor<K, V> cursor = new Cursor<K, V>(map, root, null);
final Cursor<K, V> cursor = new Cursor<>(map, root, null);
return new Iterator<Entry<K, V>>() {
@Override
......@@ -840,7 +840,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
@Override
public Entry<K, V> next() {
K k = cursor.next();
return new DataUtils.MapEntry<K, V>(k, cursor.getValue());
return new DataUtils.MapEntry<>(k, cursor.getValue());
}
@Override
......@@ -1111,7 +1111,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
* @return the opened map
*/
MVMap<K, V> openReadOnly() {
MVMap<K, V> m = new MVMap<K, V>(keyType, valueType);
MVMap<K, V> m = new MVMap<>(keyType, valueType);
m.readOnly = true;
HashMap<String, Object> config = New.hashMap();
config.put("id", id);
......@@ -1295,7 +1295,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
if (valueType == null) {
valueType = new ObjectDataType();
}
return new MVMap<K, V>(keyType, valueType);
return new MVMap<>(keyType, valueType);
}
}
......
......@@ -69,7 +69,7 @@ public class MVMapConcurrent<K, V> extends MVMap<K, V> {
if (valueType == null) {
valueType = new ObjectDataType();
}
return new MVMapConcurrent<K, V>(keyType, valueType);
return new MVMapConcurrent<>(keyType, valueType);
}
}
......
......@@ -110,7 +110,7 @@ public class MVStoreTool {
FileChannel file = null;
int blockSize = MVStore.BLOCK_SIZE;
TreeMap<Integer, Long> mapSizesTotal =
new TreeMap<Integer, Long>();
new TreeMap<>();
long pageSizeTotal = 0;
try {
file = FilePath.get(fileName).open("r");
......@@ -157,7 +157,7 @@ public class MVStoreTool {
int remaining = c.pageCount;
pageCount += c.pageCount;
TreeMap<Integer, Integer> mapSizes =
new TreeMap<Integer, Integer>();
new TreeMap<>();
int pageSizeSum = 0;
while (remaining > 0) {
int start = p;
......@@ -350,7 +350,7 @@ public class MVStoreTool {
MVMap<String, String> meta = store.getMetaMap();
Map<String, Object> header = store.getStoreHeader();
long fileCreated = DataUtils.readHexLong(header, "created", 0L);
TreeMap<Integer, Chunk> chunks = new TreeMap<Integer, Chunk>();
TreeMap<Integer, Chunk> chunks = new TreeMap<>();
long chunkLength = 0;
long maxLength = 0;
long maxLengthLive = 0;
......@@ -521,7 +521,7 @@ public class MVStoreTool {
}
for (String mapName : source.getMapNames()) {
MVMap.Builder<Object, Object> mp =
new MVMap.Builder<Object, Object>().
new MVMap.Builder<>().
keyType(new GenericDataType()).
valueType(new GenericDataType());
MVMap<Object, Object> sourceMap = source.openMap(mapName, mp);
......
......@@ -17,7 +17,7 @@ import java.util.TreeMap;
public class OffHeapStore extends FileStore {
private final TreeMap<Long, ByteBuffer> memory =
new TreeMap<Long, ByteBuffer>();
new TreeMap<>();
@Override
public void open(String fileName, boolean readOnly, char[] encryptionKey) {
......
......@@ -41,7 +41,7 @@ public class StreamStore {
private int maxBlockSize = 256 * 1024;
private final AtomicLong nextKey = new AtomicLong();
private final AtomicReference<byte[]> nextBuffer =
new AtomicReference<byte[]>();
new AtomicReference<>();
/**
* Create a stream store instance.
......
......@@ -82,13 +82,21 @@ public class CacheLongKeyLIRS<V> {
* Remove all entries.
*/
public void clear() {
long max = Math.max(1, maxMemory / segmentCount);
long max = getMaxItemSize();
for (int i = 0; i < segmentCount; i++) {
segments[i] = new Segment<V>(
segments[i] = new Segment<>(
max, stackMoveDistance, 8, nonResidentQueueSize);
}
}
/**
* Determines the maximum size of a single data item that can fit into the cache.
* @return the data item size limit
*/
public long getMaxItemSize() {
return Math.max(1, maxMemory / segmentCount);
}
private Entry<V> find(long key) {
int hash = getHash(key);
return getSegment(hash).find(key, hash);
......@@ -162,7 +170,7 @@ public class CacheLongKeyLIRS<V> {
Segment<V> s2 = segments[segmentIndex];
if (s == s2) {
// no other thread resized, so we do
s = new Segment<V>(s, newLen);
s = new Segment<>(s, newLen);
segments[segmentIndex] = s;
}
return s;
......@@ -296,7 +304,7 @@ public class CacheLongKeyLIRS<V> {
* @return the entry set
*/
public synchronized Set<Map.Entry<Long, V>> entrySet() {
HashMap<Long, V> map = new HashMap<Long, V>();
HashMap<Long, V> map = new HashMap<>();
for (long k : keySet()) {
map.put(k, find(k).value);
}
......@@ -309,7 +317,7 @@ public class CacheLongKeyLIRS<V> {
* @return the set of keys
*/
public Set<Long> keySet() {
HashSet<Long> set = new HashSet<Long>();
HashSet<Long> set = new HashSet<>();
for (Segment<V> s : segments) {
set.addAll(s.keySet());
}
......@@ -403,7 +411,7 @@ public class CacheLongKeyLIRS<V> {
* @return the key list
*/
public List<Long> keys(boolean cold, boolean nonResident) {
ArrayList<Long> keys = new ArrayList<Long>();
ArrayList<Long> keys = new ArrayList<>();
for (Segment<V> s : segments) {
keys.addAll(s.keys(cold, nonResident));
}
......@@ -416,7 +424,7 @@ public class CacheLongKeyLIRS<V> {
* @return the entry set
*/
public List<V> values() {
ArrayList<V> list = new ArrayList<V>();
ArrayList<V> list = new ArrayList<>();
for (long k : keySet()) {
V value = find(k).value;
if (value != null) {
......@@ -451,7 +459,7 @@ public class CacheLongKeyLIRS<V> {
* @return the map
*/
public Map<Long, V> getMap() {
HashMap<Long, V> map = new HashMap<Long, V>();
HashMap<Long, V> map = new HashMap<>();
for (long k : keySet()) {
V x = find(k).value;
if (x != null) {
......@@ -591,11 +599,11 @@ public class CacheLongKeyLIRS<V> {
mask = len - 1;
// initialize the stack and queue heads
stack = new Entry<V>();
stack = new Entry<>();
stack.stackPrev = stack.stackNext = stack;
queue = new Entry<V>();
queue = new Entry<>();
queue.queuePrev = queue.queueNext = queue;
queue2 = new Entry<V>();
queue2 = new Entry<>();
queue2.queuePrev = queue2.queueNext = queue2;
@SuppressWarnings("unchecked")
......@@ -671,7 +679,7 @@ public class CacheLongKeyLIRS<V> {
}
private static <V> Entry<V> copy(Entry<V> old) {
Entry<V> e = new Entry<V>();
Entry<V> e = new Entry<>();
e.key = old.key;
e.value = old.value;
e.memory = old.memory;
......@@ -805,7 +813,7 @@ public class CacheLongKeyLIRS<V> {
// the new entry is too big to fit
return old;
}
e = new Entry<V>();
e = new Entry<>();
e.key = key;
e.value = value;
e.memory = memory;
......@@ -1033,7 +1041,7 @@ public class CacheLongKeyLIRS<V> {
* @return the key list
*/
synchronized List<Long> keys(boolean cold, boolean nonResident) {
ArrayList<Long> keys = new ArrayList<Long>();
ArrayList<Long> keys = new ArrayList<>();
if (cold) {
Entry<V> start = nonResident ? queue2 : queue;
for (Entry<V> e = start.queueNext; e != start;
......@@ -1068,7 +1076,7 @@ public class CacheLongKeyLIRS<V> {
* @return the set of keys
*/
synchronized Set<Long> keySet() {
HashSet<Long> set = new HashSet<Long>();
HashSet<Long> set = new HashSet<>();
for (Entry<V> e = stack.stackNext; e != stack; e = e.stackNext) {
set.add(e.key);
}
......
......@@ -58,7 +58,7 @@ public class FilePathCache extends FilePathWrapper {
CacheLongKeyLIRS.Config cc = new CacheLongKeyLIRS.Config();
// 1 MB cache size
cc.maxMemory = 1024 * 1024;
cache = new CacheLongKeyLIRS<ByteBuffer>(cc);
cache = new CacheLongKeyLIRS<>(cc);
}
FileCache(FileChannel base) {
......
......@@ -66,13 +66,13 @@ public class MVPrimaryIndex extends BaseIndex {
ValueDataType valueType = new ValueDataType(db.getCompareMode(), db,
sortTypes);
mapName = "table." + getId();
Transaction t = mvTable.getTransaction(null);
Transaction t = mvTable.getTransactionBegin();
dataMap = t.openMap(mapName, keyType, valueType);
t.commit();
if (!table.isPersistData()) {
dataMap.map.setVolatile(true);
}
Value k = dataMap.lastKey();
Value k = dataMap.map.lastKey(); // include uncommitted keys as well
lastKey.set(k == null ? 0 : k.getLong());
}
......@@ -246,7 +246,7 @@ public class MVPrimaryIndex extends BaseIndex {
public void remove(Session session) {
TransactionMap<Value, Value> map = getMap(session);
if (!map.isClosed()) {
Transaction t = mvTable.getTransaction(session);
Transaction t = session.getTransaction();
t.removeMap(map);
}
}
......@@ -374,7 +374,7 @@ public class MVPrimaryIndex extends BaseIndex {
if (session == null) {
return dataMap;
}
Transaction t = mvTable.getTransaction(session);
Transaction t = session.getTransaction();
return dataMap.getInstance(t, Long.MAX_VALUE);
}
......
......@@ -102,7 +102,7 @@ public class MVSpatialIndex extends BaseIndex implements SpatialIndex, MVIndex {
new MVRTreeMap.Builder<VersionedValue>().
valueType(valueType);
spatialMap = db.getMvStore().getStore().openMap(mapName, mapBuilder);
Transaction t = mvTable.getTransaction(null);
Transaction t = mvTable.getTransactionBegin();
dataMap = t.openMap(spatialMap);
t.commit();
}
......@@ -260,7 +260,7 @@ public class MVSpatialIndex extends BaseIndex implements SpatialIndex, MVIndex {
public void remove(Session session) {
TransactionMap<SpatialKey, Value> map = getMap(session);
if (!map.isClosed()) {
Transaction t = mvTable.getTransaction(session);
Transaction t = session.getTransaction();
t.removeMap(map);
}
}
......@@ -330,7 +330,7 @@ public class MVSpatialIndex extends BaseIndex implements SpatialIndex, MVIndex {
if (session == null) {
return dataMap;
}
Transaction t = mvTable.getTransaction(session);
Transaction t = session.getTransaction();
return dataMap.getInstance(t, Long.MAX_VALUE);
}
......
......@@ -66,9 +66,9 @@ public class MVTable extends TableBase {
static {
if (SysProperties.THREAD_DEADLOCK_DETECTOR) {
WAITING_FOR_LOCK = new DebuggingThreadLocal<String>();
EXCLUSIVE_LOCKS = new DebuggingThreadLocal<ArrayList<String>>();
SHARED_LOCKS = new DebuggingThreadLocal<ArrayList<String>>();
WAITING_FOR_LOCK = new DebuggingThreadLocal<>();
EXCLUSIVE_LOCKS = new DebuggingThreadLocal<>();
SHARED_LOCKS = new DebuggingThreadLocal<>();
} else {
WAITING_FOR_LOCK = null;
EXCLUSIVE_LOCKS = null;
......@@ -78,18 +78,18 @@ public class MVTable extends TableBase {
private MVPrimaryIndex primaryIndex;
private final ArrayList<Index> indexes = New.arrayList();
private long lastModificationId;
private volatile long lastModificationId;
private volatile Session lockExclusiveSession;
// using a ConcurrentHashMap as a set
private final ConcurrentHashMap<Session, Session> lockSharedSessions =
new ConcurrentHashMap<Session, Session>();
new ConcurrentHashMap<>();
/**
* The queue of sessions waiting to lock the table. It is a FIFO queue to
* prevent starvation, since Java's synchronized locking is biased.
*/
private final ArrayDeque<Session> waitingSessions = new ArrayDeque<Session>();
private final ArrayDeque<Session> waitingSessions = new ArrayDeque<>();
private final Trace traceLock;
private int changesSinceAnalyze;
private int nextAnalyze;
......@@ -670,7 +670,7 @@ public class MVTable extends TableBase {
@Override
public void removeRow(Session session, Row row) {
lastModificationId = database.getNextModificationDataId();
Transaction t = getTransaction(session);
Transaction t = session.getTransaction();
long savepoint = t.setSavepoint();
try {
for (int i = indexes.size() - 1; i >= 0; i--) {
......@@ -697,7 +697,7 @@ public class MVTable extends TableBase {
@Override
public void addRow(Session session, Row row) {
lastModificationId = database.getNextModificationDataId();
Transaction t = getTransaction(session);
Transaction t = session.getTransaction();
long savepoint = t.setSavepoint();
try {
for (int i = 0, size = indexes.size(); i < size; i++) {
......@@ -727,13 +727,15 @@ public class MVTable extends TableBase {
}
private void analyzeIfRequired(Session session) {
if (nextAnalyze == 0 || nextAnalyze > changesSinceAnalyze++) {
return;
}
changesSinceAnalyze = 0;
int n = 2 * nextAnalyze;
if (n > 0) {
nextAnalyze = n;
synchronized (this) {
if (nextAnalyze == 0 || nextAnalyze > changesSinceAnalyze++) {
return;
}
changesSinceAnalyze = 0;
int n = 2 * nextAnalyze;
if (n > 0) {
nextAnalyze = n;
}
}
session.markTableForAnalyze(this);
}
......@@ -844,17 +846,13 @@ public class MVTable extends TableBase {
}
/**
* Get the transaction to use for this session.
* Get a new transaction.
*
* @param session the session
* @return the transaction
*/
Transaction getTransaction(Session session) {
if (session == null) {
// TODO need to commit/rollback the transaction
return transactionStore.begin();
}
return session.getTransaction();
Transaction getTransactionBegin() {
// TODO need to commit/rollback the transaction
return transactionStore.begin();
}
@Override
......
......@@ -122,7 +122,7 @@ public class MVTableEngine implements TableEngine {
* Key: the map name, value: the table.
*/
final ConcurrentHashMap<String, MVTable> tableMap =
new ConcurrentHashMap<String, MVTable>();
new ConcurrentHashMap<>();
/**
* The store.
......@@ -162,7 +162,7 @@ public class MVTableEngine implements TableEngine {
}
this.transactionStore = new TransactionStore(
store,
new ValueDataType(null, db, null));
new ValueDataType(db.getCompareMode(), db, null));
transactionStore.init();
} catch (IllegalStateException e) {
throw convertIllegalStateException(e);
......@@ -208,7 +208,7 @@ public class MVTableEngine implements TableEngine {
}
public HashMap<String, MVTable> getTables() {
return new HashMap<String, MVTable>(tableMap);
return new HashMap<>(tableMap);
}
/**
......
......@@ -771,7 +771,7 @@ public class TransactionStore {
MVMap<K, VersionedValue> map = store.openMap(name, keyType,
valueType);
int mapId = map.getId();
return new TransactionMap<K, V>(this, map, mapId);
return new TransactionMap<>(this, map, mapId);
}
/**
......@@ -786,7 +786,7 @@ public class TransactionStore {
MVMap<K, VersionedValue> map) {
checkNotClosed();
int mapId = map.getId();
return new TransactionMap<K, V>(this, map, mapId);
return new TransactionMap<>(this, map, mapId);
}
/**
......@@ -925,7 +925,7 @@ public class TransactionStore {
public TransactionMap<K, V> getInstance(Transaction transaction,
long savepoint) {
TransactionMap<K, V> m =
new TransactionMap<K, V>(transaction, map, mapId);
new TransactionMap<>(transaction, map, mapId);
m.setSavepoint(savepoint);
return m;
}
......@@ -1512,7 +1512,7 @@ public class TransactionStore {
if (data != null && data.value != null) {
@SuppressWarnings("unchecked")
final V value = (V) data.value;
current = new DataUtils.MapEntry<K, V>(key, value);
current = new DataUtils.MapEntry<>(key, value);
currentKey = key;
return;
}
......
......@@ -101,7 +101,7 @@ public class ValueDataType implements DataType {
int bl = bx.length;
int len = Math.min(al, bl);
for (int i = 0; i < len; i++) {
int sortType = sortTypes[i];
int sortType = sortTypes == null ? SortOrder.ASCENDING : sortTypes[i];
int comp = compareValues(ax[i], bx[i], sortType);
if (comp != 0) {
return comp;
......@@ -464,6 +464,7 @@ public class ValueDataType implements DataType {
return ValueBoolean.get(false);
case INT_NEG:
return ValueInt.get(-readVarInt(buff));
case Value.ENUM:
case Value.INT:
return ValueInt.get(readVarInt(buff));
case LONG_NEG:
......
......@@ -44,7 +44,7 @@ public class MVRTreeMap<V> extends MVMap<SpatialKey, V> {
* @return the map
*/
public static <V> MVRTreeMap<V> create(int dimensions, DataType valueType) {
return new MVRTreeMap<V>(dimensions, valueType);
return new MVRTreeMap<>(dimensions, valueType);
}
@Override
......@@ -612,7 +612,7 @@ public class MVRTreeMap<V> extends MVMap<SpatialKey, V> {
if (valueType == null) {
valueType = new ObjectDataType();
}
return new MVRTreeMap<V>(dimensions, valueType);
return new MVRTreeMap<>(dimensions, valueType);
}
}
......
......@@ -5,6 +5,7 @@
21S02=Počet sloupců nesouhlasí
22001=Příliš dlouhá hodnota pro sloupec {0}: {1}
22003=Číselná hodnota je mimo rozsah: {0}
22004=Numeric value out of range: {0} in column {1}
22007=Nelze zpracovat konstantu {0} {1}
22012=Dělení nulou: {0}
22018=Chyba při převodu dat {0}
......
......@@ -5,6 +5,7 @@
21S02=Anzahl der Felder stimmt nicht überein
22001=Wert zu gross / lang für Feld {0}: {1}
22003=Zahlenwert ausserhalb des Bereichs: {0}
22004=Numeric value out of range: {0} in column {1}
22007=Kann {0} {1} nicht umwandeln
22012=Division durch 0: {0}
22018=Datenumwandlungsfehler beim Umwandeln von {0}
......
......@@ -5,6 +5,7 @@
21S02=Column count does not match
22001=Value too long for column {0}: {1}
22003=Numeric value out of range: {0}
22004=Numeric value out of range: {0} in column {1}
22007=Cannot parse {0} constant {1}
22012=Division by zero: {0}
22018=Data conversion error converting {0}
......
......@@ -5,6 +5,7 @@
21S02=La cantidad de columnas no coincide
22001=Valor demasiado largo para la columna {0}: {1}
22003=Valor numerico fuera de rango: {0}
22004=Numeric value out of range: {0} in column {1}
22007=Imposible interpretar la constante {0} {1}
22012=División por cero: {0}
22018=Conversión de datos fallida, convirtiendo {0}
......
差异被折叠。
......@@ -5,6 +5,7 @@
21S02=列番号が一致しません
22001=列 {0} の値が長過ぎます: {1}
22003=範囲外の数値です: {0}
22004=Numeric value out of range: {0} in column {1}
22007={0} 定数 {1} を解析できません
22012=ゼロで除算しました: {0}
22018=データ変換中にエラーが発生しました {0}
......
......@@ -5,6 +5,7 @@
21S02=Niezgodna ilość kolumn
22001=Wartość za długa dla kolumny {0}: {1}
22003=Wartość numeryczna poza zakresem: {0}
22004=Numeric value out of range: {0} in column {1}
22007=Nie można odczytać {0} jako {1}
22012=Dzielenie przez zero: {0}
22018=Błąd konwersji danych {0}
......
......@@ -5,6 +5,7 @@
21S02=A quantidade de colunas não corresponde
22001=Valor muito longo para a coluna {0}: {1}
22003=Valor númerico não esta dentro do limite: {0}
22004=Numeric value out of range: {0} in column {1}
22007=Não é possível converter {1} para {0}
22012=Divisão por zero: {0}
22018=Erro na conversão de dado, convertendo {0}
......
......@@ -5,6 +5,7 @@
21S02=Неверное количество столбцов
22001=Значение слишком длинное для поля {0}: {1}
22003=Численное значение вне допустимого диапазона: {0}
22004=Numeric value out of range: {0} in column {1}
22007=Невозможно преобразование строки {1} в тип {0}
22012=Деление на ноль: {0}
22018=Ошибка преобразования данных при конвертации {0}
......
......@@ -5,6 +5,7 @@
21S02=Počet stĺpcov sa nezhoduje
22001=Hodnota je príliš dlhá pre stĺpec {0}: {1}
22003=Číselná hodnota mimo rozsah: {0}
22004=Numeric value out of range: {0} in column {1}
22007=Nemožem rozobrať {0} konštantu {1}
22012=Delenie nulou: {0}
22018=Chyba konverzie dát pre {0}
......
......@@ -5,6 +5,7 @@
21S02=字段数目不匹配
22001=字段 {0}数值太大: {1}
22003=数值超出范围: {0}
22004=Numeric value out of range: {0} in column {1}
22007=不能解析字段 {0} 的数值 :{1}
22012=除数为零: {0}
22018=转换数据{0}期间出现转换错误
......
......@@ -177,7 +177,7 @@ public class CipherFactory {
if (list == null) {
return list;
}
List<String> algorithms = new LinkedList<String>(Arrays.asList(list.split("\\s*,\\s*")));
List<String> algorithms = new LinkedList<>(Arrays.asList(list.split("\\s*,\\s*")));
boolean dhAnonRemoved = algorithms.remove("DH_anon");
boolean ecdhAnonRemoved = algorithms.remove("ECDH_anon");
if (dhAnonRemoved || ecdhAnonRemoved) {
......@@ -387,7 +387,7 @@ public class CipherFactory {
}
private static String[] enableAnonymous(String[] enabled, String[] supported) {
LinkedHashSet<String> set = new LinkedHashSet<String>();
LinkedHashSet<String> set = new LinkedHashSet<>();
for (String x : supported) {
if (!x.startsWith("SSL") &&
x.indexOf("_anon_") >= 0 &&
......@@ -401,7 +401,7 @@ public class CipherFactory {
}
private static String[] disableSSL(String[] enabled) {
HashSet<String> set = new HashSet<String>();
HashSet<String> set = new HashSet<>();
for (String x : enabled) {
if (!x.startsWith("SSL")) {
set.add(x);
......
......@@ -67,9 +67,9 @@ public class PgServerThread implements Runnable {
private String clientEncoding = SysProperties.PG_DEFAULT_CLIENT_ENCODING;
private String dateStyle = "ISO";
private final HashMap<String, Prepared> prepared =
new CaseInsensitiveMap<Prepared>();
new CaseInsensitiveMap<>();
private final HashMap<String, Portal> portals =
new CaseInsensitiveMap<Portal>();
new CaseInsensitiveMap<>();
PgServerThread(Socket socket, PgServer server) {
this.server = server;
......
......@@ -481,7 +481,7 @@ public class WebServer implements Service {
} catch (IOException e) {
DbException.traceThrowable(e);
}
session.put("text", new HashMap<Object, Object>(text));
session.put("text", new HashMap<>(text));
}
ArrayList<HashMap<String, Object>> getSessions() {
......
......@@ -36,7 +36,7 @@ public class WriterThread implements Runnable {
private volatile boolean stop;
private WriterThread(Database database, int writeDelay) {
this.databaseRef = new WeakReference<Database>(database);
this.databaseRef = new WeakReference<>(database);
this.writeDelay = writeDelay;
}
......
......@@ -33,7 +33,7 @@ import org.h2.util.New;
public class FilePathMem extends FilePath {
private static final TreeMap<String, FileMemData> MEMORY_FILES =
new TreeMap<String, FileMemData>();
new TreeMap<>();
private static final FileMemData DIRECTORY = new FileMemData("", false);
@Override
......@@ -430,7 +430,7 @@ class FileMemData {
private static final byte[] COMPRESSED_EMPTY_BLOCK;
private static final Cache<CompressItem, CompressItem> COMPRESS_LATER =
new Cache<CompressItem, CompressItem>(CACHE_SIZE);
new Cache<>(CACHE_SIZE);
private String name;
private final int id;
......@@ -690,7 +690,7 @@ class FileMemData {
if (blocks != data.length) {
AtomicReference<byte[]>[] n = Arrays.copyOf(data, blocks);
for (int i = data.length; i < blocks; i++) {
n[i] = new AtomicReference<byte[]>(COMPRESSED_EMPTY_BLOCK);
n[i] = new AtomicReference<>(COMPRESSED_EMPTY_BLOCK);
}
data = n;
}
......
......@@ -97,7 +97,7 @@ class FileNioMapped extends FileBase {
}
if (useSystemGc) {
WeakReference<MappedByteBuffer> bufferWeakRef =
new WeakReference<MappedByteBuffer>(mapped);
new WeakReference<>(mapped);
mapped = null;
long start = System.nanoTime();
while (bufferWeakRef.get() != null) {
......
......@@ -32,7 +32,7 @@ import org.h2.util.New;
public class FilePathNioMem extends FilePath {
private static final TreeMap<String, FileNioMemData> MEMORY_FILES =
new TreeMap<String, FileNioMemData>();
new TreeMap<>();
/**
* The percentage of uncompressed (cached) entries.
......@@ -444,7 +444,7 @@ class FileNioMemData {
final int nameHashCode;
private final CompressLaterCache<CompressItem, CompressItem> compressLaterCache =
new CompressLaterCache<CompressItem, CompressItem>(CACHE_MIN_SIZE);
new CompressLaterCache<>(CACHE_MIN_SIZE);
private String name;
private final boolean compress;
......@@ -682,7 +682,7 @@ class FileNioMemData {
System.arraycopy(buffers, 0, newBuffers, 0,
Math.min(buffers.length, newBuffers.length));
for (int i = buffers.length; i < blocks; i++) {
newBuffers[i] = new AtomicReference<ByteBuffer>(COMPRESSED_EMPTY_BLOCK);
newBuffers[i] = new AtomicReference<>(COMPRESSED_EMPTY_BLOCK);
}
buffers = newBuffers;
}
......
......@@ -383,7 +383,7 @@ public class Column {
getCreateSQL(), s + " (" + value.getPrecision() + ")");
}
}
if (isEnumerated()) {
if (isEnumerated() && value != ValueNull.INSTANCE) {
if (!ValueEnum.isValid(enumerators, value)) {
String s = value.getTraceSQL();
if (s.length() > 127) {
......
......@@ -72,7 +72,7 @@ public final class JoinBatch {
/**
* An empty future cursor.
*/
static final Future<Cursor> EMPTY_FUTURE_CURSOR = new DoneFuture<Cursor>(EMPTY_CURSOR);
static final Future<Cursor> EMPTY_FUTURE_CURSOR = new DoneFuture<>(EMPTY_CURSOR);
/**
* The top cursor.
......@@ -694,7 +694,7 @@ public final class JoinBatch {
private boolean full;
private final List<Future<Cursor>> result = new SingletonList<Future<Cursor>>();
private final List<Future<Cursor>> result = new SingletonList<>();
FakeLookupBatch(TableFilter filter) {
this.filter = filter;
......@@ -732,7 +732,7 @@ public final class JoinBatch {
return Collections.emptyList();
}
Cursor c = filter.getIndex().find(filter, first, last);
result.set(0, new DoneFuture<Cursor>(c));
result.set(0, new DoneFuture<>(c));
full = false;
first = last = null;
return result;
......
......@@ -60,7 +60,7 @@ public class RegularTable extends TableBase {
* The queue of sessions waiting to lock the table. It is a FIFO queue to
* prevent starvation, since Java's synchronized locking is biased.
*/
private final ArrayDeque<Session> waitingSessions = new ArrayDeque<Session>();
private final ArrayDeque<Session> waitingSessions = new ArrayDeque<>();
private final Trace traceLock;
private final ArrayList<Index> indexes = New.arrayList();
private long lastModificationId;
......
......@@ -27,7 +27,7 @@ public abstract class TableBase extends Table {
*/
private final String tableEngine;
/** Provided table parameters */
private List<String> tableEngineParams = new ArrayList<String>();
private List<String> tableEngineParams = new ArrayList<>();
private final boolean globalTemporary;
......
......@@ -12,7 +12,6 @@ import java.sql.Statement;
import org.h2.api.ErrorCode;
import org.h2.engine.Constants;
import org.h2.store.fs.FileUtils;
import org.h2.util.IOUtils;
import org.h2.util.JdbcUtils;
import org.h2.util.Tool;
......
......@@ -22,7 +22,7 @@ public class AbbaDetector {
private static final ThreadLocal<Deque<Object>> STACK =
new ThreadLocal<Deque<Object>>() {
@Override protected Deque<Object> initialValue() {
return new ArrayDeque<Object>();
return new ArrayDeque<>();
}
};
......@@ -32,9 +32,9 @@ public class AbbaDetector {
* (stack trace where locked) )
*/
private static final Map<Object, Map<Object, Exception>> LOCK_ORDERING =
new WeakHashMap<Object, Map<Object, Exception>>();
new WeakHashMap<>();
private static final Set<String> KNOWN_DEADLOCKS = new HashSet<String>();
private static final Set<String> KNOWN_DEADLOCKS = new HashSet<>();
/**
* This method is called just before or just after an object is
......@@ -91,7 +91,7 @@ public class AbbaDetector {
Object test = getTest(o);
Map<Object, Exception> map = LOCK_ORDERING.get(test);
if (map == null) {
map = new WeakHashMap<Object, Exception>();
map = new WeakHashMap<>();
LOCK_ORDERING.put(test, map);
}
Exception oldException = null;
......
......@@ -35,8 +35,8 @@ public class AbbaLockingDetector implements Runnable {
* (stack trace where locked) )
*/
private final Map<String, Map<String, String>> lockOrdering =
new WeakHashMap<String, Map<String, String>>();
private final Set<String> knownDeadlocks = new HashSet<String>();
new WeakHashMap<>();
private final Set<String> knownDeadlocks = new HashSet<>();
/**
* Start collecting locking data.
......@@ -105,7 +105,7 @@ public class AbbaLockingDetector implements Runnable {
}
private void processThreadList(ThreadInfo[] threadInfoList) {
final List<String> lockOrder = new ArrayList<String>();
final List<String> lockOrder = new ArrayList<>();
for (ThreadInfo threadInfo : threadInfoList) {
lockOrder.clear();
generateOrdering(lockOrder, threadInfo);
......@@ -147,7 +147,7 @@ public class AbbaLockingDetector implements Runnable {
String topLock = lockOrder.get(lockOrder.size() - 1);
Map<String, String> map = lockOrdering.get(topLock);
if (map == null) {
map = new WeakHashMap<String, String>();
map = new WeakHashMap<>();
lockOrdering.put(topLock, map);
}
String oldException = null;
......
......@@ -68,7 +68,7 @@ public class CacheLRU implements Cache {
int cacheSize) {
Map<Integer, CacheObject> secondLevel = null;
if (cacheType.startsWith("SOFT_")) {
secondLevel = new SoftHashMap<Integer, CacheObject>();
secondLevel = new SoftHashMap<>();
cacheType = cacheType.substring("SOFT_".length());
}
Cache cache;
......
......@@ -24,7 +24,7 @@ public class CloseWatcher extends PhantomReference<Object> {
/**
* The queue (might be set to null at any time).
*/
private static ReferenceQueue<Object> queue = new ReferenceQueue<Object>();
private static ReferenceQueue<Object> queue = new ReferenceQueue<>();
/**
* The reference set. Must keep it, otherwise the references are garbage
......@@ -92,7 +92,7 @@ public class CloseWatcher extends PhantomReference<Object> {
boolean stackTrace) {
ReferenceQueue<Object> q = queue;
if (q == null) {
q = new ReferenceQueue<Object>();
q = new ReferenceQueue<>();
queue = q;
}
CloseWatcher cw = new CloseWatcher(o, q, closeable);
......
......@@ -57,13 +57,13 @@ public class DateTimeUtils {
* have that problem, and while it is still a small memory leak, it is not a
* class loader memory leak.
*/
private static final ThreadLocal<Calendar> CACHED_CALENDAR = new ThreadLocal<Calendar>();
private static final ThreadLocal<Calendar> CACHED_CALENDAR = new ThreadLocal<>();
/**
* A cached instance of Calendar used when a timezone is specified.
*/
private static final ThreadLocal<Calendar> CACHED_CALENDAR_NON_DEFAULT_TIMEZONE =
new ThreadLocal<Calendar>();
new ThreadLocal<>();
/**
* Observed JVM behaviour is that if the timezone of the host computer is
......
......@@ -16,7 +16,7 @@ import java.util.concurrent.ConcurrentHashMap;
*/
public class DebuggingThreadLocal<T> {
private final ConcurrentHashMap<Long, T> map = new ConcurrentHashMap<Long, T>();
private final ConcurrentHashMap<Long, T> map = new ConcurrentHashMap<>();
public void set(T value) {
map.put(Thread.currentThread().getId(), value);
......@@ -39,7 +39,7 @@ public class DebuggingThreadLocal<T> {
* @return a HashMap containing a mapping from thread-id to value
*/
public HashMap<Long, T> getSnapshotOfAllThreads() {
return new HashMap<Long, T>(map);
return new HashMap<>(map);
}
}
......@@ -79,7 +79,7 @@ public class JdbcUtils {
* In order to manage more than one class loader
*/
private static ArrayList<ClassFactory> userClassFactories =
new ArrayList<ClassFactory>();
new ArrayList<>();
private static String[] allowedClassNamePrefixes;
......@@ -109,7 +109,7 @@ public class JdbcUtils {
if (userClassFactories == null) {
// initially, it is empty
// but Apache Tomcat may clear the fields as well
userClassFactories = new ArrayList<ClassFactory>();
userClassFactories = new ArrayList<>();
}
return userClassFactories;
}
......
......@@ -23,7 +23,7 @@ public class New {
* @return the object
*/
public static <T> ArrayList<T> arrayList() {
return new ArrayList<T>(4);
return new ArrayList<>(4);
}
/**
......@@ -34,7 +34,7 @@ public class New {
* @return the object
*/
public static <K, V> HashMap<K, V> hashMap() {
return new HashMap<K, V>();
return new HashMap<>();
}
/**
......@@ -46,7 +46,7 @@ public class New {
* @return the object
*/
public static <K, V> HashMap<K, V> hashMap(int initialCapacity) {
return new HashMap<K, V>(initialCapacity);
return new HashMap<>(initialCapacity);
}
/**
......@@ -56,7 +56,7 @@ public class New {
* @return the object
*/
public static <T> HashSet<T> hashSet() {
return new HashSet<T>();
return new HashSet<>();
}
/**
......@@ -67,7 +67,7 @@ public class New {
* @return the object
*/
public static <T> ArrayList<T> arrayList(Collection<T> c) {
return new ArrayList<T>(c);
return new ArrayList<>(c);
}
/**
......@@ -78,7 +78,7 @@ public class New {
* @return the object
*/
public static <T> ArrayList<T> arrayList(int initialCapacity) {
return new ArrayList<T>(initialCapacity);
return new ArrayList<>(initialCapacity);
}
}
......@@ -64,7 +64,7 @@ public class Permutations<T> {
* @return the generated permutations object
*/
public static <T> Permutations<T> create(T[] in, T[] out) {
return new Permutations<T>(in, out, in.length);
return new Permutations<>(in, out, in.length);
}
/**
......@@ -77,7 +77,7 @@ public class Permutations<T> {
* @return the generated permutations object
*/
public static <T> Permutations<T> create(T[] in, T[] out, int m) {
return new Permutations<T>(in, out, m);
return new Permutations<>(in, out, m);
}
/**
......
......@@ -81,14 +81,14 @@ public class Profiler implements Runnable {
private volatile boolean stop;
private final HashMap<String, Integer> counts =
new HashMap<String, Integer>();
new HashMap<>();
/**
* The summary (usually one entry per package, unless sumClasses is enabled,
* in which case it's one entry per class).
*/
private final HashMap<String, Integer> summary =
new HashMap<String, Integer>();
new HashMap<>();
private int minCount = 1;
private int total;
private Thread thread;
......@@ -193,7 +193,7 @@ public class Profiler implements Runnable {
}
private static List<Object[]> getRunnableStackTraces() {
ArrayList<Object[]> list = new ArrayList<Object[]>();
ArrayList<Object[]> list = new ArrayList<>();
Map<Thread, StackTraceElement[]> map = Thread.getAllStackTraces();
for (Map.Entry<Thread, StackTraceElement[]> entry : map.entrySet()) {
Thread t = entry.getKey();
......@@ -222,7 +222,7 @@ public class Profiler implements Runnable {
private static List<Object[]> readStackTrace(LineNumberReader r)
throws IOException {
ArrayList<Object[]> list = new ArrayList<Object[]>();
ArrayList<Object[]> list = new ArrayList<>();
while (true) {
String line = r.readLine();
if (line == null) {
......@@ -240,7 +240,7 @@ public class Profiler implements Runnable {
if (!line.startsWith("java.lang.Thread.State: RUNNABLE")) {
continue;
}
ArrayList<String> stack = new ArrayList<String>();
ArrayList<String> stack = new ArrayList<>();
while (true) {
line = r.readLine();
if (line == null) {
......@@ -472,10 +472,10 @@ public class Profiler implements Runnable {
if (counts.size() == 0) {
buff.append("(none)").append(LINE_SEPARATOR);
}
HashMap<String, Integer> copy = new HashMap<String, Integer>(counts);
HashMap<String, Integer> copy = new HashMap<>(counts);
appendTop(buff, copy, count, total, false);
buff.append("summary:").append(LINE_SEPARATOR);
copy = new HashMap<String, Integer>(summary);
copy = new HashMap<>(summary);
appendTop(buff, copy, count, total, true);
buff.append('.');
return buff.toString();
......
......@@ -33,7 +33,7 @@ public class SmallLRUCache<K, V> extends LinkedHashMap<K, V> {
* @return the object
*/
public static <K, V> SmallLRUCache<K, V> newInstance(int size) {
return new SmallLRUCache<K, V>(size);
return new SmallLRUCache<>(size);
}
public void setMaxSize(int size) {
......
......@@ -23,7 +23,7 @@ import java.util.Set;
public class SoftHashMap<K, V> extends AbstractMap<K, V> {
private final Map<K, SoftValue<V>> map;
private final ReferenceQueue<V> queue = new ReferenceQueue<V>();
private final ReferenceQueue<V> queue = new ReferenceQueue<>();
public SoftHashMap() {
map = New.hashMap();
......@@ -63,7 +63,7 @@ public class SoftHashMap<K, V> extends AbstractMap<K, V> {
@Override
public V put(K key, V value) {
processQueue();
SoftValue<V> old = map.put(key, new SoftValue<V>(value, queue, key));
SoftValue<V> old = map.put(key, new SoftValue<>(value, queue, key));
return old == null ? null : old.get();
}
......
......@@ -33,7 +33,7 @@ public class SortedProperties extends Properties {
@Override
public synchronized Enumeration<Object> keys() {
Vector<String> v = new Vector<String>();
Vector<String> v = new Vector<>();
for (Object o : keySet()) {
v.add(o.toString());
}
......@@ -132,7 +132,7 @@ public class SortedProperties extends Properties {
*/
public synchronized String toLines() {
StringBuilder buff = new StringBuilder();
for (Entry<Object, Object> e : new TreeMap<Object, Object>(this).entrySet()) {
for (Entry<Object, Object> e : new TreeMap<>(this).entrySet()) {
buff.append(e.getKey()).append('=').append(e.getValue()).append('\n');
}
return buff.toString();
......
......@@ -280,7 +280,7 @@ public class SourceCompiler {
JavaFileManager fileManager = new
ClassFileManager(JAVA_COMPILER
.getStandardFileManager(null, null, null));
ArrayList<JavaFileObject> compilationUnits = new ArrayList<JavaFileObject>();
ArrayList<JavaFileObject> compilationUnits = new ArrayList<>();
compilationUnits.add(new StringJavaFileObject(fullClassName, source));
// cannot concurrently compile
synchronized (JAVA_COMPILER) {
......
......@@ -22,7 +22,7 @@ import org.h2.message.DbException;
public class StringUtils {
private static SoftReference<String[]> softCache =
new SoftReference<String[]>(null);
new SoftReference<>(null);
private static long softCacheCreatedNs;
private static final char[] HEX = "0123456789abcdef".toCharArray();
......@@ -69,7 +69,7 @@ public class StringUtils {
}
try {
cache = new String[SysProperties.OBJECT_CACHE_SIZE];
softCache = new SoftReference<String[]>(cache);
softCache = new SoftReference<>(cache);
return cache;
} finally {
softCacheCreatedNs = System.nanoTime();
......@@ -915,7 +915,7 @@ public class StringUtils {
* Clear the cache. This method is used for testing.
*/
public static void clearCache() {
softCache = new SoftReference<String[]>(null);
softCache = new SoftReference<>(null);
}
/**
......
......@@ -20,7 +20,7 @@ public class SynchronizedVerifier {
private static final Map<Class<?>, AtomicBoolean> DETECT =
Collections.synchronizedMap(new HashMap<Class<?>, AtomicBoolean>());
private static final Map<Object, Object> CURRENT =
Collections.synchronizedMap(new IdentityHashMap<Object, Object>());
Collections.synchronizedMap(new IdentityHashMap<>());
/**
* Enable or disable detection for a given class.
......
......@@ -19,7 +19,7 @@ import org.h2.store.fs.FileUtils;
*/
public class TempFileDeleter {
private final ReferenceQueue<Object> queue = new ReferenceQueue<Object>();
private final ReferenceQueue<Object> queue = new ReferenceQueue<>();
private final HashMap<PhantomReference<?>, String> refMap = New.hashMap();
private TempFileDeleter() {
......@@ -40,7 +40,7 @@ public class TempFileDeleter {
*/
public synchronized Reference<?> addFile(String fileName, Object file) {
IOUtils.trace("TempFileDeleter.addFile", fileName, file);
PhantomReference<?> ref = new PhantomReference<Object>(file, queue);
PhantomReference<?> ref = new PhantomReference<>(file, queue);
refMap.put(ref, fileName);
deleteUnused();
return ref;
......
......@@ -613,10 +613,10 @@ class ToDateTokenizer {
J(PARSLET_DAY);
private static final List<FormatTokenEnum> EMPTY_LIST =
new ArrayList<FormatTokenEnum>(0);
new ArrayList<>(0);
private static final Map<Character, List<FormatTokenEnum>> CACHE =
new HashMap<Character, List<FormatTokenEnum>>(FormatTokenEnum.values().length);
new HashMap<>(FormatTokenEnum.values().length);
private final ToDateParslet toDateParslet;
private final Pattern patternToUse;
......@@ -668,7 +668,7 @@ class ToDateTokenizer {
if (CACHE.size() <= 0) {
for (FormatTokenEnum token : FormatTokenEnum.values()) {
List<Character> tokenKeys = new ArrayList<Character>();
List<Character> tokenKeys = new ArrayList<>();
if (token.name().contains("_")) {
String[] tokens = token.name().split("_");
......@@ -682,7 +682,7 @@ class ToDateTokenizer {
for (Character tokenKey : tokenKeys) {
List<FormatTokenEnum> l = CACHE.get(tokenKey);
if (l == null) {
l = new ArrayList<FormatTokenEnum>(1);
l = new ArrayList<>(1);
CACHE.put(tokenKey, l);
}
l.add(token);
......
......@@ -8,6 +8,7 @@ package org.h2.util;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.lang.reflect.Constructor;
......@@ -16,6 +17,7 @@ import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
......@@ -74,7 +76,7 @@ public class Utils {
buff[pos++] = (byte) (x >> 24);
buff[pos++] = (byte) (x >> 16);
buff[pos++] = (byte) (x >> 8);
buff[pos++] = (byte) x;
buff[pos] = (byte) x;
}
/**
......@@ -314,6 +316,17 @@ public class Utils {
return max / 1024;
}
public static long getGarbageCollectionTime() {
long totalGCTime = 0;
for (GarbageCollectorMXBean gcMXBean : ManagementFactory.getGarbageCollectorMXBeans()) {
long collectionTime = gcMXBean.getCollectionTime();
if(collectionTime > 0) {
totalGCTime += collectionTime;
}
}
return totalGCTime;
}
private static synchronized void collectGarbage() {
Runtime runtime = Runtime.getRuntime();
long total = runtime.totalMemory();
......@@ -710,7 +723,7 @@ public class Utils {
String s = getProperty(key, null);
if (s != null) {
try {
return Integer.decode(s).intValue();
return Integer.decode(s);
} catch (NumberFormatException e) {
// ignore
}
......@@ -738,6 +751,20 @@ public class Utils {
return defaultValue;
}
public static int getConfigParam(Map<String,?> config, String key, int defaultValue) {
Object o = config.get(key);
if (o instanceof Number) {
return ((Number) o).intValue();
} else if (o != null) {
try {
return Integer.decode(o.toString());
} catch (NumberFormatException e) {
// ignore
}
}
return defaultValue;
}
/**
* Scale the value with the available memory. If 1 GB of RAM is available,
* the value is returned, if 2 GB are available, then twice the value, and
......
......@@ -26,7 +26,7 @@ public class ValueHashMap<V> extends HashBase {
* @return the object
*/
public static <T> ValueHashMap<T> newInstance() {
return new ValueHashMap<T>();
return new ValueHashMap<>();
}
@Override
......
......@@ -727,6 +727,7 @@ public class DataType {
// "java.lang.Short";
return Short.class.getName();
case Value.INT:
case Value.ENUM:
// "java.lang.Integer";
return Integer.class.getName();
case Value.LONG:
......
......@@ -181,7 +181,7 @@ public abstract class Value {
public static final int TYPE_COUNT = ENUM;
private static SoftReference<Value[]> softCache =
new SoftReference<Value[]>(null);
new SoftReference<>(null);
private static final BigDecimal MAX_LONG_DECIMAL =
BigDecimal.valueOf(Long.MAX_VALUE);
private static final BigDecimal MIN_LONG_DECIMAL =
......@@ -382,12 +382,12 @@ public abstract class Value {
if (SysProperties.OBJECT_CACHE) {
int hash = v.hashCode();
if (softCache == null) {
softCache = new SoftReference<Value[]>(null);
softCache = new SoftReference<>(null);
}
Value[] cache = softCache.get();
if (cache == null) {
cache = new Value[SysProperties.OBJECT_CACHE_SIZE];
softCache = new SoftReference<Value[]>(cache);
softCache = new SoftReference<>(cache);
}
int index = hash & (SysProperties.OBJECT_CACHE_SIZE - 1);
Value cached = cache[index];
......@@ -608,18 +608,18 @@ public abstract class Value {
case BOOLEAN:
return ValueByte.get(getBoolean().booleanValue() ? (byte) 1 : (byte) 0);
case SHORT:
return ValueByte.get(convertToByte(getShort()));
return ValueByte.get(convertToByte(getShort(), column));
case ENUM:
case INT:
return ValueByte.get(convertToByte(getInt()));
return ValueByte.get(convertToByte(getInt(), column));
case LONG:
return ValueByte.get(convertToByte(getLong()));
return ValueByte.get(convertToByte(getLong(), column));
case DECIMAL:
return ValueByte.get(convertToByte(convertToLong(getBigDecimal())));
return ValueByte.get(convertToByte(convertToLong(getBigDecimal(), column), column));
case DOUBLE:
return ValueByte.get(convertToByte(convertToLong(getDouble())));
return ValueByte.get(convertToByte(convertToLong(getDouble(), column), column));
case FLOAT:
return ValueByte.get(convertToByte(convertToLong(getFloat())));
return ValueByte.get(convertToByte(convertToLong(getFloat(), column), column));
case BYTES:
return ValueByte.get((byte) Integer.parseInt(getString(), 16));
case TIMESTAMP_TZ:
......@@ -636,15 +636,15 @@ public abstract class Value {
return ValueShort.get(getByte());
case ENUM:
case INT:
return ValueShort.get(convertToShort(getInt()));
return ValueShort.get(convertToShort(getInt(), column));
case LONG:
return ValueShort.get(convertToShort(getLong()));
return ValueShort.get(convertToShort(getLong(), column));
case DECIMAL:
return ValueShort.get(convertToShort(convertToLong(getBigDecimal())));
return ValueShort.get(convertToShort(convertToLong(getBigDecimal(), column), column));
case DOUBLE:
return ValueShort.get(convertToShort(convertToLong(getDouble())));
return ValueShort.get(convertToShort(convertToLong(getDouble(), column), column));
case FLOAT:
return ValueShort.get(convertToShort(convertToLong(getFloat())));
return ValueShort.get(convertToShort(convertToLong(getFloat(), column), column));
case BYTES:
return ValueShort.get((short) Integer.parseInt(getString(), 16));
case TIMESTAMP_TZ:
......@@ -664,13 +664,13 @@ public abstract class Value {
case SHORT:
return ValueInt.get(getShort());
case LONG:
return ValueInt.get(convertToInt(getLong()));
return ValueInt.get(convertToInt(getLong(), column));
case DECIMAL:
return ValueInt.get(convertToInt(convertToLong(getBigDecimal())));
return ValueInt.get(convertToInt(convertToLong(getBigDecimal(), column), column));
case DOUBLE:
return ValueInt.get(convertToInt(convertToLong(getDouble())));
return ValueInt.get(convertToInt(convertToLong(getDouble(), column), column));
case FLOAT:
return ValueInt.get(convertToInt(convertToLong(getFloat())));
return ValueInt.get(convertToInt(convertToLong(getFloat(), column), column));
case BYTES:
return ValueInt.get((int) Long.parseLong(getString(), 16));
case TIMESTAMP_TZ:
......@@ -691,11 +691,11 @@ public abstract class Value {
case INT:
return ValueLong.get(getInt());
case DECIMAL:
return ValueLong.get(convertToLong(getBigDecimal()));
return ValueLong.get(convertToLong(getBigDecimal(), column));
case DOUBLE:
return ValueLong.get(convertToLong(getDouble()));
return ValueLong.get(convertToLong(getDouble(), column));
case FLOAT:
return ValueLong.get(convertToLong(getFloat()));
return ValueLong.get(convertToLong(getFloat(), column));
case BYTES: {
// parseLong doesn't work for ffffffffffffffff
byte[] d = getBytes();
......@@ -1131,49 +1131,53 @@ public abstract class Value {
return this;
}
private static byte convertToByte(long x) {
private static byte convertToByte(long x, Column col) {
if (x > Byte.MAX_VALUE || x < Byte.MIN_VALUE) {
throw DbException.get(
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_1, Long.toString(x));
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_2, Long.toString(x), getColumnName(col));
}
return (byte) x;
}
private static short convertToShort(long x) {
private static short convertToShort(long x, Column col) {
if (x > Short.MAX_VALUE || x < Short.MIN_VALUE) {
throw DbException.get(
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_1, Long.toString(x));
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_2, Long.toString(x), getColumnName(col));
}
return (short) x;
}
private static int convertToInt(long x) {
private static int convertToInt(long x, Column col) {
if (x > Integer.MAX_VALUE || x < Integer.MIN_VALUE) {
throw DbException.get(
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_1, Long.toString(x));
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_2, Long.toString(x), getColumnName(col));
}
return (int) x;
}
private static long convertToLong(double x) {
private static long convertToLong(double x, Column col) {
if (x > Long.MAX_VALUE || x < Long.MIN_VALUE) {
// TODO document that +Infinity, -Infinity throw an exception and
// NaN returns 0
throw DbException.get(
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_1, Double.toString(x));
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_2, Double.toString(x), getColumnName(col));
}
return Math.round(x);
}
private static long convertToLong(BigDecimal x) {
private static long convertToLong(BigDecimal x, Column col) {
if (x.compareTo(MAX_LONG_DECIMAL) > 0 ||
x.compareTo(Value.MIN_LONG_DECIMAL) < 0) {
throw DbException.get(
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_1, x.toString());
ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE_2, x.toString(), getColumnName(col));
}
return x.setScale(0, BigDecimal.ROUND_HALF_UP).longValue();
}
private static String getColumnName(Column col) {
return col == null ? "" : col.getName();
}
/**
* Copy a large value, to be used in the given table. For values that are
* kept fully in memory this method has no effect.
......
......@@ -62,8 +62,6 @@ import org.h2.test.db.TestRights;
import org.h2.test.db.TestRowFactory;
import org.h2.test.db.TestRunscript;
import org.h2.test.db.TestSQLInjection;
import org.h2.test.db.TestScript;
import org.h2.test.db.TestScriptSimple;
import org.h2.test.db.TestSelectCountNonNullColumn;
import org.h2.test.db.TestSequence;
import org.h2.test.db.TestSessionsLocks;
......@@ -124,6 +122,8 @@ import org.h2.test.mvcc.TestMvccMultiThreaded2;
import org.h2.test.poweroff.TestReorderWrites;
import org.h2.test.recover.RecoverLobTest;
import org.h2.test.rowlock.TestRowLocks;
import org.h2.test.scripts.TestScript;
import org.h2.test.scripts.TestScriptSimple;
import org.h2.test.server.TestAutoServer;
import org.h2.test.server.TestInit;
import org.h2.test.server.TestNestedLoop;
......@@ -365,6 +365,11 @@ java org.h2.test.TestAll timer
*/
public boolean travis;
/**
* the vmlens.com race condition tool
*/
public boolean vmlens;
/**
* The lock timeout to use
*/
......@@ -506,6 +511,9 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
if ("travis".equals(args[0])) {
test.travis = true;
test.testAll();
} else if ("vmlens".equals(args[0])) {
test.vmlens = true;
test.testAll();
} else if ("reopen".equals(args[0])) {
System.setProperty("h2.delayWrongPasswordMin", "0");
System.setProperty("h2.check2", "false");
......@@ -554,7 +562,7 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
private void testAll() throws Exception {
runTests();
if (!travis) {
if (!travis && !vmlens) {
Profiler prof = new Profiler();
prof.depth = 16;
prof.interval = 1;
......@@ -614,6 +622,9 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
memory = true;
multiThreaded = true;
test();
if (vmlens) {
return;
}
testUnit();
// lazy
......@@ -681,7 +692,6 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
cipher = null;
test();
}
}
/**
......@@ -722,6 +732,9 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
addTest(new TestCompatibilityOracle());
addTest(new TestCsv());
addTest(new TestDeadlock());
if (vmlens) {
return;
}
addTest(new TestDrop());
addTest(new TestDuplicateKeyUpdate());
addTest(new TestEncryptedDb());
......
......@@ -67,11 +67,6 @@ public abstract class TestBase {
*/
private static String baseDir = getTestDir("");
/**
* The last time something was printed.
*/
private static long lastPrint;
/**
* The test configuration.
*/
......@@ -82,7 +77,7 @@ public abstract class TestBase {
*/
protected long start;
private final LinkedList<byte[]> memory = new LinkedList<byte[]>();
private final LinkedList<byte[]> memory = new LinkedList<>();
/**
* Get the test directory for this test.
......@@ -388,7 +383,6 @@ public abstract class TestBase {
*/
public void trace(String s) {
if (config.traceTest) {
lastPrint = 0;
println(s);
}
}
......@@ -459,7 +453,6 @@ public abstract class TestBase {
* @throws AssertionError always throws an AssertionError
*/
protected void fail(String string) {
lastPrint = 0;
if (string.length() > 100) {
// avoid long strings with special characters, because they are slow
// to display in Eclipse
......@@ -529,11 +522,8 @@ public abstract class TestBase {
*/
public void println(String s) {
long now = System.nanoTime();
if (now > lastPrint + TimeUnit.SECONDS.toNanos(1)) {
lastPrint = now;
long time = TimeUnit.NANOSECONDS.toMillis(now - start);
printlnWithTime(time, getClass().getName() + " " + s);
}
long time = TimeUnit.NANOSECONDS.toMillis(now - start);
printlnWithTime(time, getClass().getName() + " " + s);
}
/**
......@@ -1381,8 +1371,8 @@ public abstract class TestBase {
}
ResultSet rs1 = stat1.executeQuery("SCRIPT simple NOPASSWORDS");
ResultSet rs2 = stat2.executeQuery("SCRIPT simple NOPASSWORDS");
ArrayList<String> list1 = new ArrayList<String>();
ArrayList<String> list2 = new ArrayList<String>();
ArrayList<String> list1 = new ArrayList<>();
ArrayList<String> list2 = new ArrayList<>();
while (rs1.next()) {
String s1 = rs1.getString(1);
s1 = removeRowCount(s1);
......
......@@ -42,7 +42,7 @@ public class TestAnnotationProcessor extends AbstractProcessor {
if (messagesStr == null || messagesStr.isEmpty()) {
return Collections.emptyList();
}
List<OutputMessage> outputMessages = new ArrayList<OutputMessage>();
List<OutputMessage> outputMessages = new ArrayList<>();
for (String msg : messagesStr.split("\\|")) {
String[] split = msg.split(",");
......
......@@ -65,7 +65,7 @@ public class BenchB implements Bench, Runnable {
@Override
public void init(Database db, int size) throws SQLException {
this.database = db;
this.transactionPerClient = size / 8;
this.transactionPerClient = getTransactionsPerClient(size);
db.start(this, "Init");
db.openConnection();
......@@ -133,6 +133,10 @@ public class BenchB implements Bench, Runnable {
// db.end();
}
protected int getTransactionsPerClient(int size) {
return size / 8;
}
@Override
public void run() {
int accountsPerBranch = ACCOUNTS / BRANCHES;
......
......@@ -26,7 +26,7 @@ public class BenchCThread {
private final int warehouseId;
private final int terminalId;
private final HashMap<String, PreparedStatement> prepared =
new HashMap<String, PreparedStatement>();
new HashMap<>();
private final BenchCRandom random;
private final BenchC bench;
......
......@@ -23,6 +23,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.h2.test.TestBase;
import org.h2.tools.Server;
import org.h2.util.StringUtils;
import org.h2.util.Utils;
/**
* Represents a database in the benchmark test application.
......@@ -34,15 +35,17 @@ class Database {
private DatabaseTest test;
private int id;
private String name, url, user, password;
private final ArrayList<String[]> replace = new ArrayList<String[]>();
private final ArrayList<String[]> replace = new ArrayList<>();
private String currentAction;
private long startTimeNs;
private long initialGCTime;
private Connection conn;
private Statement stat;
private long lastTrace;
private final Random random = new Random(1);
private final ArrayList<Object[]> results = new ArrayList<Object[]>();
private final ArrayList<Object[]> results = new ArrayList<>();
private int totalTime;
private int totalGCTime;
private final AtomicInteger executedStatements = new AtomicInteger(0);
private int threadCount;
......@@ -68,6 +71,15 @@ class Database {
return totalTime;
}
/**
* Get the total measured GC time.
*
* @return the time in milliseconds
*/
int getTotalGCTime() {
return totalGCTime;
}
/**
* Get the result array.
*
......@@ -272,6 +284,7 @@ class Database {
void start(Bench bench, String action) {
this.currentAction = bench.getName() + ": " + action;
this.startTimeNs = System.nanoTime();
this.initialGCTime = Utils.getGarbageCollectionTime();
}
/**
......@@ -280,9 +293,11 @@ class Database {
*/
void end() {
long time = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNs);
long gcCollectionTime = Utils.getGarbageCollectionTime() - initialGCTime;
log(currentAction, "ms", (int) time);
if (test.isCollect()) {
totalTime += time;
totalGCTime += gcCollectionTime;
}
}
......
......@@ -93,7 +93,7 @@ public class TestPerformance implements Database.DatabaseTest {
size = Integer.parseInt(args[++i]);
}
}
ArrayList<Database> dbs = new ArrayList<Database>();
ArrayList<Database> dbs = new ArrayList<>();
for (int i = 0; i < 100; i++) {
if (dbId != -1 && i != dbId) {
continue;
......@@ -107,7 +107,7 @@ public class TestPerformance implements Database.DatabaseTest {
}
}
}
ArrayList<Bench> tests = new ArrayList<Bench>();
ArrayList<Bench> tests = new ArrayList<>();
for (int i = 0; i < 100; i++) {
String testString = prop.getProperty("test" + i);
if (testString != null) {
......@@ -149,17 +149,17 @@ public class TestPerformance implements Database.DatabaseTest {
writer = new PrintWriter(new FileWriter(out));
ResultSet rs = stat.executeQuery(
"CALL '<table><tr><th>Test Case</th><th>Unit</th>' " +
"|| SELECT GROUP_CONCAT('<th>' || DB || '</th>' " +
"|| (SELECT GROUP_CONCAT('<th>' || DB || '</th>' " +
"ORDER BY DBID SEPARATOR '') FROM " +
"(SELECT DISTINCT DBID, DB FROM RESULTS)" +
"(SELECT DISTINCT DBID, DB FROM RESULTS))" +
"|| '</tr>' || CHAR(10) " +
"|| SELECT GROUP_CONCAT('<tr><td>' || TEST || " +
"|| (SELECT GROUP_CONCAT('<tr><td>' || TEST || " +
"'</td><td>' || UNIT || '</td>' || ( " +
"SELECT GROUP_CONCAT('<td>' || RESULT || '</td>' " +
"ORDER BY DBID SEPARATOR '') FROM RESULTS R2 WHERE " +
"R2.TESTID = R1.TESTID) || '</tr>' " +
"ORDER BY TESTID SEPARATOR CHAR(10)) FROM " +
"(SELECT DISTINCT TESTID, TEST, UNIT FROM RESULTS) R1" +
"(SELECT DISTINCT TESTID, TEST, UNIT FROM RESULTS) R1)" +
"|| '</table>'"
);
rs.next();
......@@ -243,6 +243,7 @@ public class TestPerformance implements Database.DatabaseTest {
int statPerSec = (int) (db.getExecutedStatements() * 1000L / db.getTotalTime());
db.log("Statements per second", "#", statPerSec);
System.out.println("Statements per second: " + statPerSec);
System.out.println("GC overhead: " + (100 * db.getTotalGCTime() / db.getTotalTime()) + "%");
collect = false;
db.stopServer();
}
......
......@@ -59,7 +59,7 @@ public class TestScalability implements Database.DatabaseTest {
stat.execute(
"CREATE TABLE IF NOT EXISTS RESULTS(TESTID INT, " +
"TEST VARCHAR, UNIT VARCHAR, DBID INT, " +
"DB VARCHAR, RESULT VARCHAR)");
"DB VARCHAR, TCNT INT, RESULT VARCHAR)");
} finally {
JdbcUtils.closeSilently(stat);
JdbcUtils.closeSilently(conn);
......@@ -67,34 +67,40 @@ public class TestScalability implements Database.DatabaseTest {
}
private void test() throws Exception {
final boolean exit = false;
FileUtils.deleteRecursive("data", true);
final String out = "benchmark.html";
final int size = 400;
ArrayList<Database> dbs = new ArrayList<Database>();
ArrayList<Database> dbs = new ArrayList<>();
int id = 1;
final String h2Url = "jdbc:h2:./data/test;" +
"LOCK_TIMEOUT=10000;LOCK_MODE=3";
"LOCK_TIMEOUT=10000;MV_STORE=FALSE;LOCK_MODE=3";
dbs.add(createDbEntry(id++, "H2", 1, h2Url));
dbs.add(createDbEntry(id++, "H2", 10, h2Url));
dbs.add(createDbEntry(id++, "H2", 20, h2Url));
dbs.add(createDbEntry(id++, "H2", 30, h2Url));
dbs.add(createDbEntry(id++, "H2", 40, h2Url));
dbs.add(createDbEntry(id++, "H2", 50, h2Url));
dbs.add(createDbEntry(id++, "H2", 100, h2Url));
dbs.add(createDbEntry(id++, "H2", 2, h2Url));
dbs.add(createDbEntry(id++, "H2", 4, h2Url));
dbs.add(createDbEntry(id++, "H2", 8, h2Url));
dbs.add(createDbEntry(id++, "H2", 16, h2Url));
dbs.add(createDbEntry(id++, "H2", 32, h2Url));
dbs.add(createDbEntry(id++, "H2", 64, h2Url));
final String mvUrl = "jdbc:h2:./data/mvTest;" +
"LOCK_TIMEOUT=10000;MV_STORE=TRUE";
"LOCK_TIMEOUT=10000;MULTI_THREADED=1";
dbs.add(createDbEntry(id++, "MV", 1, mvUrl));
dbs.add(createDbEntry(id++, "MV", 10, mvUrl));
dbs.add(createDbEntry(id++, "MV", 20, mvUrl));
dbs.add(createDbEntry(id++, "MV", 30, mvUrl));
dbs.add(createDbEntry(id++, "MV", 40, mvUrl));
dbs.add(createDbEntry(id++, "MV", 50, mvUrl));
dbs.add(createDbEntry(id++, "MV", 100, mvUrl));
final BenchB test = new BenchB();
dbs.add(createDbEntry(id++, "MV", 2, mvUrl));
dbs.add(createDbEntry(id++, "MV", 4, mvUrl));
dbs.add(createDbEntry(id++, "MV", 8, mvUrl));
dbs.add(createDbEntry(id++, "MV", 16, mvUrl));
dbs.add(createDbEntry(id++, "MV", 32, mvUrl));
dbs.add(createDbEntry(id++, "MV", 64, mvUrl));
final BenchB test = new BenchB() {
// Since we focus on scalability here, lets emphasize multi-threaded
// part of the test (transactions) and minimize impact of the init.
@Override
protected int getTransactionsPerClient(int size) {
return size * 8;
}
};
testAll(dbs, test, size);
collect = false;
......@@ -109,7 +115,7 @@ public class TestScalability implements Database.DatabaseTest {
stat = conn.createStatement();
prep = conn.prepareStatement(
"INSERT INTO RESULTS(TESTID, " +
"TEST, UNIT, DBID, DB, RESULT) VALUES(?, ?, ?, ?, ?, ?)");
"TEST, UNIT, DBID, DB, TCNT, RESULT) VALUES(?, ?, ?, ?, ?, ?, ?)");
for (int i = 0; i < results.size(); i++) {
Object[] res = results.get(i);
prep.setInt(1, i);
......@@ -118,28 +124,29 @@ public class TestScalability implements Database.DatabaseTest {
for (Database db : dbs) {
prep.setInt(4, db.getId());
prep.setString(5, db.getName());
prep.setInt(6, db.getThreadsCount());
Object[] v = db.getResults().get(i);
prep.setString(6, v[2].toString());
prep.setString(7, v[2].toString());
prep.execute();
}
}
writer = new PrintWriter(new FileWriter(out));
ResultSet rs = stat.executeQuery(
"CALL '<table><tr><th>Test Case</th>" +
"<th>Unit</th>' " +
"|| SELECT GROUP_CONCAT('<th>' || DB || '</th>' " +
"ORDER BY DBID SEPARATOR '') FROM " +
"(SELECT DISTINCT DBID, DB FROM RESULTS)" +
"|| '</tr>' || CHAR(10) " +
"|| SELECT GROUP_CONCAT('<tr><td>' || " +
"TEST || '</td><td>' || UNIT || '</td>' || ( " +
"SELECT GROUP_CONCAT('<td>' || RESULT || '</td>' " +
"ORDER BY DBID SEPARATOR '') FROM RESULTS R2 WHERE " +
"R2.TESTID = R1.TESTID) || '</tr>' " +
"ORDER BY TESTID SEPARATOR CHAR(10)) FROM " +
"(SELECT DISTINCT TESTID, TEST, UNIT FROM RESULTS) R1" +
"|| '</table>'");
"CALL '<table border=\"1\"><tr><th rowspan=\"2\">Test Case</th>" +
"<th rowspan=\"2\">Unit</th>' " +
"|| (SELECT GROUP_CONCAT('<th colspan=\"' || COLSPAN || '\">' || TCNT || '</th>' " +
"ORDER BY TCNT SEPARATOR '') FROM " +
"(SELECT TCNT, COUNT(*) COLSPAN FROM (SELECT DISTINCT DB, TCNT FROM RESULTS) GROUP BY TCNT))" +
"|| '</tr>' || CHAR(10) " +
"|| '<tr>' || (SELECT GROUP_CONCAT('<th>' || DB || '</th>' ORDER BY TCNT, DB SEPARATOR '')" +
" FROM (SELECT DISTINCT DB, TCNT FROM RESULTS)) || '</tr>' || CHAR(10) " +
"|| (SELECT GROUP_CONCAT('<tr><td>' || TEST || '</td><td>' || UNIT || '</td>' || ( " +
"SELECT GROUP_CONCAT('<td>' || RESULT || '</td>' ORDER BY TCNT,DB SEPARATOR '')" +
" FROM RESULTS R2 WHERE R2.TESTID = R1.TESTID) || '</tr>' " +
"ORDER BY TESTID SEPARATOR CHAR(10)) FROM " +
"(SELECT DISTINCT TESTID, TEST, UNIT FROM RESULTS) R1)" +
"|| '</table>'");
rs.next();
String result = rs.getString(1);
writer.println(result);
......@@ -149,16 +156,12 @@ public class TestScalability implements Database.DatabaseTest {
JdbcUtils.closeSilently(conn);
IOUtils.closeSilently(writer);
}
if (exit) {
System.exit(0);
}
}
private Database createDbEntry(int id, String namePrefix,
int threadCount, String url) {
Database db = Database.parse(this, id, namePrefix + "(" + threadCount +
"threads), org.h2.Driver, " + url + ", sa, sa", threadCount);
Database db = Database.parse(this, id, namePrefix +
", org.h2.Driver, " + url + ", sa, sa", threadCount);
return db;
}
......@@ -172,7 +175,8 @@ public class TestScalability implements Database.DatabaseTest {
// calls garbage collection
TestBase.getMemoryUsed();
Database db = dbs.get(i);
System.out.println("Testing the performance of " + db.getName());
System.out.println("Testing the performance of " + db.getName() +
" (" + db.getThreadsCount() + " threads)");
db.startServer();
Connection conn = db.openNewConnection();
DatabaseMetaData meta = conn.getMetaData();
......@@ -189,6 +193,7 @@ public class TestScalability implements Database.DatabaseTest {
1000L / db.getTotalTime());
db.log("Statements per second", "#", statPerSec);
System.out.println("Statements per second: " + statPerSec);
System.out.println("GC overhead: " + (100 * db.getTotalGCTime() / db.getTotalTime()) + "%");
collect = false;
db.stopServer();
}
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论