Unverified commit 19f45ea6, authored by Evgenij Ryazanov, committed by GitHub

Merge pull request #1683 from katzyn/result

Fix issues with IN(), DISTINCT, GROUP BY, PARTITION BY and others
......@@ -641,10 +641,12 @@ ON tableName ( indexColumn [,...] )
Creates a new index.
This command commits an open transaction in this connection.
Hash indexes are meant for in-memory databases and memory tables (CREATE MEMORY TABLE).
Hash indexes are meant for in-memory databases and memory tables (CREATE MEMORY TABLE) when PageStore engine is used.
For other tables, or if the index contains multiple columns, the HASH keyword is ignored.
Hash indexes can only test for equality, and do not support range queries (similar to a hash table).
Hash indexes can only test for equality, do not support range queries (similar to a hash table), use more memory,
but can perform lookups faster.
Non-unique keys are supported.
Spatial indexes are supported only on Geometry columns.
","
CREATE INDEX IDXNAME ON TEST(NAME)
......
......@@ -21,6 +21,14 @@ Change Log
<h2>Next Version (unreleased)</h2>
<ul>
<li>Issue #1681: IN () doesn't work with row values when data types are not exactly the same
</li>
<li>Issue #1320: OOME / GC overhead in IndexCursor.nextCursor()
</li>
<li>PR #1680: Assorted fixes for ALTER TABLE ALTER COLUMN
</li>
<li>PR #1679: Use TestScript for testSimple
</li>
<li>Issue #1677: Unable to use VALUES keyword in WHERE clause
</li>
<li>Issue #1672: Deadlock on MVStore close in TestOutOfMemory
......
......@@ -278,6 +278,10 @@ public class MergeUsing extends Prepared {
private String queryAlias;
private int countUpdatedRows;
private Select targetMatchQuery;
/**
* Contains mappings between _ROWID_ and ROW_NUMBER for processed rows. Row
* identities are remembered to prevent duplicate updates of the same row.
*/
private final HashMap<Value, Integer> targetRowidsRemembered = new HashMap<>();
private int sourceQueryRowNumber;
......
......@@ -319,6 +319,15 @@ public abstract class Query extends Prepared {
return distinct;
}
/**
* Returns whether results support random access.
*
* @return whether results support random access
*/
public boolean isRandomAccessResult() {
return randomAccessResult;
}
/**
* Whether results need to support random access.
*
......
......@@ -51,8 +51,8 @@ import org.h2.util.StatementBuilder;
import org.h2.util.StringUtils;
import org.h2.util.Utils;
import org.h2.value.Value;
import org.h2.value.ValueArray;
import org.h2.value.ValueNull;
import org.h2.value.ValueRow;
/**
* This class represents a simple SELECT statement.
......@@ -499,7 +499,7 @@ public class Select extends Query {
}
private void processGroupResult(int columnCount, LocalResult result, long offset, boolean quickOffset) {
for (ValueArray currentGroupsKey; (currentGroupsKey = groupData.next()) != null;) {
for (ValueRow currentGroupsKey; (currentGroupsKey = groupData.next()) != null;) {
Value[] keyValues = currentGroupsKey.getList();
Value[] row = new Value[columnCount];
for (int j = 0; groupIndex != null && j < groupIndex.length; j++) {
......
......@@ -11,14 +11,14 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.h2.engine.Session;
import org.h2.expression.Expression;
import org.h2.expression.analysis.DataAnalysisOperation;
import org.h2.expression.analysis.PartitionData;
import org.h2.util.ValueHashMap;
import org.h2.value.Value;
import org.h2.value.ValueArray;
import org.h2.value.ValueRow;
/**
* Grouped data for aggregates.
......@@ -51,18 +51,18 @@ public abstract class SelectGroups {
/**
* Map of group-by key to group-by expression data e.g. AggregateData
*/
private HashMap<ValueArray, Object[]> groupByData;
private TreeMap<ValueRow, Object[]> groupByData;
/**
* Key into groupByData that produces currentGroupByExprData. Not used
* in lazy mode.
*/
private ValueArray currentGroupsKey;
private ValueRow currentGroupsKey;
/**
* Cursor for {@link #next()} method.
*/
private Iterator<Entry<ValueArray, Object[]>> cursor;
private Iterator<Entry<ValueRow, Object[]>> cursor;
Grouped(Session session, ArrayList<Expression> expressions, int[] groupIndex) {
super(session, expressions);
......@@ -72,7 +72,7 @@ public abstract class SelectGroups {
@Override
public void reset() {
super.reset();
groupByData = new HashMap<>();
groupByData = new TreeMap<>(session.getDatabase().getCompareMode());
currentGroupsKey = null;
cursor = null;
}
......@@ -80,7 +80,7 @@ public abstract class SelectGroups {
@Override
public void nextSource() {
if (groupIndex == null) {
currentGroupsKey = ValueArray.getEmpty();
currentGroupsKey = ValueRow.getEmpty();
} else {
Value[] keyValues = new Value[groupIndex.length];
// update group
......@@ -89,7 +89,7 @@ public abstract class SelectGroups {
Expression expr = expressions.get(idx);
keyValues[i] = expr.getValue(session);
}
currentGroupsKey = ValueArray.get(keyValues);
currentGroupsKey = ValueRow.get(keyValues);
}
Object[] values = groupByData.get(currentGroupsKey);
if (values == null) {
......@@ -114,15 +114,15 @@ public abstract class SelectGroups {
public void done() {
super.done();
if (groupIndex == null && groupByData.size() == 0) {
groupByData.put(ValueArray.getEmpty(), createRow());
groupByData.put(ValueRow.getEmpty(), createRow());
}
cursor = groupByData.entrySet().iterator();
}
@Override
public ValueArray next() {
public ValueRow next() {
if (cursor.hasNext()) {
Map.Entry<ValueArray, Object[]> entry = cursor.next();
Map.Entry<ValueRow, Object[]> entry = cursor.next();
currentGroupByExprData = entry.getValue();
currentGroupRowId++;
return entry.getKey();
......@@ -184,11 +184,11 @@ public abstract class SelectGroups {
}
@Override
public ValueArray next() {
public ValueRow next() {
if (cursor.hasNext()) {
currentGroupByExprData = cursor.next();
currentGroupRowId++;
return ValueArray.getEmpty();
return ValueRow.getEmpty();
}
return null;
}
......@@ -223,7 +223,7 @@ public abstract class SelectGroups {
/**
* Maps a partitioned window expression object to its data.
*/
private final HashMap<DataAnalysisOperation, ValueHashMap<PartitionData>> windowPartitionData = new HashMap<>();
private final HashMap<DataAnalysisOperation, TreeMap<Value, PartitionData>> windowPartitionData = new HashMap<>();
/**
* The id of the current group.
......@@ -324,7 +324,7 @@ public abstract class SelectGroups {
if (partitionKey == null) {
return windowData.get(expr);
} else {
ValueHashMap<PartitionData> map = windowPartitionData.get(expr);
TreeMap<Value, PartitionData> map = windowPartitionData.get(expr);
return map != null ? map.get(partitionKey) : null;
}
}
......@@ -344,9 +344,9 @@ public abstract class SelectGroups {
Object old = windowData.put(expr, obj);
assert old == null;
} else {
ValueHashMap<PartitionData> map = windowPartitionData.get(expr);
TreeMap<Value, PartitionData> map = windowPartitionData.get(expr);
if (map == null) {
map = new ValueHashMap<>();
map = new TreeMap<>(session.getDatabase().getCompareMode());
windowPartitionData.put(expr, map);
}
map.put(partitionKey, obj);
......@@ -397,7 +397,7 @@ public abstract class SelectGroups {
*
* @return the key of the next group, or null
*/
public abstract ValueArray next();
public abstract ValueRow next();
/**
* Removes the data for the current key.
......
......@@ -10,6 +10,7 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.h2.api.ErrorCode;
import org.h2.command.dml.Select;
import org.h2.command.dml.SelectOrderBy;
......@@ -32,7 +33,6 @@ import org.h2.table.ColumnResolver;
import org.h2.table.Table;
import org.h2.table.TableFilter;
import org.h2.util.StatementBuilder;
import org.h2.util.ValueHashMap;
import org.h2.value.CompareMode;
import org.h2.value.DataType;
import org.h2.value.TypeInfo;
......@@ -303,7 +303,7 @@ public class Aggregate extends AbstractAggregate {
v = updateCollecting(session, v, remembered);
}
}
data.add(session.getDatabase(), type.getValueType(), v);
data.add(session.getDatabase(), v);
}
@Override
......@@ -376,7 +376,7 @@ public class Aggregate extends AbstractAggregate {
@Override
protected Object createAggregateData() {
return AggregateData.create(aggregateType, distinct);
return AggregateData.create(aggregateType, distinct, type.getValueType());
}
@Override
......@@ -440,11 +440,11 @@ public class Aggregate extends AbstractAggregate {
if (c.getCount() == 0) {
return ValueNull.INSTANCE;
}
AggregateDataDefault d = new AggregateDataDefault(aggregateType);
AggregateDataDefault d = new AggregateDataDefault(aggregateType, type.getValueType());
Database db = session.getDatabase();
int dataType = type.getValueType();
for (Value v : c) {
d.add(db, dataType, v);
d.add(db, v);
}
return d.getValue(db, dataType);
}
......@@ -512,13 +512,13 @@ public class Aggregate extends AbstractAggregate {
}
private Value getHistogram(Session session, AggregateData data) {
ValueHashMap<LongDataCounter> distinctValues = ((AggregateDataDistinctWithCounts) data).getValues();
TreeMap<Value, LongDataCounter> distinctValues = ((AggregateDataDistinctWithCounts) data).getValues();
if (distinctValues == null) {
return ValueArray.getEmpty();
}
ValueArray[] values = new ValueArray[distinctValues.size()];
int i = 0;
for (Entry<Value, LongDataCounter> entry : distinctValues.entries()) {
for (Entry<Value, LongDataCounter> entry : distinctValues.entrySet()) {
LongDataCounter d = entry.getValue();
values[i] = ValueArray.get(new Value[] { entry.getKey(), ValueLong.get(distinct ? 1L : d.count) });
i++;
......@@ -539,14 +539,14 @@ public class Aggregate extends AbstractAggregate {
private Value getMode(Session session, AggregateData data) {
Value v = ValueNull.INSTANCE;
ValueHashMap<LongDataCounter> distinctValues = ((AggregateDataDistinctWithCounts) data).getValues();
TreeMap<Value, LongDataCounter> distinctValues = ((AggregateDataDistinctWithCounts) data).getValues();
if (distinctValues == null) {
return v;
}
long count = 0L;
if (orderByList != null) {
boolean desc = (orderByList.get(0).sortType & SortOrder.DESCENDING) != 0;
for (Entry<Value, LongDataCounter> entry : distinctValues.entries()) {
for (Entry<Value, LongDataCounter> entry : distinctValues.entrySet()) {
long c = entry.getValue().count;
if (c > count) {
v = entry.getKey();
......@@ -565,7 +565,7 @@ public class Aggregate extends AbstractAggregate {
}
}
} else {
for (Entry<Value, LongDataCounter> entry : distinctValues.entries()) {
for (Entry<Value, LongDataCounter> entry : distinctValues.entrySet()) {
long c = entry.getValue().count;
if (c > count) {
v = entry.getKey();
......
......@@ -21,9 +21,10 @@ abstract class AggregateData {
*
* @param aggregateType the type of the aggregate operation
* @param distinct if the calculation should be distinct
* @param dataType the data type of the computed result
* @return the aggregate data object of the specified type
*/
static AggregateData create(AggregateType aggregateType, boolean distinct) {
static AggregateData create(AggregateType aggregateType, boolean distinct, int dataType) {
switch (aggregateType) {
case COUNT_ALL:
return new AggregateDataCount(true);
......@@ -42,7 +43,7 @@ abstract class AggregateData {
case BIT_AND:
case ANY:
case EVERY:
return new AggregateDataDefault(aggregateType);
return new AggregateDataDefault(aggregateType, dataType);
case SUM:
case AVG:
case STDDEV_POP:
......@@ -50,7 +51,7 @@ abstract class AggregateData {
case VAR_POP:
case VAR_SAMP:
if (!distinct) {
return new AggregateDataDefault(aggregateType);
return new AggregateDataDefault(aggregateType, dataType);
}
break;
case SELECTIVITY:
......@@ -71,10 +72,9 @@ abstract class AggregateData {
* Add a value to this aggregate.
*
* @param database the database
* @param dataType the datatype of the computed result
* @param v the value
*/
abstract void add(Database database, int dataType, Value v);
abstract void add(Database database, Value v);
/**
* Get the aggregate result.
......
......@@ -8,8 +8,8 @@ package org.h2.expression.aggregate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.TreeSet;
import org.h2.engine.Database;
import org.h2.value.Value;
......@@ -41,13 +41,13 @@ class AggregateDataCollecting extends AggregateData implements Iterable<Value> {
}
@Override
void add(Database database, int dataType, Value v) {
void add(Database database, Value v) {
if (v == ValueNull.INSTANCE) {
return;
}
Collection<Value> c = values;
if (c == null) {
values = c = distinct ? new HashSet<Value>() : new ArrayList<Value>();
values = c = distinct ? new TreeSet<>(database.getCompareMode()) : new ArrayList<Value>();
}
c.add(v);
}
......
......@@ -24,7 +24,7 @@ class AggregateDataCount extends AggregateData {
}
@Override
void add(Database database, int dataType, Value v) {
void add(Database database, Value v) {
if (all || v != ValueNull.INSTANCE) {
count++;
}
......
......@@ -21,19 +21,22 @@ import org.h2.value.ValueNull;
class AggregateDataDefault extends AggregateData {
private final AggregateType aggregateType;
private final int dataType;
private long count;
private Value value;
private double m2, mean;
/**
* @param aggregateType the type of the aggregate operation
* @param dataType the data type of the computed result
*/
AggregateDataDefault(AggregateType aggregateType) {
AggregateDataDefault(AggregateType aggregateType, int dataType) {
this.aggregateType = aggregateType;
this.dataType = dataType;
}
@Override
void add(Database database, int dataType, Value v) {
void add(Database database, Value v) {
if (v == ValueNull.INSTANCE) {
return;
}
......
......@@ -5,8 +5,8 @@
*/
package org.h2.expression.aggregate;
import java.util.TreeMap;
import org.h2.engine.Database;
import org.h2.util.ValueHashMap;
import org.h2.value.Value;
import org.h2.value.ValueNull;
......@@ -20,7 +20,7 @@ class AggregateDataDistinctWithCounts extends AggregateData {
private final int maxDistinctCount;
private ValueHashMap<LongDataCounter> values;
private TreeMap<Value, LongDataCounter> values;
/**
* Creates new instance of data for aggregate that needs distinct values
......@@ -37,12 +37,12 @@ class AggregateDataDistinctWithCounts extends AggregateData {
}
@Override
void add(Database database, int dataType, Value v) {
void add(Database database, Value v) {
if (ignoreNulls && v == ValueNull.INSTANCE) {
return;
}
if (values == null) {
values = new ValueHashMap<>();
values = new TreeMap<>(database.getCompareMode());
}
LongDataCounter a = values.get(v);
if (a == null) {
......@@ -65,7 +65,7 @@ class AggregateDataDistinctWithCounts extends AggregateData {
*
* @return map with values and their counts
*/
ValueHashMap<LongDataCounter> getValues() {
TreeMap<Value, LongDataCounter> getValues() {
return values;
}
......
......@@ -57,7 +57,7 @@ class AggregateDataEnvelope extends AggregateData {
}
@Override
void add(Database database, int dataType, Value v) {
void add(Database database, Value v) {
if (v == ValueNull.INSTANCE) {
return;
}
......
......@@ -32,7 +32,7 @@ class AggregateDataSelectivity extends AggregateData {
}
@Override
void add(Database database, int dataType, Value v) {
void add(Database database, Value v) {
count++;
if (distinctHashes == null) {
distinctHashes = new IntIntHashMap();
......
......@@ -20,9 +20,9 @@ import org.h2.table.TableFilter;
import org.h2.value.DataType;
import org.h2.value.TypeInfo;
import org.h2.value.Value;
import org.h2.value.ValueArray;
import org.h2.value.ValueBoolean;
import org.h2.value.ValueNull;
import org.h2.value.ValueRow;
/**
* This class wraps a user-defined aggregate.
......@@ -152,7 +152,7 @@ public class JavaAggregate extends AbstractAggregate {
if (args.length == 1) {
agg.add(value.getObject());
} else {
Value[] values = ((ValueArray) value).getList();
Value[] values = ((ValueRow) value).getList();
Object[] argValues = new Object[args.length];
for (int i = 0, len = args.length; i < len; i++) {
argValues[i] = values[i].getObject();
......@@ -193,7 +193,7 @@ public class JavaAggregate extends AbstractAggregate {
arg = arg.convertTo(argTypes[i]);
argValues[i] = arg;
}
data.add(session.getDatabase(), dataType, args.length == 1 ? arg : ValueArray.get(argValues));
data.add(session.getDatabase(), args.length == 1 ? arg : ValueRow.get(argValues));
} else {
Aggregate agg = (Aggregate) aggregateData;
Object[] argValues = new Object[args.length];
......
......@@ -16,7 +16,7 @@ import org.h2.result.SortOrder;
import org.h2.table.ColumnResolver;
import org.h2.table.TableFilter;
import org.h2.value.Value;
import org.h2.value.ValueArray;
import org.h2.value.ValueRow;
/**
* Window clause.
......@@ -206,7 +206,7 @@ public final class Window {
Expression expr = partitionBy.get(i);
keyValues[i] = expr.getValue(session);
}
return ValueArray.get(keyValues);
return ValueRow.get(keyValues);
}
}
......
......@@ -26,14 +26,13 @@ import org.h2.value.ValueNull;
/**
* Used for optimised IN(...) queries where the contents of the IN list are all
* constant and of the same type.
* <p>
Checking using a HashSet has time complexity O(1), instead of O(n) for
* checking using an array.
*/
public class ConditionInConstantSet extends Condition {
private Expression left;
private final ArrayList<Expression> valueList;
// HashSet cannot be used here, because we need to compare values of
// different type or scale properly.
private final TreeSet<Value> valueSet;
private boolean hasNull;
private final TypeInfo type;
......
......@@ -5,7 +5,12 @@
*/
package org.h2.index;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import org.h2.command.dml.AllColumnsForPlan;
import org.h2.engine.Mode.UniqueIndexNullsHandling;
import org.h2.engine.Session;
import org.h2.message.DbException;
import org.h2.result.Row;
......@@ -15,8 +20,9 @@ import org.h2.table.Column;
import org.h2.table.IndexColumn;
import org.h2.table.RegularTable;
import org.h2.table.TableFilter;
import org.h2.util.ValueHashMap;
import org.h2.value.DataType;
import org.h2.value.Value;
import org.h2.value.ValueNull;
/**
* A unique index based on an in-memory hash map.
......@@ -27,20 +33,22 @@ public class HashIndex extends BaseIndex {
* The index of the indexed column.
*/
private final int indexColumn;
private final boolean totalOrdering;
private final RegularTable tableData;
private ValueHashMap<Long> rows;
private Map<Value, Long> rows;
private final ArrayList<Long> nullRows = new ArrayList<>();
public HashIndex(RegularTable table, int id, String indexName,
IndexColumn[] columns, IndexType indexType) {
public HashIndex(RegularTable table, int id, String indexName, IndexColumn[] columns, IndexType indexType) {
super(table, id, indexName, columns, indexType);
this.indexColumn = columns[0].column.getColumnId();
Column column = columns[0].column;
indexColumn = column.getColumnId();
totalOrdering = DataType.hasTotalOrdering(column.getType().getValueType());
this.tableData = table;
reset();
}
private void reset() {
rows = new ValueHashMap<>();
rows = totalOrdering ? new HashMap<Value, Long>() : new TreeMap<Value, Long>(database.getCompareMode());
}
@Override
......@@ -51,17 +59,28 @@ public class HashIndex extends BaseIndex {
@Override
public void add(Session session, Row row) {
Value key = row.getValue(indexColumn);
Object old = rows.get(key);
if (old != null) {
// TODO index duplicate key for hash indexes: is this allowed?
throw getDuplicateKeyException(key.toString());
if (key != ValueNull.INSTANCE
|| database.getMode().uniqueIndexNullsHandling == UniqueIndexNullsHandling.FORBID_ANY_DUPLICATES) {
Object old = rows.get(key);
if (old != null) {
// TODO index duplicate key for hash indexes: is this allowed?
throw getDuplicateKeyException(key.toString());
}
rows.put(key, row.getKey());
} else {
nullRows.add(row.getKey());
}
rows.put(key, row.getKey());
}
@Override
public void remove(Session session, Row row) {
rows.remove(row.getValue(indexColumn));
Value key = row.getValue(indexColumn);
if (key != ValueNull.INSTANCE
|| database.getMode().uniqueIndexNullsHandling == UniqueIndexNullsHandling.FORBID_ANY_DUPLICATES) {
rows.remove(key);
} else {
nullRows.remove(row.getKey());
}
}
@Override
......@@ -71,6 +90,10 @@ public class HashIndex extends BaseIndex {
throw DbException.throwInternalError(first + " " + last);
}
Value v = first.getValue(indexColumn);
if (v == ValueNull.INSTANCE
&& database.getMode().uniqueIndexNullsHandling != UniqueIndexNullsHandling.FORBID_ANY_DUPLICATES) {
return new NonUniqueHashCursor(session, tableData, nullRows);
}
/*
* Sometimes the incoming search is a similar, but not the same type
* e.g. the search value is INT, but the index column is LONG. In which
......@@ -90,12 +113,12 @@ public class HashIndex extends BaseIndex {
@Override
public long getRowCount(Session session) {
return rows.size();
return getRowCountApproximation();
}
@Override
public long getRowCountApproximation() {
return rows.size();
return rows.size() + nullRows.size();
}
@Override
......
......@@ -7,8 +7,8 @@ package org.h2.index;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.TreeSet;
import org.h2.command.dml.Query;
import org.h2.engine.Session;
import org.h2.expression.Expression;
......@@ -123,8 +123,8 @@ public class IndexCondition {
* @return the index condition
*/
public static IndexCondition getInQuery(ExpressionColumn column, Query query) {
IndexCondition cond = new IndexCondition(Comparison.IN_QUERY, column,
null);
assert query.isRandomAccessResult();
IndexCondition cond = new IndexCondition(Comparison.IN_QUERY, column, null);
cond.expressionQuery = query;
return cond;
}
......@@ -147,7 +147,7 @@ public class IndexCondition {
* @return the value list
*/
public Value[] getCurrentValueList(Session session) {
HashSet<Value> valueSet = new HashSet<>();
TreeSet<Value> valueSet = new TreeSet<>(session.getDatabase().getCompareMode());
for (Expression e : expressionList) {
Value v = e.getValue(session);
v = column.convert(v);
......
......@@ -6,7 +6,6 @@
package org.h2.index;
import java.util.ArrayList;
import java.util.HashSet;
import org.h2.engine.Session;
import org.h2.expression.condition.Comparison;
......@@ -45,7 +44,6 @@ public class IndexCursor implements Cursor {
private int inListIndex;
private Value[] inList;
private ResultInterface inResult;
private HashSet<Value> inResultTested;
public IndexCursor(TableFilter filter) {
this.tableFilter = filter;
......@@ -79,7 +77,6 @@ public class IndexCursor implements Cursor {
inList = null;
inColumn = null;
inResult = null;
inResultTested = null;
intersects = null;
for (IndexCondition condition : indexConditions) {
if (condition.isAlwaysFalse()) {
......@@ -311,13 +308,8 @@ public class IndexCursor implements Cursor {
while (inResult.next()) {
Value v = inResult.currentRow()[0];
if (v != ValueNull.INSTANCE) {
if (inResultTested == null) {
inResultTested = new HashSet<>();
}
if (inResultTested.add(v)) {
find(v);
break;
}
find(v);
break;
}
}
}
......
......@@ -6,6 +6,9 @@
package org.h2.index;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import org.h2.command.dml.AllColumnsForPlan;
import org.h2.engine.Session;
......@@ -18,7 +21,7 @@ import org.h2.table.IndexColumn;
import org.h2.table.RegularTable;
import org.h2.table.TableFilter;
import org.h2.util.Utils;
import org.h2.util.ValueHashMap;
import org.h2.value.DataType;
import org.h2.value.Value;
/**
......@@ -32,20 +35,24 @@ public class NonUniqueHashIndex extends BaseIndex {
* The index of the indexed column.
*/
private final int indexColumn;
private ValueHashMap<ArrayList<Long>> rows;
private final boolean totalOrdering;
private Map<Value, ArrayList<Long>> rows;
private final RegularTable tableData;
private long rowCount;
public NonUniqueHashIndex(RegularTable table, int id, String indexName,
IndexColumn[] columns, IndexType indexType) {
super(table, id, indexName, columns, indexType);
this.indexColumn = columns[0].column.getColumnId();
this.tableData = table;
Column column = columns[0].column;
indexColumn = column.getColumnId();
totalOrdering = DataType.hasTotalOrdering(column.getType().getValueType());
tableData = table;
reset();
}
private void reset() {
rows = new ValueHashMap<>();
rows = totalOrdering ? new HashMap<Value, ArrayList<Long>>()
: new TreeMap<Value, ArrayList<Long>>(database.getCompareMode());
rowCount = 0;
}
......
......@@ -13,7 +13,7 @@ import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVMap.Builder;
import org.h2.result.ResultExternal;
import org.h2.value.Value;
import org.h2.value.ValueArray;
import org.h2.value.ValueRow;
/**
* Plain temporary result.
......@@ -23,7 +23,7 @@ class MVPlainTempResult extends MVTempResult {
/**
* Map with identities of rows as keys and rows as values.
*/
private final MVMap<Long, ValueArray> map;
private final MVMap<Long, ValueRow> map;
/**
* Counter for the identities of rows. A separate counter is used instead of
......@@ -35,7 +35,7 @@ class MVPlainTempResult extends MVTempResult {
/**
* Cursor for the {@link #next()} method.
*/
private Cursor<Long, ValueArray> cursor;
private Cursor<Long, ValueRow> cursor;
/**
* Creates a shallow copy of the result.
......@@ -61,7 +61,7 @@ class MVPlainTempResult extends MVTempResult {
MVPlainTempResult(Database database, Expression[] expressions, int visibleColumnCount) {
super(database, expressions.length, visibleColumnCount);
ValueDataType valueType = new ValueDataType(database, new int[columnCount]);
Builder<Long, ValueArray> builder = new MVMap.Builder<Long, ValueArray>()
Builder<Long, ValueRow> builder = new MVMap.Builder<Long, ValueRow>()
.valueType(valueType).singleWriter();
map = store.openMap("tmp", builder);
}
......@@ -69,7 +69,7 @@ class MVPlainTempResult extends MVTempResult {
@Override
public int addRow(Value[] values) {
assert parent == null;
map.append(counter++, ValueArray.get(values));
map.append(counter++, ValueRow.get(values));
return ++rowCount;
}
......
......@@ -17,7 +17,7 @@ import org.h2.mvstore.MVMap.Builder;
import org.h2.result.ResultExternal;
import org.h2.result.SortOrder;
import org.h2.value.Value;
import org.h2.value.ValueArray;
import org.h2.value.ValueRow;
/**
* Sorted temporary result.
......@@ -48,7 +48,7 @@ class MVSortedTempResult extends MVTempResult {
* Map with rows as keys and counts of duplicate rows as values. If this map is
* distinct all values are 1.
*/
private final MVMap<ValueArray, Long> map;
private final MVMap<ValueRow, Long> map;
/**
* Optional index. This index is created only if result is distinct and
......@@ -56,12 +56,12 @@ class MVSortedTempResult extends MVTempResult {
* {@link #contains(Value[])} method is invoked. Only the root result should
* have an index if required.
*/
private MVMap<ValueArray, Boolean> index;
private MVMap<ValueRow, Boolean> index;
/**
* Cursor for the {@link #next()} method.
*/
private Cursor<ValueArray, Long> cursor;
private Cursor<ValueRow, Long> cursor;
/**
* Current value for the {@link #next()} method. Used in non-distinct results
......@@ -167,12 +167,12 @@ class MVSortedTempResult extends MVTempResult {
}
this.indexes = indexes;
ValueDataType keyType = new ValueDataType(database, sortTypes);
Builder<ValueArray, Long> builder = new MVMap.Builder<ValueArray, Long>().keyType(keyType);
Builder<ValueRow, Long> builder = new MVMap.Builder<ValueRow, Long>().keyType(keyType);
map = store.openMap("tmp", builder);
if (distinct && length != visibleColumnCount || distinctIndexes != null) {
int count = distinctIndexes != null ? distinctIndexes.length : visibleColumnCount;
ValueDataType distinctType = new ValueDataType(database, new int[count]);
Builder<ValueArray, Boolean> indexBuilder = new MVMap.Builder<ValueArray, Boolean>().keyType(distinctType);
Builder<ValueRow, Boolean> indexBuilder = new MVMap.Builder<ValueRow, Boolean>().keyType(distinctType);
index = store.openMap("idx", indexBuilder);
}
}
......@@ -180,7 +180,7 @@ class MVSortedTempResult extends MVTempResult {
@Override
public int addRow(Value[] values) {
assert parent == null;
ValueArray key = getKey(values);
ValueRow key = getKey(values);
if (distinct || distinctIndexes != null) {
if (distinctIndexes != null) {
int cnt = distinctIndexes.length;
......@@ -188,12 +188,12 @@ class MVSortedTempResult extends MVTempResult {
for (int i = 0; i < cnt; i++) {
newValues[i] = values[distinctIndexes[i]];
}
ValueArray distinctRow = ValueArray.get(newValues);
ValueRow distinctRow = ValueRow.get(newValues);
if (index.putIfAbsent(distinctRow, true) != null) {
return rowCount;
}
} else if (columnCount != visibleColumnCount) {
ValueArray distinctRow = ValueArray.get(Arrays.copyOf(values, visibleColumnCount));
ValueRow distinctRow = ValueRow.get(Arrays.copyOf(values, visibleColumnCount));
if (index.putIfAbsent(distinctRow, true) != null) {
return rowCount;
}
......@@ -222,7 +222,7 @@ class MVSortedTempResult extends MVTempResult {
}
assert distinct;
if (columnCount != visibleColumnCount) {
return index.containsKey(ValueArray.get(values));
return index.containsKey(ValueRow.get(values));
}
return map.containsKey(getKey(values));
}
......@@ -240,13 +240,13 @@ class MVSortedTempResult extends MVTempResult {
}
/**
* Reorder values if required and convert them into {@link ValueArray}.
* Reorder values if required and convert them into {@link ValueRow}.
*
* @param values
* values
* @return ValueArray for maps
* @return ValueRow for maps
*/
private ValueArray getKey(Value[] values) {
private ValueRow getKey(Value[] values) {
if (indexes != null) {
Value[] r = new Value[indexes.length];
for (int i = 0; i < indexes.length; i++) {
......@@ -254,7 +254,7 @@ class MVSortedTempResult extends MVTempResult {
}
values = r;
}
return ValueArray.get(values);
return ValueRow.get(values);
}
/**
......
......@@ -7,7 +7,7 @@ package org.h2.mvstore.db;
import java.io.IOException;
import java.lang.ref.Reference;
import java.util.ArrayList;
import java.util.Collection;
import org.h2.engine.Constants;
import org.h2.engine.Database;
......@@ -178,7 +178,7 @@ public abstract class MVTempResult implements ResultExternal {
}
@Override
public int addRows(ArrayList<Value[]> rows) {
public int addRows(Collection<Value[]> rows) {
for (Value[] row : rows) {
addRow(row);
}
......
......@@ -7,6 +7,7 @@ package org.h2.result;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.TreeMap;
import org.h2.engine.Database;
import org.h2.engine.Session;
import org.h2.engine.SessionInterface;
......@@ -14,10 +15,9 @@ import org.h2.expression.Expression;
import org.h2.message.DbException;
import org.h2.mvstore.db.MVTempResult;
import org.h2.util.Utils;
import org.h2.util.ValueHashMap;
import org.h2.value.TypeInfo;
import org.h2.value.Value;
import org.h2.value.ValueArray;
import org.h2.value.ValueRow;
/**
* A local result set contains all row data of a result set.
......@@ -34,7 +34,9 @@ public class LocalResultImpl implements LocalResult {
private int rowId, rowCount;
private ArrayList<Value[]> rows;
private SortOrder sort;
private ValueHashMap<Value[]> distinctRows;
// HashSet cannot be used here, because we need to compare values of
// different type or scale properly.
private TreeMap<Value, Value[]> distinctRows;
private Value[] currentRow;
private int offset;
private int limit = -1;
......@@ -145,7 +147,7 @@ public class LocalResultImpl implements LocalResult {
public void setDistinct() {
assert distinctIndexes == null;
distinct = true;
distinctRows = new ValueHashMap<>();
distinctRows = new TreeMap<>(session.getDatabase().getCompareMode());
}
/**
......@@ -157,7 +159,7 @@ public class LocalResultImpl implements LocalResult {
public void setDistinct(int[] distinctIndexes) {
assert !distinct;
this.distinctIndexes = distinctIndexes;
distinctRows = new ValueHashMap<>();
distinctRows = new TreeMap<>(session.getDatabase().getCompareMode());
}
/**
......@@ -179,7 +181,7 @@ public class LocalResultImpl implements LocalResult {
}
assert values.length == visibleColumnCount;
if (distinctRows != null) {
ValueArray array = ValueArray.get(values);
ValueRow array = ValueRow.get(values);
distinctRows.remove(array);
rowCount = distinctRows.size();
} else {
......@@ -200,13 +202,13 @@ public class LocalResultImpl implements LocalResult {
return external.contains(values);
}
if (distinctRows == null) {
distinctRows = new ValueHashMap<>();
distinctRows = new TreeMap<>(session.getDatabase().getCompareMode());
for (Value[] row : rows) {
ValueArray array = getArrayOfDistinct(row);
ValueRow array = getDistinctRow(row);
distinctRows.put(array, array.getList());
}
}
ValueArray array = ValueArray.get(values);
ValueRow array = ValueRow.get(values);
return distinctRows.get(array) != null;
}
......@@ -284,7 +286,7 @@ public class LocalResultImpl implements LocalResult {
}
}
private ValueArray getArrayOfDistinct(Value[] values) {
private ValueRow getDistinctRow(Value[] values) {
if (distinctIndexes != null) {
int cnt = distinctIndexes.length;
Value[] newValues = new Value[cnt];
......@@ -295,7 +297,7 @@ public class LocalResultImpl implements LocalResult {
} else if (values.length > visibleColumnCount) {
values = Arrays.copyOf(values, visibleColumnCount);
}
return ValueArray.get(values);
return ValueRow.get(values);
}
private void createExternalResult() {
......@@ -317,8 +319,10 @@ public class LocalResultImpl implements LocalResult {
cloneLobs(values);
if (isAnyDistinct()) {
if (distinctRows != null) {
ValueArray array = getArrayOfDistinct(values);
distinctRows.putIfAbsent(array, values);
ValueRow array = getDistinctRow(values);
if (!distinctRows.containsKey(array)) {
distinctRows.put(array, values);
}
rowCount = distinctRows.size();
if (rowCount > maxMemoryRows) {
createExternalResult();
......@@ -359,7 +363,7 @@ public class LocalResultImpl implements LocalResult {
addRowsToDisk();
} else {
if (isAnyDistinct()) {
rows = distinctRows.values();
rows = new ArrayList<>(distinctRows.values());
}
if (sort != null && limit != 0 && !limitsWereApplied) {
boolean withLimit = limit > 0 && withTiesSortOrder == null;
......
......@@ -5,7 +5,7 @@
*/
package org.h2.result;
import java.util.ArrayList;
import java.util.Collection;
import org.h2.value.Value;
/**
......@@ -40,7 +40,7 @@ public interface ResultExternal {
* @param rows the list of rows to add
* @return the new number of rows in this object
*/
int addRows(ArrayList<Value[]> rows);
int addRows(Collection<Value[]> rows);
/**
* Close this object and delete the temporary file.
......
......@@ -6,9 +6,9 @@
package org.h2.result;
import java.lang.ref.Reference;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collection;
import org.h2.command.ddl.CreateTableData;
import org.h2.engine.Constants;
......@@ -263,11 +263,7 @@ public class ResultTempTable implements ResultExternal {
}
@Override
public int addRows(ArrayList<Value[]> rows) {
// speeds up inserting, but not really needed:
if (sort != null) {
sort.sort(rows);
}
public int addRows(Collection<Value[]> rows) {
for (Value[] values : rows) {
addRow(values);
}
......
/*
* Copyright 2004-2019 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.util;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import org.h2.message.DbException;
import org.h2.value.Value;
import org.h2.value.ValueNull;
/**
 * This hash map supports keys of type Value.
 * <p>
 * ValueHashMap is a very simple implementation without allocation of additional
 * objects for entries. It's very fast with good distribution of hashes, but if
 * hashes have a lot of collisions this implementation tends to be very slow.
 * <p>
 * HashMap in archaic versions of Java has some overhead for allocation of
 * entries, but slightly better behaviour with a limited number of collisions,
 * because collisions have no impact on non-colliding entries. HashMap in modern
 * versions of Java also has the same overhead, but it builds trees of keys
 * with colliding hashes, that's why even if all keys have exactly the same
 * hash code it still offers good performance similar to TreeMap. So
 * ValueHashMap is faster in typical cases, but may behave really badly in some
 * cases. HashMap is slower in typical cases, but its performance does not
 * degrade too much even in the worst possible case (if keys are comparable).
 *
 * @param <V> the value type
 */
public class ValueHashMap<V> extends HashBase {

    /**
     * Keys array. A slot is empty when its key is {@code null}, and is a
     * tombstone (left behind by {@link #remove(Value)} so that probe
     * sequences remain unbroken) when its key is {@code ValueNull.DELETED}.
     */
    Value[] keys;

    /**
     * Values array, parallel to {@link #keys}.
     */
    V[] values;

    // Reallocates both parallel arrays for the new capacity level.
    // "len" is inherited from HashBase and set by super.reset().
    @Override
    @SuppressWarnings("unchecked")
    protected void reset(int newLevel) {
        super.reset(newLevel);
        keys = new Value[len];
        values = (V[]) new Object[len];
    }

    // Re-inserts all live entries into a freshly sized table; tombstones are
    // dropped in the process.
    @Override
    protected void rehash(int newLevel) {
        Value[] oldKeys = keys;
        V[] oldValues = values;
        reset(newLevel);
        int len = oldKeys.length;
        for (int i = 0; i < len; i++) {
            Value k = oldKeys[i];
            if (k != null && k != ValueNull.DELETED) {
                // skip the checkSizePut so we don't end up
                // accidentally recursing
                internalPut(k, oldValues[i], false);
            }
        }
    }

    // Maps a key's hash code to a slot index.
    private int getIndex(Value key) {
        int h = key.hashCode();
        /*
         * Add some protection against hashes with the same less significant bits
         * (ValueDouble with integer values, for example).
         */
        return (h ^ h >>> 16) & mask;
    }

    /**
     * Add or update a key value pair.
     *
     * @param key the key
     * @param value the new value
     */
    public void put(Value key, V value) {
        checkSizePut();
        internalPut(key, value, false);
    }

    /**
     * Add a key value pair, values for existing keys are not replaced.
     *
     * @param key the key
     * @param value the new value
     */
    public void putIfAbsent(Value key, V value) {
        checkSizePut();
        internalPut(key, value, true);
    }

    /**
     * Insert or update an entry using open addressing.
     * <p>
     * The probe step grows by one on each collision (triangular probing).
     * NOTE(review): this visits every slot only when the table length is a
     * power of two (mask = len - 1) — presumably guaranteed by HashBase,
     * confirm there. The first tombstone seen on the probe path is remembered
     * and reused for the insert so lookup chains stay short.
     *
     * @param key the key
     * @param value the new value
     * @param ifAbsent if true, an existing mapping for the key is kept
     */
    private void internalPut(Value key, V value, boolean ifAbsent) {
        int index = getIndex(key);
        int plus = 1;
        int deleted = -1;
        do {
            Value k = keys[index];
            if (k == null) {
                // found an empty record
                if (deleted >= 0) {
                    index = deleted;
                    deletedCount--;
                }
                size++;
                keys[index] = key;
                values[index] = value;
                return;
            } else if (k == ValueNull.DELETED) {
                // found a deleted record
                if (deleted < 0) {
                    deleted = index;
                }
            } else if (k.equals(key)) {
                if (ifAbsent) {
                    return;
                }
                // update existing
                values[index] = value;
                return;
            }
            index = (index + plus++) & mask;
        } while (plus <= len);
        // no space
        DbException.throwInternalError("hashmap is full");
    }

    /**
     * Remove a key value pair. The slot is replaced with a tombstone rather
     * than cleared, so that other entries' probe chains are not broken.
     *
     * @param key the key
     */
    public void remove(Value key) {
        checkSizeRemove();
        int index = getIndex(key);
        int plus = 1;
        do {
            Value k = keys[index];
            if (k == null) {
                // found an empty record
                return;
            } else if (k == ValueNull.DELETED) {
                // found a deleted record
            } else if (k.equals(key)) {
                // found the record
                keys[index] = ValueNull.DELETED;
                values[index] = null;
                deletedCount++;
                size--;
                return;
            }
            index = (index + plus++) & mask;
        } while (plus <= len);
        // not found
    }

    /**
     * Get the value for this key. This method returns null if the key was not
     * found. Note that a null result is ambiguous when null values are stored
     * (as {@code put(key, null)} is allowed).
     *
     * @param key the key
     * @return the value for the given key
     */
    public V get(Value key) {
        int index = getIndex(key);
        int plus = 1;
        do {
            Value k = keys[index];
            if (k == null) {
                // found an empty record
                return null;
            } else if (k == ValueNull.DELETED) {
                // found a deleted record
            } else if (k.equals(key)) {
                // found it
                return values[index];
            }
            index = (index + plus++) & mask;
        } while (plus <= len);
        return null;
    }

    /**
     * Get the keys.
     *
     * @return all keys
     */
    public Iterable<Value> keys() {
        return new KeyIterable();
    }

    // Iterable view over live keys (skips empty and deleted slots).
    private final class KeyIterable implements Iterable<Value> {

        KeyIterable() {
        }

        @Override
        public Iterator<Value> iterator() {
            return new UnifiedIterator<>(false);
        }
    }

    /**
     * Gets all map's entries.
     *
     * @return all map's entries.
     */
    public Iterable<Map.Entry<Value, V>> entries() {
        return new EntryIterable();
    }

    // Iterable view over live entries (skips empty and deleted slots).
    private final class EntryIterable implements Iterable<Map.Entry<Value, V>> {

        EntryIterable() {
        }

        @Override
        public Iterator<Map.Entry<Value, V>> iterator() {
            return new UnifiedIterator<>(true);
        }
    }

    /**
     * Iterator over either keys or immutable entries, depending on the
     * constructor flag. Not fail-fast: concurrent modification of the map
     * during iteration is not detected.
     */
    final class UnifiedIterator<T> implements Iterator<T> {

        // Index of the last slot inspected; advanced before each read.
        int keysIndex = -1;
        // Number of live entries still to be returned.
        int left = size;

        private final boolean forEntries;

        UnifiedIterator(boolean forEntries) {
            this.forEntries = forEntries;
        }

        @Override
        public boolean hasNext() {
            return left > 0;
        }

        @SuppressWarnings("unchecked")
        @Override
        public T next() {
            if (left <= 0)
                throw new NoSuchElementException();
            left--;
            // Scan forward past empty slots and tombstones; "left" guarantees
            // a live entry is still ahead.
            for (;;) {
                keysIndex++;
                Value key = keys[keysIndex];
                if (key != null && key != ValueNull.DELETED) {
                    return (T) (forEntries ? new AbstractMap.SimpleImmutableEntry<>(key, values[keysIndex]) : key);
                }
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Get the list of values.
     *
     * @return all values
     */
    public ArrayList<V> values() {
        ArrayList<V> list = new ArrayList<>(size);
        int len = keys.length;
        for (int i = 0; i < len; i++) {
            Value k = keys[i];
            if (k != null && k != ValueNull.DELETED) {
                list.add(values[i]);
            }
        }
        return list;
    }

}
......@@ -1454,6 +1454,51 @@ public class DataType {
return type == Value.GEOMETRY || type == Value.ENUM;
}
/**
 * Check if the given type has total ordering.
 * <p>
 * NOTE(review): types absent from this list (e.g. DECIMAL, strings) appear
 * to allow distinct-but-equal values (such as 0 and 0.0, or
 * case-insensitively equal strings), so equality-based containers and
 * comparison-based containers could disagree for them — confirm against
 * callers before relying on this.
 *
 * @param type the value type
 * @return true if the value type has total ordering
 */
public static boolean hasTotalOrdering(int type) {
    switch (type) {
    case Value.BOOLEAN:
    case Value.BYTE:
    case Value.SHORT:
    case Value.INT:
    case Value.LONG:
    // Negative zeroes and NaNs are normalized
    case Value.DOUBLE:
    case Value.FLOAT:
    case Value.TIME:
    case Value.DATE:
    case Value.TIMESTAMP:
    case Value.BYTES:
    // Serialized data is compared
    case Value.JAVA_OBJECT:
    case Value.UUID:
    // EWKB is used
    case Value.GEOMETRY:
    case Value.ENUM:
    case Value.INTERVAL_YEAR:
    case Value.INTERVAL_MONTH:
    case Value.INTERVAL_DAY:
    case Value.INTERVAL_HOUR:
    case Value.INTERVAL_MINUTE:
    case Value.INTERVAL_SECOND:
    case Value.INTERVAL_YEAR_TO_MONTH:
    case Value.INTERVAL_DAY_TO_HOUR:
    case Value.INTERVAL_DAY_TO_MINUTE:
    case Value.INTERVAL_DAY_TO_SECOND:
    case Value.INTERVAL_HOUR_TO_MINUTE:
    case Value.INTERVAL_HOUR_TO_SECOND:
    case Value.INTERVAL_MINUTE_TO_SECOND:
        return true;
    default:
        return false;
    }
}
/**
* Check if the given value type supports the add operation.
*
......
......@@ -229,7 +229,6 @@ import org.h2.test.unit.TestTools;
import org.h2.test.unit.TestTraceSystem;
import org.h2.test.unit.TestUtils;
import org.h2.test.unit.TestValue;
import org.h2.test.unit.TestValueHashMap;
import org.h2.test.unit.TestValueMemory;
import org.h2.test.utils.OutputCatcher;
import org.h2.test.utils.SelfDestructor;
......@@ -994,7 +993,6 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
addTest(new TestStringUtils());
addTest(new TestTraceSystem());
addTest(new TestUtils());
addTest(new TestValueHashMap());
addTest(new TestLocalResultFactory());
runAddedTests();
......
......@@ -8,3 +8,100 @@ CREATE TABLE TEST(I NUMERIC(-1));
CREATE TABLE TEST(I NUMERIC(-1, -1));
> exception INVALID_VALUE_2
CREATE TABLE TEST (N NUMERIC) AS VALUES (0), (0.0), (NULL);
> ok
SELECT * FROM TEST;
> N
> ----
> 0
> 0.0
> null
> rows: 3
SELECT DISTINCT * FROM TEST;
> N
> ----
> 0
> null
> rows: 2
DROP TABLE TEST;
> ok
CREATE TABLE TEST (N NUMERIC) AS VALUES (0), (0.0), (2), (NULL);
> ok
CREATE INDEX TEST_IDX ON TEST(N);
> ok
SELECT N FROM TEST WHERE N IN (0.000, 0.00, 1.0);
> N
> ---
> 0
> 0.0
> rows: 2
SELECT N FROM TEST WHERE N IN (SELECT DISTINCT ON(B) A FROM VALUES (0.000, 1), (0.00, 2), (1.0, 3) T(A, B));
> N
> ---
> 0
> 0.0
> rows: 2
DROP INDEX TEST_IDX;
> ok
CREATE UNIQUE INDEX TEST_IDX ON TEST(N);
> exception DUPLICATE_KEY_1
DROP TABLE TEST;
> ok
CREATE MEMORY TABLE TEST(N NUMERIC) AS VALUES (0), (0.0), (2), (NULL);
> ok
CREATE HASH INDEX TEST_IDX ON TEST(N);
> ok
SELECT N FROM TEST WHERE N = 0;
> N
> ---
> 0
> 0.0
> rows: 2
DROP INDEX TEST_IDX;
> ok
CREATE UNIQUE HASH INDEX TEST_IDX ON TEST(N);
> exception DUPLICATE_KEY_1
DELETE FROM TEST WHERE N = 0 LIMIT 1;
> update count: 1
CREATE UNIQUE HASH INDEX TEST_IDX ON TEST(N);
> ok
SELECT 1 FROM TEST WHERE N = 0;
>> 1
INSERT INTO TEST VALUES (NULL);
> update count: 1
SELECT N FROM TEST WHERE N IS NULL;
> N
> ----
> null
> null
> rows: 2
DELETE FROM TEST WHERE N IS NULL LIMIT 1;
> update count: 1
SELECT N FROM TEST WHERE N IS NULL;
>> null
DROP TABLE TEST;
> ok
......@@ -550,3 +550,62 @@ VALUES 1, 2;
> 1
> 2
> rows: 2
SELECT * FROM (VALUES (1::BIGINT, 2)) T (A, B) WHERE (A, B) IN (VALUES(1, 2));
> A B
> - -
> 1 2
> rows: 1
SELECT * FROM (VALUES (1000000000000, 2)) T (A, B) WHERE (A, B) IN (VALUES(1, 2));
> A B
> - -
> rows: 0
SELECT * FROM (VALUES (1, 2)) T (A, B) WHERE (A, B) IN (VALUES(1::BIGINT, 2));
> A B
> - -
> 1 2
> rows: 1
SELECT * FROM (VALUES (1, 2)) T (A, B) WHERE (A, B) IN (VALUES(1000000000000, 2));
> A B
> - -
> rows: 0
CREATE TABLE TEST(A BIGINT, B INT) AS VALUES (1::BIGINT, 2);
> ok
SELECT * FROM TEST WHERE (A, B) IN ((1, 2), (3, 4));
> A B
> - -
> 1 2
> rows: 1
UPDATE TEST SET A = 1000000000000;
> update count: 1
SELECT * FROM TEST WHERE (A, B) IN ((1, 2), (3, 4));
> A B
> - -
> rows: 0
DROP TABLE TEST;
> ok
CREATE TABLE TEST(A BIGINT, B INT) AS VALUES (1, 2);
> ok
SELECT * FROM TEST WHERE (A, B) IN ((1::BIGINT, 2), (3, 4));
> A B
> - -
> 1 2
> rows: 1
SELECT * FROM TEST WHERE (A, B) IN ((1000000000000, 2), (3, 4));
> A B
> - -
> rows: 0
DROP TABLE TEST;
> ok
......@@ -104,3 +104,53 @@ SELECT X, COUNT(*) OVER (ORDER BY X) C FROM VALUES (1), (1), (2), (2), (3) V(X);
> 2 4
> 3 5
> rows: 5
CREATE TABLE TEST (N NUMERIC) AS VALUES (0), (0.0), (NULL);
> ok
SELECT COUNT(*) FROM TEST;
>> 3
SELECT COUNT(N) FROM TEST;
>> 2
SELECT COUNT(DISTINCT N) FROM TEST;
>> 1
SELECT COUNT(*) FROM TEST GROUP BY N;
> COUNT(*)
> --------
> 1
> 2
> rows: 2
SELECT COUNT(N) OVER (PARTITION BY N) C FROM TEST;
> C
> -
> 0
> 2
> 2
> rows: 3
DROP TABLE TEST;
> ok
CREATE TABLE TEST(A INT, B INT) AS (VALUES (1, NULL), (1, NULL), (2, NULL));
> ok
SELECT COUNT((A, B)) C, COUNT(DISTINCT (A, B)) CD FROM TEST;
> C CD
> - --
> 3 2
> rows: 1
SELECT COUNT(*) OVER (PARTITION BY A, B) C1, COUNT(*) OVER (PARTITION BY (A, B)) C2 FROM TEST;
> C1 C2
> -- --
> 1 1
> 2 2
> 2 2
> rows: 3
DROP TABLE TEST;
> ok
......@@ -57,3 +57,12 @@ SELECT MODE() WITHIN GROUP(ORDER BY V DESC) FROM TEST;
DROP TABLE TEST;
> ok
CREATE TABLE TEST (N NUMERIC) AS VALUES (0), (0.0), (NULL);
> ok
SELECT MODE(N) FROM TEST;
>> 0
DROP TABLE TEST;
> ok
/*
* Copyright 2004-2019 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.test.unit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Random;
import org.h2.api.JavaObjectSerializer;
import org.h2.store.DataHandler;
import org.h2.store.FileStore;
import org.h2.store.LobStorageBackend;
import org.h2.test.TestBase;
import org.h2.util.SmallLRUCache;
import org.h2.util.TempFileDeleter;
import org.h2.util.ValueHashMap;
import org.h2.value.CompareMode;
import org.h2.value.Value;
import org.h2.value.ValueDouble;
import org.h2.value.ValueInt;
/**
* Tests the value hash map.
*/
public class TestValueHashMap extends TestBase implements DataHandler {
CompareMode compareMode = CompareMode.getInstance(null, 0);
/**
* Run just this test.
*
* @param a ignored
*/
public static void main(String... a) throws Exception {
TestBase.createCaller().init().test();
}
@Override
public void test() {
testNotANumber();
testRandomized();
}
private void testNotANumber() {
ValueHashMap<Integer> map = new ValueHashMap<>();
for (int i = 1; i < 100; i++) {
double d = Double.longBitsToDouble(0x7ff0000000000000L | i);
ValueDouble v = ValueDouble.get(d);
map.put(v, null);
assertEquals(1, map.size());
}
}
private void testRandomized() {
ValueHashMap<Value> map = new ValueHashMap<>();
HashMap<Value, Value> hash = new HashMap<>();
Random random = new Random(1);
Comparator<Value> vc = new Comparator<Value>() {
@Override
public int compare(Value v1, Value v2) {
return v1.compareTo(v2, null, compareMode);
}
};
for (int i = 0; i < 10000; i++) {
int op = random.nextInt(10);
Value key = ValueInt.get(random.nextInt(100));
Value value = ValueInt.get(random.nextInt(100));
switch (op) {
case 0:
map.put(key, value);
hash.put(key, value);
break;
case 1:
map.remove(key);
hash.remove(key);
break;
case 2:
Value v1 = map.get(key);
Value v2 = hash.get(key);
assertTrue(v1 == null ? v2 == null : v1.equals(v2));
break;
case 3: {
ArrayList<Value> a1 = new ArrayList<>();
for (Value v : map.keys()) {
a1.add(v);
}
ArrayList<Value> a2 = new ArrayList<>(hash.keySet());
assertEquals(a1.size(), a2.size());
Collections.sort(a1, vc);
Collections.sort(a2, vc);
for (int j = 0; j < a1.size(); j++) {
assertTrue(a1.get(j).equals(a2.get(j)));
}
break;
}
case 4:
ArrayList<Value> a1 = map.values();
ArrayList<Value> a2 = new ArrayList<>(hash.values());
assertEquals(a1.size(), a2.size());
Collections.sort(a1, vc);
Collections.sort(a2, vc);
for (int j = 0; j < a1.size(); j++) {
assertTrue(a1.get(j).equals(a2.get(j)));
}
break;
default:
}
}
}
@Override
public String getDatabasePath() {
return null;
}
@Override
public FileStore openFile(String name, String mode, boolean mustExist) {
return null;
}
@Override
public void checkPowerOff() {
// nothing to do
}
@Override
public void checkWritingAllowed() {
// nothing to do
}
@Override
public int getMaxLengthInplaceLob() {
return 0;
}
@Override
public String getLobCompressionAlgorithm(int type) {
return null;
}
@Override
public Object getLobSyncObject() {
return this;
}
@Override
public SmallLRUCache<String, String[]> getLobFileListCache() {
return null;
}
@Override
public TempFileDeleter getTempFileDeleter() {
return TempFileDeleter.getInstance();
}
@Override
public LobStorageBackend getLobStorage() {
return null;
}
@Override
public int readLob(long lobId, byte[] hmac, long offset, byte[] buff,
int off, int length) {
return -1;
}
@Override
public JavaObjectSerializer getJavaObjectSerializer() {
return null;
}
@Override
public CompareMode getCompareMode() {
return compareMode;
}
}
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论