Commit 19f45ea6, authored by Evgenij Ryazanov, committed by GitHub

Merge pull request #1683 from katzyn/result

Fix issues with IN(), DISTINCT, GROUP BY, PARTITION BY and others
@@ -641,10 +641,12 @@ ON tableName ( indexColumn [,...] )
Creates a new index.
This command commits an open transaction in this connection.
-Hash indexes are meant for in-memory databases and memory tables (CREATE MEMORY TABLE).
+Hash indexes are meant for in-memory databases and memory tables (CREATE MEMORY TABLE) when PageStore engine is used.
For other tables, or if the index contains multiple columns, the HASH keyword is ignored.
-Hash indexes can only test for equality, and do not support range queries (similar to a hash table).
+Hash indexes can only test for equality, do not support range queries (similar to a hash table), use more memory,
+but can perform lookups faster.
Non-unique keys are supported.
Spatial indexes are supported only on Geometry columns.
","
CREATE INDEX IDXNAME ON TEST(NAME)
...
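For context, a minimal JDBC sketch of the feature this help text describes; the in-memory URL, table and index names are illustrative and not part of the change:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HashIndexExample {
    public static void main(String[] args) throws Exception {
        // In-memory database: the case hash indexes are intended for.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:test");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE MEMORY TABLE TEST(ID INT, NAME VARCHAR)");
            stat.execute("INSERT INTO TEST VALUES (1, 'Hello'), (2, 'World')");
            // HASH limits the index to equality tests; it does not support range queries.
            stat.execute("CREATE UNIQUE HASH INDEX IDX_TEST_ID ON TEST(ID)");
            try (ResultSet rs = stat.executeQuery("SELECT NAME FROM TEST WHERE ID = 2")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }
}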
@@ -21,6 +21,14 @@ Change Log
<h2>Next Version (unreleased)</h2>
<ul>
+<li>Issue #1681: IN () doesn't work with row values when data types are not exactly the same
+</li>
+<li>Issue #1320: OOME / GC overhead in IndexCursor.nextCursor()
+</li>
+<li>PR #1680: Assorted fixes for ALTER TABLE ALTER COLUMN
+</li>
+<li>PR #1679: Use TestScript for testSimple
+</li>
<li>Issue #1677: Unable to use VALUES keyword in WHERE clause
</li>
<li>Issue #1672: Deadlock on MVStore close in TestOutOfMemory
...
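For reference, a minimal sketch of the kind of statement issue #1681 covers: a row value IN() comparison where the literal types are similar to, but not exactly the same as, the column types. The table and values are made up for illustration; after this fix the row is expected to match.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class RowValueInExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:test");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(A BIGINT, B NUMERIC(10, 2))");
            stat.execute("INSERT INTO TEST VALUES (1, 2.00)");
            // Left side is (BIGINT, NUMERIC), right side uses plain integer literals,
            // so the data types are similar but not exactly the same.
            try (ResultSet rs = stat.executeQuery(
                    "SELECT * FROM TEST WHERE (A, B) IN ((1, 2), (3, 4))")) {
                System.out.println(rs.next()); // expected: true
            }
        }
    }
}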
@@ -278,6 +278,10 @@ public class MergeUsing extends Prepared {
    private String queryAlias;
    private int countUpdatedRows;
    private Select targetMatchQuery;
+    /**
+     * Contains mappings between _ROWID_ and ROW_NUMBER for processed rows. Row
+     * identities are remembered to prevent duplicate updates of the same row.
+     */
    private final HashMap<Value, Integer> targetRowidsRemembered = new HashMap<>();
    private int sourceQueryRowNumber;
...
@@ -319,6 +319,15 @@ public abstract class Query extends Prepared {
        return distinct;
    }

+    /**
+     * Returns whether results support random access.
+     *
+     * @return whether results support random access
+     */
+    public boolean isRandomAccessResult() {
+        return randomAccessResult;
+    }
+
    /**
     * Whether results need to support random access.
     *
...
@@ -51,8 +51,8 @@ import org.h2.util.StatementBuilder;
import org.h2.util.StringUtils;
import org.h2.util.Utils;
import org.h2.value.Value;
-import org.h2.value.ValueArray;
import org.h2.value.ValueNull;
+import org.h2.value.ValueRow;

/**
 * This class represents a simple SELECT statement.
@@ -499,7 +499,7 @@ public class Select extends Query {
    }

    private void processGroupResult(int columnCount, LocalResult result, long offset, boolean quickOffset) {
-        for (ValueArray currentGroupsKey; (currentGroupsKey = groupData.next()) != null;) {
+        for (ValueRow currentGroupsKey; (currentGroupsKey = groupData.next()) != null;) {
            Value[] keyValues = currentGroupsKey.getList();
            Value[] row = new Value[columnCount];
            for (int j = 0; groupIndex != null && j < groupIndex.length; j++) {
...
@@ -11,14 +11,14 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.TreeMap;

import org.h2.engine.Session;
import org.h2.expression.Expression;
import org.h2.expression.analysis.DataAnalysisOperation;
import org.h2.expression.analysis.PartitionData;
-import org.h2.util.ValueHashMap;
import org.h2.value.Value;
-import org.h2.value.ValueArray;
+import org.h2.value.ValueRow;

/**
 * Grouped data for aggregates.
@@ -51,18 +51,18 @@ public abstract class SelectGroups {
        /**
         * Map of group-by key to group-by expression data e.g. AggregateData
         */
-        private HashMap<ValueArray, Object[]> groupByData;
+        private TreeMap<ValueRow, Object[]> groupByData;

        /**
         * Key into groupByData that produces currentGroupByExprData. Not used
         * in lazy mode.
         */
-        private ValueArray currentGroupsKey;
+        private ValueRow currentGroupsKey;

        /**
         * Cursor for {@link #next()} method.
         */
-        private Iterator<Entry<ValueArray, Object[]>> cursor;
+        private Iterator<Entry<ValueRow, Object[]>> cursor;

        Grouped(Session session, ArrayList<Expression> expressions, int[] groupIndex) {
            super(session, expressions);
@@ -72,7 +72,7 @@ public abstract class SelectGroups {
        @Override
        public void reset() {
            super.reset();
-            groupByData = new HashMap<>();
+            groupByData = new TreeMap<>(session.getDatabase().getCompareMode());
            currentGroupsKey = null;
            cursor = null;
        }
@@ -80,7 +80,7 @@ public abstract class SelectGroups {
        @Override
        public void nextSource() {
            if (groupIndex == null) {
-                currentGroupsKey = ValueArray.getEmpty();
+                currentGroupsKey = ValueRow.getEmpty();
            } else {
                Value[] keyValues = new Value[groupIndex.length];
                // update group
@@ -89,7 +89,7 @@ public abstract class SelectGroups {
                    Expression expr = expressions.get(idx);
                    keyValues[i] = expr.getValue(session);
                }
-                currentGroupsKey = ValueArray.get(keyValues);
+                currentGroupsKey = ValueRow.get(keyValues);
            }
            Object[] values = groupByData.get(currentGroupsKey);
            if (values == null) {
@@ -114,15 +114,15 @@ public abstract class SelectGroups {
        public void done() {
            super.done();
            if (groupIndex == null && groupByData.size() == 0) {
-                groupByData.put(ValueArray.getEmpty(), createRow());
+                groupByData.put(ValueRow.getEmpty(), createRow());
            }
            cursor = groupByData.entrySet().iterator();
        }

        @Override
-        public ValueArray next() {
+        public ValueRow next() {
            if (cursor.hasNext()) {
-                Map.Entry<ValueArray, Object[]> entry = cursor.next();
+                Map.Entry<ValueRow, Object[]> entry = cursor.next();
                currentGroupByExprData = entry.getValue();
                currentGroupRowId++;
                return entry.getKey();
@@ -184,11 +184,11 @@ public abstract class SelectGroups {
        }

        @Override
-        public ValueArray next() {
+        public ValueRow next() {
            if (cursor.hasNext()) {
                currentGroupByExprData = cursor.next();
                currentGroupRowId++;
-                return ValueArray.getEmpty();
+                return ValueRow.getEmpty();
            }
            return null;
        }
@@ -223,7 +223,7 @@ public abstract class SelectGroups {
    /**
     * Maps an partitioned window expression object to its data.
     */
-    private final HashMap<DataAnalysisOperation, ValueHashMap<PartitionData>> windowPartitionData = new HashMap<>();
+    private final HashMap<DataAnalysisOperation, TreeMap<Value, PartitionData>> windowPartitionData = new HashMap<>();

    /**
     * The id of the current group.
@@ -324,7 +324,7 @@ public abstract class SelectGroups {
        if (partitionKey == null) {
            return windowData.get(expr);
        } else {
-            ValueHashMap<PartitionData> map = windowPartitionData.get(expr);
+            TreeMap<Value, PartitionData> map = windowPartitionData.get(expr);
            return map != null ? map.get(partitionKey) : null;
        }
    }
@@ -344,9 +344,9 @@ public abstract class SelectGroups {
            Object old = windowData.put(expr, obj);
            assert old == null;
        } else {
-            ValueHashMap<PartitionData> map = windowPartitionData.get(expr);
+            TreeMap<Value, PartitionData> map = windowPartitionData.get(expr);
            if (map == null) {
-                map = new ValueHashMap<>();
+                map = new TreeMap<>(session.getDatabase().getCompareMode());
                windowPartitionData.put(expr, map);
            }
            map.put(partitionKey, obj);
@@ -397,7 +397,7 @@ public abstract class SelectGroups {
     *
     * @return the key of the next group, or null
     */
-    public abstract ValueArray next();
+    public abstract ValueRow next();

    /**
     * Removes the data for the current key.
...
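The motivation for moving group and partition keys from hash-based maps to TreeMap with the database CompareMode can be shown outside of H2: keys that compare as equal but hash differently must still land in the same group. A standalone Java sketch (not H2 code) using BigDecimal, which behaves exactly this way in plain Java:

import java.math.BigDecimal;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class GroupKeyExample {
    public static void main(String[] args) {
        // 1.0 and 1.00 are equal by compareTo() but have different hashCode().
        BigDecimal a = new BigDecimal("1.0");
        BigDecimal b = new BigDecimal("1.00");

        // A hash-based map sees two distinct group keys.
        Map<BigDecimal, Integer> hashGroups = new HashMap<>();
        hashGroups.merge(a, 1, Integer::sum);
        hashGroups.merge(b, 1, Integer::sum);
        System.out.println(hashGroups.size()); // 2

        // A comparator-based map collapses them into one group,
        // which is the behaviour GROUP BY / PARTITION BY needs.
        Map<BigDecimal, Integer> sortedGroups = new TreeMap<>(BigDecimal::compareTo);
        sortedGroups.merge(a, 1, Integer::sum);
        sortedGroups.merge(b, 1, Integer::sum);
        System.out.println(sortedGroups.size()); // 1
    }
}

The same reasoning applies to the TreeSet and TreeMap changes in the aggregate and condition classes below.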
@@ -10,6 +10,7 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map.Entry;
+import java.util.TreeMap;
import org.h2.api.ErrorCode;
import org.h2.command.dml.Select;
import org.h2.command.dml.SelectOrderBy;
@@ -32,7 +33,6 @@ import org.h2.table.ColumnResolver;
import org.h2.table.Table;
import org.h2.table.TableFilter;
import org.h2.util.StatementBuilder;
-import org.h2.util.ValueHashMap;
import org.h2.value.CompareMode;
import org.h2.value.DataType;
import org.h2.value.TypeInfo;
@@ -303,7 +303,7 @@ public class Aggregate extends AbstractAggregate {
                v = updateCollecting(session, v, remembered);
            }
        }
-        data.add(session.getDatabase(), type.getValueType(), v);
+        data.add(session.getDatabase(), v);
    }

    @Override
@@ -376,7 +376,7 @@ public class Aggregate extends AbstractAggregate {
    @Override
    protected Object createAggregateData() {
-        return AggregateData.create(aggregateType, distinct);
+        return AggregateData.create(aggregateType, distinct, type.getValueType());
    }

    @Override
@@ -440,11 +440,11 @@ public class Aggregate extends AbstractAggregate {
        if (c.getCount() == 0) {
            return ValueNull.INSTANCE;
        }
-        AggregateDataDefault d = new AggregateDataDefault(aggregateType);
+        AggregateDataDefault d = new AggregateDataDefault(aggregateType, type.getValueType());
        Database db = session.getDatabase();
        int dataType = type.getValueType();
        for (Value v : c) {
-            d.add(db, dataType, v);
+            d.add(db, v);
        }
        return d.getValue(db, dataType);
    }
@@ -512,13 +512,13 @@ public class Aggregate extends AbstractAggregate {
    }

    private Value getHistogram(Session session, AggregateData data) {
-        ValueHashMap<LongDataCounter> distinctValues = ((AggregateDataDistinctWithCounts) data).getValues();
+        TreeMap<Value, LongDataCounter> distinctValues = ((AggregateDataDistinctWithCounts) data).getValues();
        if (distinctValues == null) {
            return ValueArray.getEmpty();
        }
        ValueArray[] values = new ValueArray[distinctValues.size()];
        int i = 0;
-        for (Entry<Value, LongDataCounter> entry : distinctValues.entries()) {
+        for (Entry<Value, LongDataCounter> entry : distinctValues.entrySet()) {
            LongDataCounter d = entry.getValue();
            values[i] = ValueArray.get(new Value[] { entry.getKey(), ValueLong.get(distinct ? 1L : d.count) });
            i++;
@@ -539,14 +539,14 @@ public class Aggregate extends AbstractAggregate {
    private Value getMode(Session session, AggregateData data) {
        Value v = ValueNull.INSTANCE;
-        ValueHashMap<LongDataCounter> distinctValues = ((AggregateDataDistinctWithCounts) data).getValues();
+        TreeMap<Value, LongDataCounter> distinctValues = ((AggregateDataDistinctWithCounts) data).getValues();
        if (distinctValues == null) {
            return v;
        }
        long count = 0L;
        if (orderByList != null) {
            boolean desc = (orderByList.get(0).sortType & SortOrder.DESCENDING) != 0;
-            for (Entry<Value, LongDataCounter> entry : distinctValues.entries()) {
+            for (Entry<Value, LongDataCounter> entry : distinctValues.entrySet()) {
                long c = entry.getValue().count;
                if (c > count) {
                    v = entry.getKey();
@@ -565,7 +565,7 @@ public class Aggregate extends AbstractAggregate {
                }
            }
        } else {
-            for (Entry<Value, LongDataCounter> entry : distinctValues.entries()) {
+            for (Entry<Value, LongDataCounter> entry : distinctValues.entrySet()) {
                long c = entry.getValue().count;
                if (c > count) {
                    v = entry.getKey();
...
@@ -21,9 +21,10 @@ abstract class AggregateData {
     *
     * @param aggregateType the type of the aggregate operation
     * @param distinct if the calculation should be distinct
+     * @param dataType the data type of the computed result
     * @return the aggregate data object of the specified type
     */
-    static AggregateData create(AggregateType aggregateType, boolean distinct) {
+    static AggregateData create(AggregateType aggregateType, boolean distinct, int dataType) {
        switch (aggregateType) {
        case COUNT_ALL:
            return new AggregateDataCount(true);
@@ -42,7 +43,7 @@ abstract class AggregateData {
        case BIT_AND:
        case ANY:
        case EVERY:
-            return new AggregateDataDefault(aggregateType);
+            return new AggregateDataDefault(aggregateType, dataType);
        case SUM:
        case AVG:
        case STDDEV_POP:
@@ -50,7 +51,7 @@ abstract class AggregateData {
        case VAR_POP:
        case VAR_SAMP:
            if (!distinct) {
-                return new AggregateDataDefault(aggregateType);
+                return new AggregateDataDefault(aggregateType, dataType);
            }
            break;
        case SELECTIVITY:
@@ -71,10 +72,9 @@ abstract class AggregateData {
     * Add a value to this aggregate.
     *
     * @param database the database
-     * @param dataType the datatype of the computed result
     * @param v the value
     */
-    abstract void add(Database database, int dataType, Value v);
+    abstract void add(Database database, Value v);

    /**
     * Get the aggregate result.
...
@@ -8,8 +8,8 @@ package org.h2.expression.aggregate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
-import java.util.HashSet;
import java.util.Iterator;
+import java.util.TreeSet;

import org.h2.engine.Database;
import org.h2.value.Value;
@@ -41,13 +41,13 @@ class AggregateDataCollecting extends AggregateData implements Iterable<Value> {
    }

    @Override
-    void add(Database database, int dataType, Value v) {
+    void add(Database database, Value v) {
        if (v == ValueNull.INSTANCE) {
            return;
        }
        Collection<Value> c = values;
        if (c == null) {
-            values = c = distinct ? new HashSet<Value>() : new ArrayList<Value>();
+            values = c = distinct ? new TreeSet<>(database.getCompareMode()) : new ArrayList<Value>();
        }
        c.add(v);
    }
...
@@ -24,7 +24,7 @@ class AggregateDataCount extends AggregateData {
    }

    @Override
-    void add(Database database, int dataType, Value v) {
+    void add(Database database, Value v) {
        if (all || v != ValueNull.INSTANCE) {
            count++;
        }
...
@@ -21,19 +21,22 @@ import org.h2.value.ValueNull;
class AggregateDataDefault extends AggregateData {

    private final AggregateType aggregateType;
+    private final int dataType;
    private long count;
    private Value value;
    private double m2, mean;

    /**
     * @param aggregateType the type of the aggregate operation
+     * @param dataType the data type of the computed result
     */
-    AggregateDataDefault(AggregateType aggregateType) {
+    AggregateDataDefault(AggregateType aggregateType, int dataType) {
        this.aggregateType = aggregateType;
+        this.dataType = dataType;
    }

    @Override
-    void add(Database database, int dataType, Value v) {
+    void add(Database database, Value v) {
        if (v == ValueNull.INSTANCE) {
            return;
        }
...
@@ -5,8 +5,8 @@
 */
package org.h2.expression.aggregate;

+import java.util.TreeMap;
import org.h2.engine.Database;
-import org.h2.util.ValueHashMap;
import org.h2.value.Value;
import org.h2.value.ValueNull;
@@ -20,7 +20,7 @@ class AggregateDataDistinctWithCounts extends AggregateData {

    private final int maxDistinctCount;

-    private ValueHashMap<LongDataCounter> values;
+    private TreeMap<Value, LongDataCounter> values;

    /**
     * Creates new instance of data for aggregate that needs distinct values
@@ -37,12 +37,12 @@ class AggregateDataDistinctWithCounts extends AggregateData {
    }

    @Override
-    void add(Database database, int dataType, Value v) {
+    void add(Database database, Value v) {
        if (ignoreNulls && v == ValueNull.INSTANCE) {
            return;
        }
        if (values == null) {
-            values = new ValueHashMap<>();
+            values = new TreeMap<>(database.getCompareMode());
        }
        LongDataCounter a = values.get(v);
        if (a == null) {
@@ -65,7 +65,7 @@ class AggregateDataDistinctWithCounts extends AggregateData {
     *
     * @return map with values and their counts
     */
-    ValueHashMap<LongDataCounter> getValues() {
+    TreeMap<Value, LongDataCounter> getValues() {
        return values;
    }
...
@@ -57,7 +57,7 @@ class AggregateDataEnvelope extends AggregateData {
    }

    @Override
-    void add(Database database, int dataType, Value v) {
+    void add(Database database, Value v) {
        if (v == ValueNull.INSTANCE) {
            return;
        }
...
@@ -32,7 +32,7 @@ class AggregateDataSelectivity extends AggregateData {
    }

    @Override
-    void add(Database database, int dataType, Value v) {
+    void add(Database database, Value v) {
        count++;
        if (distinctHashes == null) {
            distinctHashes = new IntIntHashMap();
...
@@ -20,9 +20,9 @@ import org.h2.table.TableFilter;
import org.h2.value.DataType;
import org.h2.value.TypeInfo;
import org.h2.value.Value;
-import org.h2.value.ValueArray;
import org.h2.value.ValueBoolean;
import org.h2.value.ValueNull;
+import org.h2.value.ValueRow;

/**
 * This class wraps a user-defined aggregate.
@@ -152,7 +152,7 @@ public class JavaAggregate extends AbstractAggregate {
            if (args.length == 1) {
                agg.add(value.getObject());
            } else {
-                Value[] values = ((ValueArray) value).getList();
+                Value[] values = ((ValueRow) value).getList();
                Object[] argValues = new Object[args.length];
                for (int i = 0, len = args.length; i < len; i++) {
                    argValues[i] = values[i].getObject();
@@ -193,7 +193,7 @@ public class JavaAggregate extends AbstractAggregate {
                arg = arg.convertTo(argTypes[i]);
                argValues[i] = arg;
            }
-            data.add(session.getDatabase(), dataType, args.length == 1 ? arg : ValueArray.get(argValues));
+            data.add(session.getDatabase(), args.length == 1 ? arg : ValueRow.get(argValues));
        } else {
            Aggregate agg = (Aggregate) aggregateData;
            Object[] argValues = new Object[args.length];
...
@@ -16,7 +16,7 @@ import org.h2.result.SortOrder;
import org.h2.table.ColumnResolver;
import org.h2.table.TableFilter;
import org.h2.value.Value;
-import org.h2.value.ValueArray;
+import org.h2.value.ValueRow;

/**
 * Window clause.
@@ -206,7 +206,7 @@ public final class Window {
            Expression expr = partitionBy.get(i);
            keyValues[i] = expr.getValue(session);
        }
-        return ValueArray.get(keyValues);
+        return ValueRow.get(keyValues);
    }

}
...
@@ -26,14 +26,13 @@ import org.h2.value.ValueNull;
/**
 * Used for optimised IN(...) queries where the contents of the IN list are all
 * constant and of the same type.
- * <p>
- * Checking using a HashSet is has time complexity O(1), instead of O(n) for
- * checking using an array.
 */
public class ConditionInConstantSet extends Condition {

    private Expression left;
    private final ArrayList<Expression> valueList;
+    // HashSet cannot be used here, because we need to compare values of
+    // different type or scale properly.
    private final TreeSet<Value> valueSet;
    private boolean hasNull;
    private final TypeInfo type;
...
@@ -5,7 +5,12 @@
 */
package org.h2.index;

+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TreeMap;
import org.h2.command.dml.AllColumnsForPlan;
+import org.h2.engine.Mode.UniqueIndexNullsHandling;
import org.h2.engine.Session;
import org.h2.message.DbException;
import org.h2.result.Row;
@@ -15,8 +20,9 @@ import org.h2.table.Column;
import org.h2.table.IndexColumn;
import org.h2.table.RegularTable;
import org.h2.table.TableFilter;
-import org.h2.util.ValueHashMap;
+import org.h2.value.DataType;
import org.h2.value.Value;
+import org.h2.value.ValueNull;

/**
 * An unique index based on an in-memory hash map.
@@ -27,20 +33,22 @@ public class HashIndex extends BaseIndex {
     * The index of the indexed column.
     */
    private final int indexColumn;
+    private final boolean totalOrdering;
    private final RegularTable tableData;
-    private ValueHashMap<Long> rows;
+    private Map<Value, Long> rows;
+    private final ArrayList<Long> nullRows = new ArrayList<>();

-    public HashIndex(RegularTable table, int id, String indexName,
-            IndexColumn[] columns, IndexType indexType) {
+    public HashIndex(RegularTable table, int id, String indexName, IndexColumn[] columns, IndexType indexType) {
        super(table, id, indexName, columns, indexType);
-        this.indexColumn = columns[0].column.getColumnId();
+        Column column = columns[0].column;
+        indexColumn = column.getColumnId();
+        totalOrdering = DataType.hasTotalOrdering(column.getType().getValueType());
        this.tableData = table;
        reset();
    }

    private void reset() {
-        rows = new ValueHashMap<>();
+        rows = totalOrdering ? new HashMap<Value, Long>() : new TreeMap<Value, Long>(database.getCompareMode());
    }

    @Override
@@ -51,17 +59,28 @@ public class HashIndex extends BaseIndex {
    @Override
    public void add(Session session, Row row) {
        Value key = row.getValue(indexColumn);
-        Object old = rows.get(key);
-        if (old != null) {
-            // TODO index duplicate key for hash indexes: is this allowed?
-            throw getDuplicateKeyException(key.toString());
+        if (key != ValueNull.INSTANCE
+                || database.getMode().uniqueIndexNullsHandling == UniqueIndexNullsHandling.FORBID_ANY_DUPLICATES) {
+            Object old = rows.get(key);
+            if (old != null) {
+                // TODO index duplicate key for hash indexes: is this allowed?
+                throw getDuplicateKeyException(key.toString());
+            }
+            rows.put(key, row.getKey());
+        } else {
+            nullRows.add(row.getKey());
        }
-        rows.put(key, row.getKey());
    }

    @Override
    public void remove(Session session, Row row) {
-        rows.remove(row.getValue(indexColumn));
+        Value key = row.getValue(indexColumn);
+        if (key != ValueNull.INSTANCE
+                || database.getMode().uniqueIndexNullsHandling == UniqueIndexNullsHandling.FORBID_ANY_DUPLICATES) {
+            rows.remove(key);
+        } else {
+            nullRows.remove(row.getKey());
+        }
    }

    @Override
@@ -71,6 +90,10 @@ public class HashIndex extends BaseIndex {
            throw DbException.throwInternalError(first + " " + last);
        }
        Value v = first.getValue(indexColumn);
+        if (v == ValueNull.INSTANCE
+                && database.getMode().uniqueIndexNullsHandling != UniqueIndexNullsHandling.FORBID_ANY_DUPLICATES) {
+            return new NonUniqueHashCursor(session, tableData, nullRows);
+        }
        /*
         * Sometimes the incoming search is a similar, but not the same type
         * e.g. the search value is INT, but the index column is LONG. In which
@@ -90,12 +113,12 @@ public class HashIndex extends BaseIndex {
    @Override
    public long getRowCount(Session session) {
-        return rows.size();
+        return getRowCountApproximation();
    }

    @Override
    public long getRowCountApproximation() {
-        return rows.size();
+        return rows.size() + nullRows.size();
    }

    @Override
...
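The null handling introduced above follows from the fact that, by default, a unique index allows any number of rows with a NULL key (only compatibility modes with FORBID_ANY_DUPLICATES disallow that), so NULL keys are kept out of the uniqueness map and tracked separately. A distilled, non-H2 sketch of that storage split:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class UniqueIndexNullsExample {
    // Non-null keys must be unique, so they go into a map keyed by value.
    private final Map<Integer, Long> rows = new HashMap<>();
    // NULL keys are not compared for uniqueness; just remember their row ids.
    private final List<Long> nullRows = new ArrayList<>();

    void add(Integer key, long rowId) {
        if (key != null) {
            if (rows.putIfAbsent(key, rowId) != null) {
                throw new IllegalStateException("duplicate key: " + key);
            }
        } else {
            nullRows.add(rowId);
        }
    }

    long rowCount() {
        return rows.size() + nullRows.size();
    }

    public static void main(String[] args) {
        UniqueIndexNullsExample index = new UniqueIndexNullsExample();
        index.add(1, 100L);
        index.add(null, 101L);
        index.add(null, 102L); // several NULL rows are allowed in a unique index
        System.out.println(index.rowCount()); // 3
    }
}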
@@ -7,8 +7,8 @@ package org.h2.index;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashSet;
import java.util.List;
+import java.util.TreeSet;

import org.h2.command.dml.Query;
import org.h2.engine.Session;
import org.h2.expression.Expression;
@@ -123,8 +123,8 @@ public class IndexCondition {
     * @return the index condition
     */
    public static IndexCondition getInQuery(ExpressionColumn column, Query query) {
-        IndexCondition cond = new IndexCondition(Comparison.IN_QUERY, column,
-                null);
+        assert query.isRandomAccessResult();
+        IndexCondition cond = new IndexCondition(Comparison.IN_QUERY, column, null);
        cond.expressionQuery = query;
        return cond;
    }
@@ -147,7 +147,7 @@ public class IndexCondition {
     * @return the value list
     */
    public Value[] getCurrentValueList(Session session) {
-        HashSet<Value> valueSet = new HashSet<>();
+        TreeSet<Value> valueSet = new TreeSet<>(session.getDatabase().getCompareMode());
        for (Expression e : expressionList) {
            Value v = e.getValue(session);
            v = column.convert(v);
...
@@ -6,7 +6,6 @@
package org.h2.index;

import java.util.ArrayList;
-import java.util.HashSet;

import org.h2.engine.Session;
import org.h2.expression.condition.Comparison;
@@ -45,7 +44,6 @@ public class IndexCursor implements Cursor {
    private int inListIndex;
    private Value[] inList;
    private ResultInterface inResult;
-    private HashSet<Value> inResultTested;

    public IndexCursor(TableFilter filter) {
        this.tableFilter = filter;
@@ -79,7 +77,6 @@ public class IndexCursor implements Cursor {
        inList = null;
        inColumn = null;
        inResult = null;
-        inResultTested = null;
        intersects = null;
        for (IndexCondition condition : indexConditions) {
            if (condition.isAlwaysFalse()) {
@@ -311,13 +308,8 @@ public class IndexCursor implements Cursor {
            while (inResult.next()) {
                Value v = inResult.currentRow()[0];
                if (v != ValueNull.INSTANCE) {
-                    if (inResultTested == null) {
-                        inResultTested = new HashSet<>();
-                    }
-                    if (inResultTested.add(v)) {
-                        find(v);
-                        break;
-                    }
+                    find(v);
+                    break;
                }
            }
        }
...
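Issue #1320 was caused by remembering every value produced by the IN(SELECT ...) result in a HashSet, which grew with the size of the sub-query result. The fix relies on that result being distinct (see the new assert in IndexCondition.getInQuery), so each value can be consumed directly. A distilled, non-H2 sketch of the two approaches:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

public class DistinctSourceExample {

    // Old approach: memoize every value ever seen; memory grows with the result.
    static void processWithMemo(Iterator<String> source) {
        Set<String> tested = new HashSet<>();
        while (source.hasNext()) {
            String v = source.next();
            if (tested.add(v)) {
                find(v);
            }
        }
    }

    // New approach: the source is already distinct, so no memo is needed.
    static void processDistinct(Iterator<String> source) {
        while (source.hasNext()) {
            find(source.next());
        }
    }

    static void find(String v) {
        System.out.println("lookup " + v);
    }

    public static void main(String[] args) {
        processWithMemo(Arrays.asList("a", "b", "a").iterator());
        processDistinct(Arrays.asList("a", "b").iterator());
    }
}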
@@ -6,6 +6,9 @@
package org.h2.index;

import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TreeMap;

import org.h2.command.dml.AllColumnsForPlan;
import org.h2.engine.Session;
@@ -18,7 +21,7 @@ import org.h2.table.IndexColumn;
import org.h2.table.RegularTable;
import org.h2.table.TableFilter;
import org.h2.util.Utils;
-import org.h2.util.ValueHashMap;
+import org.h2.value.DataType;
import org.h2.value.Value;

/**
@@ -32,20 +35,24 @@ public class NonUniqueHashIndex extends BaseIndex {
     * The index of the indexed column.
     */
    private final int indexColumn;
-    private ValueHashMap<ArrayList<Long>> rows;
+    private final boolean totalOrdering;
+    private Map<Value, ArrayList<Long>> rows;
    private final RegularTable tableData;
    private long rowCount;

    public NonUniqueHashIndex(RegularTable table, int id, String indexName,
            IndexColumn[] columns, IndexType indexType) {
        super(table, id, indexName, columns, indexType);
-        this.indexColumn = columns[0].column.getColumnId();
-        this.tableData = table;
+        Column column = columns[0].column;
+        indexColumn = column.getColumnId();
+        totalOrdering = DataType.hasTotalOrdering(column.getType().getValueType());
+        tableData = table;
        reset();
    }

    private void reset() {
-        rows = new ValueHashMap<>();
+        rows = totalOrdering ? new HashMap<Value, ArrayList<Long>>()
+                : new TreeMap<Value, ArrayList<Long>>(database.getCompareMode());
        rowCount = 0;
    }
...
@@ -13,7 +13,7 @@ import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVMap.Builder;
import org.h2.result.ResultExternal;
import org.h2.value.Value;
-import org.h2.value.ValueArray;
+import org.h2.value.ValueRow;

/**
 * Plain temporary result.
@@ -23,7 +23,7 @@ class MVPlainTempResult extends MVTempResult {
    /**
     * Map with identities of rows as keys rows as values.
     */
-    private final MVMap<Long, ValueArray> map;
+    private final MVMap<Long, ValueRow> map;

    /**
     * Counter for the identities of rows. A separate counter is used instead of
@@ -35,7 +35,7 @@ class MVPlainTempResult extends MVTempResult {
    /**
     * Cursor for the {@link #next()} method.
     */
-    private Cursor<Long, ValueArray> cursor;
+    private Cursor<Long, ValueRow> cursor;

    /**
     * Creates a shallow copy of the result.
@@ -61,7 +61,7 @@ class MVPlainTempResult extends MVTempResult {
    MVPlainTempResult(Database database, Expression[] expressions, int visibleColumnCount) {
        super(database, expressions.length, visibleColumnCount);
        ValueDataType valueType = new ValueDataType(database, new int[columnCount]);
-        Builder<Long, ValueArray> builder = new MVMap.Builder<Long, ValueArray>()
+        Builder<Long, ValueRow> builder = new MVMap.Builder<Long, ValueRow>()
                .valueType(valueType).singleWriter();
        map = store.openMap("tmp", builder);
    }
@@ -69,7 +69,7 @@ class MVPlainTempResult extends MVTempResult {
    @Override
    public int addRow(Value[] values) {
        assert parent == null;
-        map.append(counter++, ValueArray.get(values));
+        map.append(counter++, ValueRow.get(values));
        return ++rowCount;
    }
...
@@ -17,7 +17,7 @@ import org.h2.mvstore.MVMap.Builder;
import org.h2.result.ResultExternal;
import org.h2.result.SortOrder;
import org.h2.value.Value;
-import org.h2.value.ValueArray;
+import org.h2.value.ValueRow;

/**
 * Sorted temporary result.
@@ -48,7 +48,7 @@ class MVSortedTempResult extends MVTempResult {
     * Map with rows as keys and counts of duplicate rows as values. If this map is
     * distinct all values are 1.
     */
-    private final MVMap<ValueArray, Long> map;
+    private final MVMap<ValueRow, Long> map;

    /**
     * Optional index. This index is created only if result is distinct and
@@ -56,12 +56,12 @@ class MVSortedTempResult extends MVTempResult {
     * {@link #contains(Value[])} method is invoked. Only the root result should
     * have an index if required.
     */
-    private MVMap<ValueArray, Boolean> index;
+    private MVMap<ValueRow, Boolean> index;

    /**
     * Cursor for the {@link #next()} method.
     */
-    private Cursor<ValueArray, Long> cursor;
+    private Cursor<ValueRow, Long> cursor;

    /**
     * Current value for the {@link #next()} method. Used in non-distinct results
@@ -167,12 +167,12 @@ class MVSortedTempResult extends MVTempResult {
        }
        this.indexes = indexes;
        ValueDataType keyType = new ValueDataType(database, sortTypes);
-        Builder<ValueArray, Long> builder = new MVMap.Builder<ValueArray, Long>().keyType(keyType);
+        Builder<ValueRow, Long> builder = new MVMap.Builder<ValueRow, Long>().keyType(keyType);
        map = store.openMap("tmp", builder);
        if (distinct && length != visibleColumnCount || distinctIndexes != null) {
            int count = distinctIndexes != null ? distinctIndexes.length : visibleColumnCount;
            ValueDataType distinctType = new ValueDataType(database, new int[count]);
-            Builder<ValueArray, Boolean> indexBuilder = new MVMap.Builder<ValueArray, Boolean>().keyType(distinctType);
+            Builder<ValueRow, Boolean> indexBuilder = new MVMap.Builder<ValueRow, Boolean>().keyType(distinctType);
            index = store.openMap("idx", indexBuilder);
        }
    }
@@ -180,7 +180,7 @@ class MVSortedTempResult extends MVTempResult {
    @Override
    public int addRow(Value[] values) {
        assert parent == null;
-        ValueArray key = getKey(values);
+        ValueRow key = getKey(values);
        if (distinct || distinctIndexes != null) {
            if (distinctIndexes != null) {
                int cnt = distinctIndexes.length;
@@ -188,12 +188,12 @@ class MVSortedTempResult extends MVTempResult {
                for (int i = 0; i < cnt; i++) {
                    newValues[i] = values[distinctIndexes[i]];
                }
-                ValueArray distinctRow = ValueArray.get(newValues);
+                ValueRow distinctRow = ValueRow.get(newValues);
                if (index.putIfAbsent(distinctRow, true) != null) {
                    return rowCount;
                }
            } else if (columnCount != visibleColumnCount) {
-                ValueArray distinctRow = ValueArray.get(Arrays.copyOf(values, visibleColumnCount));
+                ValueRow distinctRow = ValueRow.get(Arrays.copyOf(values, visibleColumnCount));
                if (index.putIfAbsent(distinctRow, true) != null) {
                    return rowCount;
                }
@@ -222,7 +222,7 @@ class MVSortedTempResult extends MVTempResult {
        }
        assert distinct;
        if (columnCount != visibleColumnCount) {
-            return index.containsKey(ValueArray.get(values));
+            return index.containsKey(ValueRow.get(values));
        }
        return map.containsKey(getKey(values));
    }
@@ -240,13 +240,13 @@ class MVSortedTempResult extends MVTempResult {
    }

    /**
-     * Reorder values if required and convert them into {@link ValueArray}.
+     * Reorder values if required and convert them into {@link ValueRow}.
     *
     * @param values
     *            values
-     * @return ValueArray for maps
+     * @return ValueRow for maps
     */
-    private ValueArray getKey(Value[] values) {
+    private ValueRow getKey(Value[] values) {
        if (indexes != null) {
            Value[] r = new Value[indexes.length];
            for (int i = 0; i < indexes.length; i++) {
@@ -254,7 +254,7 @@ class MVSortedTempResult extends MVTempResult {
            }
            values = r;
        }
-        return ValueArray.get(values);
+        return ValueRow.get(values);
    }

    /**
...
@@ -7,7 +7,7 @@ package org.h2.mvstore.db;

import java.io.IOException;
import java.lang.ref.Reference;
-import java.util.ArrayList;
+import java.util.Collection;

import org.h2.engine.Constants;
import org.h2.engine.Database;
@@ -178,7 +178,7 @@ public abstract class MVTempResult implements ResultExternal {
    }

    @Override
-    public int addRows(ArrayList<Value[]> rows) {
+    public int addRows(Collection<Value[]> rows) {
        for (Value[] row : rows) {
            addRow(row);
        }
...
@@ -7,6 +7,7 @@ package org.h2.result;

import java.util.ArrayList;
import java.util.Arrays;
+import java.util.TreeMap;

import org.h2.engine.Database;
import org.h2.engine.Session;
import org.h2.engine.SessionInterface;
@@ -14,10 +15,9 @@ import org.h2.expression.Expression;
import org.h2.message.DbException;
import org.h2.mvstore.db.MVTempResult;
import org.h2.util.Utils;
-import org.h2.util.ValueHashMap;
import org.h2.value.TypeInfo;
import org.h2.value.Value;
-import org.h2.value.ValueArray;
+import org.h2.value.ValueRow;

/**
 * A local result set contains all row data of a result set.
@@ -34,7 +34,9 @@ public class LocalResultImpl implements LocalResult {
    private int rowId, rowCount;
    private ArrayList<Value[]> rows;
    private SortOrder sort;
-    private ValueHashMap<Value[]> distinctRows;
+    // HashSet cannot be used here, because we need to compare values of
+    // different type or scale properly.
+    private TreeMap<Value, Value[]> distinctRows;
    private Value[] currentRow;
    private int offset;
    private int limit = -1;
@@ -145,7 +147,7 @@ public class LocalResultImpl implements LocalResult {
    public void setDistinct() {
        assert distinctIndexes == null;
        distinct = true;
-        distinctRows = new ValueHashMap<>();
+        distinctRows = new TreeMap<>(session.getDatabase().getCompareMode());
    }

    /**
@@ -157,7 +159,7 @@ public class LocalResultImpl implements LocalResult {
    public void setDistinct(int[] distinctIndexes) {
        assert !distinct;
        this.distinctIndexes = distinctIndexes;
-        distinctRows = new ValueHashMap<>();
+        distinctRows = new TreeMap<>(session.getDatabase().getCompareMode());
    }

    /**
@@ -179,7 +181,7 @@ public class LocalResultImpl implements LocalResult {
        }
        assert values.length == visibleColumnCount;
        if (distinctRows != null) {
-            ValueArray array = ValueArray.get(values);
+            ValueRow array = ValueRow.get(values);
            distinctRows.remove(array);
            rowCount = distinctRows.size();
        } else {
@@ -200,13 +202,13 @@ public class LocalResultImpl implements LocalResult {
            return external.contains(values);
        }
        if (distinctRows == null) {
-            distinctRows = new ValueHashMap<>();
+            distinctRows = new TreeMap<>(session.getDatabase().getCompareMode());
            for (Value[] row : rows) {
-                ValueArray array = getArrayOfDistinct(row);
+                ValueRow array = getDistinctRow(row);
                distinctRows.put(array, array.getList());
            }
        }
-        ValueArray array = ValueArray.get(values);
+        ValueRow array = ValueRow.get(values);
        return distinctRows.get(array) != null;
    }
@@ -284,7 +286,7 @@ public class LocalResultImpl implements LocalResult {
        }
    }

-    private ValueArray getArrayOfDistinct(Value[] values) {
+    private ValueRow getDistinctRow(Value[] values) {
        if (distinctIndexes != null) {
            int cnt = distinctIndexes.length;
            Value[] newValues = new Value[cnt];
@@ -295,7 +297,7 @@ public class LocalResultImpl implements LocalResult {
        } else if (values.length > visibleColumnCount) {
            values = Arrays.copyOf(values, visibleColumnCount);
        }
-        return ValueArray.get(values);
+        return ValueRow.get(values);
    }

    private void createExternalResult() {
@@ -317,8 +319,10 @@ public class LocalResultImpl implements LocalResult {
        cloneLobs(values);
        if (isAnyDistinct()) {
            if (distinctRows != null) {
-                ValueArray array = getArrayOfDistinct(values);
-                distinctRows.putIfAbsent(array, values);
+                ValueRow array = getDistinctRow(values);
+                if (!distinctRows.containsKey(array)) {
+                    distinctRows.put(array, values);
+                }
                rowCount = distinctRows.size();
                if (rowCount > maxMemoryRows) {
                    createExternalResult();
@@ -359,7 +363,7 @@ public class LocalResultImpl implements LocalResult {
            addRowsToDisk();
        } else {
            if (isAnyDistinct()) {
-                rows = distinctRows.values();
+                rows = new ArrayList<>(distinctRows.values());
            }
            if (sort != null && limit != 0 && !limitsWereApplied) {
                boolean withLimit = limit > 0 && withTiesSortOrder == null;
...
@@ -5,7 +5,7 @@
 */
package org.h2.result;

-import java.util.ArrayList;
+import java.util.Collection;
import org.h2.value.Value;

/**
@@ -40,7 +40,7 @@ public interface ResultExternal {
     * @param rows the list of rows to add
     * @return the new number of rows in this object
     */
-    int addRows(ArrayList<Value[]> rows);
+    int addRows(Collection<Value[]> rows);

    /**
     * Close this object and delete the temporary file.
...
@@ -6,9 +6,9 @@
package org.h2.result;

import java.lang.ref.Reference;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
+import java.util.Collection;

import org.h2.command.ddl.CreateTableData;
import org.h2.engine.Constants;
@@ -263,11 +263,7 @@ public class ResultTempTable implements ResultExternal {
    }

    @Override
-    public int addRows(ArrayList<Value[]> rows) {
-        // speeds up inserting, but not really needed:
-        if (sort != null) {
-            sort.sort(rows);
-        }
+    public int addRows(Collection<Value[]> rows) {
        for (Value[] values : rows) {
            addRow(values);
        }
...
/*
* Copyright 2004-2019 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.util;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import org.h2.message.DbException;
import org.h2.value.Value;
import org.h2.value.ValueNull;
/**
* This hash map supports keys of type Value.
* <p>
* ValueHashMap is a very simple implementation without allocation of additional
* objects for entries. It's very fast with good distribution of hashes, but if
* hashes have a lot of collisions this implementation tends to be very slow.
* <p>
* HashMap in archaic versions of Java have some overhead for allocation of
* entries, but slightly better behaviour with limited number of collisions,
* because collisions have no impact on non-colliding entries. HashMap in modern
* versions of Java also have the same overhead, but it builds a trees of keys
* with colliding hashes, that's why even if the all keys have exactly the same
* hash code it still offers a good performance similar to TreeMap. So
* ValueHashMap is faster in typical cases, but may behave really bad in some
* cases. HashMap is slower in typical cases, but its performance does not
* degrade too much even in the worst possible case (if keys are comparable).
*
* @param <V> the value type
*/
public class ValueHashMap<V> extends HashBase {
/**
* Keys array.
*/
Value[] keys;
/**
* Values array.
*/
V[] values;
@Override
@SuppressWarnings("unchecked")
protected void reset(int newLevel) {
super.reset(newLevel);
keys = new Value[len];
values = (V[]) new Object[len];
}
@Override
protected void rehash(int newLevel) {
Value[] oldKeys = keys;
V[] oldValues = values;
reset(newLevel);
int len = oldKeys.length;
for (int i = 0; i < len; i++) {
Value k = oldKeys[i];
if (k != null && k != ValueNull.DELETED) {
// skip the checkSizePut so we don't end up
// accidentally recursing
internalPut(k, oldValues[i], false);
}
}
}
private int getIndex(Value key) {
int h = key.hashCode();
/*
* Add some protection against hashes with the same least significant bits
* (ValueDouble with integer values, for example).
*/
return (h ^ h >>> 16) & mask;
}
/**
* Add or update a key value pair.
*
* @param key the key
* @param value the new value
*/
public void put(Value key, V value) {
checkSizePut();
internalPut(key, value, false);
}
/**
* Add a key value pair; values for existing keys are not replaced.
*
* @param key the key
* @param value the new value
*/
public void putIfAbsent(Value key, V value) {
checkSizePut();
internalPut(key, value, true);
}
private void internalPut(Value key, V value, boolean ifAbsent) {
int index = getIndex(key);
int plus = 1;
int deleted = -1;
do {
Value k = keys[index];
if (k == null) {
// found an empty record
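// reuse the first tombstone seen on this probe path, if any,
// so deleted slots do not accumulate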
if (deleted >= 0) {
index = deleted;
deletedCount--;
}
size++;
keys[index] = key;
values[index] = value;
return;
} else if (k == ValueNull.DELETED) {
// found a deleted record
if (deleted < 0) {
deleted = index;
}
} else if (k.equals(key)) {
if (ifAbsent) {
return;
}
// update existing
values[index] = value;
return;
}
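// no match in this slot: advance by an increasing step (1, 2, 3, ...);
// with a power-of-two table size this probe sequence eventually
// visits every slot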
index = (index + plus++) & mask;
} while (plus <= len);
// no space
DbException.throwInternalError("hashmap is full");
}
/**
* Remove a key value pair.
*
* @param key the key
*/
public void remove(Value key) {
checkSizeRemove();
int index = getIndex(key);
int plus = 1;
do {
Value k = keys[index];
if (k == null) {
// found an empty record
return;
} else if (k == ValueNull.DELETED) {
// found a deleted record
} else if (k.equals(key)) {
// found the record
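// replace the key with a tombstone instead of null so that probe
// chains passing through this slot are not broken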
keys[index] = ValueNull.DELETED;
values[index] = null;
deletedCount++;
size--;
return;
}
index = (index + plus++) & mask;
} while (plus <= len);
// not found
}
/**
* Get the value for this key. This method returns null if the key was not
* found.
*
* @param key the key
* @return the value for the given key
*/
public V get(Value key) {
int index = getIndex(key);
int plus = 1;
do {
Value k = keys[index];
if (k == null) {
// found an empty record
return null;
} else if (k == ValueNull.DELETED) {
// found a deleted record
} else if (k.equals(key)) {
// found it
return values[index];
}
index = (index + plus++) & mask;
} while (plus <= len);
return null;
}
/**
* Get the keys.
*
* @return all keys
*/
public Iterable<Value> keys() {
return new KeyIterable();
}
private final class KeyIterable implements Iterable<Value> {
KeyIterable() {
}
@Override
public Iterator<Value> iterator() {
return new UnifiedIterator<>(false);
}
}
/**
* Gets all entries of this map.
*
* @return all entries of this map
*/
public Iterable<Map.Entry<Value, V>> entries() {
return new EntryIterable();
}
private final class EntryIterable implements Iterable<Map.Entry<Value, V>> {
EntryIterable() {
}
@Override
public Iterator<Map.Entry<Value, V>> iterator() {
return new UnifiedIterator<>(true);
}
}
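/**
 * Iterates over live slots only, skipping empty and deleted entries, and
 * returns either bare keys or immutable map entries depending on the
 * forEntries flag.
 */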
final class UnifiedIterator<T> implements Iterator<T> {
int keysIndex = -1;
int left = size;
private final boolean forEntries;
UnifiedIterator(boolean forEntries) {
this.forEntries = forEntries;
}
@Override
public boolean hasNext() {
return left > 0;
}
@SuppressWarnings("unchecked")
@Override
public T next() {
if (left <= 0)
throw new NoSuchElementException();
left--;
for (;;) {
keysIndex++;
Value key = keys[keysIndex];
if (key != null && key != ValueNull.DELETED) {
return (T) (forEntries ? new AbstractMap.SimpleImmutableEntry<>(key, values[keysIndex]) : key);
}
}
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
/**
* Get the list of values.
*
* @return all values
*/
public ArrayList<V> values() {
ArrayList<V> list = new ArrayList<>(size);
int len = keys.length;
for (int i = 0; i < len; i++) {
Value k = keys[i];
if (k != null && k != ValueNull.DELETED) {
list.add(values[i]);
}
}
return list;
}
}
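The class above is used only through the small API it exposes (put, putIfAbsent, get, remove, keys, entries, values). A minimal usage sketch, assuming the map is instantiated directly with its no-argument constructor as TestValueHashMap further below does, and using ValueInt keys purely for illustration:
import java.util.Map;
import org.h2.util.ValueHashMap;
import org.h2.value.Value;
import org.h2.value.ValueInt;

public class ValueHashMapSketch {
    public static void main(String[] args) {
        ValueHashMap<String> map = new ValueHashMap<>();
        map.put(ValueInt.get(1), "one");
        map.putIfAbsent(ValueInt.get(1), "ignored"); // key exists, value kept
        map.put(ValueInt.get(2), "two");
        System.out.println(map.get(ValueInt.get(1))); // prints: one
        map.remove(ValueInt.get(2));
        // iterate over the remaining live entries
        for (Map.Entry<Value, String> e : map.entries()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
    }
}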
...@@ -1454,6 +1454,51 @@ public class DataType { ...@@ -1454,6 +1454,51 @@ public class DataType {
return type == Value.GEOMETRY || type == Value.ENUM; return type == Value.GEOMETRY || type == Value.ENUM;
} }
/**
* Check if the given type has total ordering.
*
* @param type the value type
* @return true if the value type has total ordering
*/
public static boolean hasTotalOrdering(int type) {
switch (type) {
case Value.BOOLEAN:
case Value.BYTE:
case Value.SHORT:
case Value.INT:
case Value.LONG:
// Negative zeroes and NaNs are normalized
case Value.DOUBLE:
case Value.FLOAT:
case Value.TIME:
case Value.DATE:
case Value.TIMESTAMP:
case Value.BYTES:
// Serialized data is compared
case Value.JAVA_OBJECT:
case Value.UUID:
// EWKB is used
case Value.GEOMETRY:
case Value.ENUM:
case Value.INTERVAL_YEAR:
case Value.INTERVAL_MONTH:
case Value.INTERVAL_DAY:
case Value.INTERVAL_HOUR:
case Value.INTERVAL_MINUTE:
case Value.INTERVAL_SECOND:
case Value.INTERVAL_YEAR_TO_MONTH:
case Value.INTERVAL_DAY_TO_HOUR:
case Value.INTERVAL_DAY_TO_MINUTE:
case Value.INTERVAL_DAY_TO_SECOND:
case Value.INTERVAL_HOUR_TO_MINUTE:
case Value.INTERVAL_HOUR_TO_SECOND:
case Value.INTERVAL_MINUTE_TO_SECOND:
return true;
default:
return false;
}
}
/** /**
* Check if the given value type supports the add operation. * Check if the given value type supports the add operation.
* *
......
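hasTotalOrdering() returns true only for types where values that compare equal are also identical. DECIMAL, for example, is left out: 0 and 0.0 compare equal yet carry different scales, which is exactly the pair the DISTINCT tests below exercise. A minimal sketch with plain java.math.BigDecimal (a hypothetical demo, not part of the patch) showing the mismatch between compareTo() and equals()/hashCode():
import java.math.BigDecimal;

public class TotalOrderingDemo {
    public static void main(String[] args) {
        BigDecimal a = new BigDecimal("0");
        BigDecimal b = new BigDecimal("0.0");
        // compareTo() ignores the scale and treats the values as equal
        System.out.println(a.compareTo(b)); // 0
        // equals() and hashCode() take the scale into account,
        // so the two "equal" values are still distinguishable
        System.out.println(a.equals(b)); // false
        System.out.println(a.hashCode() + " " + b.hashCode()); // different
    }
}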
...@@ -229,7 +229,6 @@ import org.h2.test.unit.TestTools; ...@@ -229,7 +229,6 @@ import org.h2.test.unit.TestTools;
import org.h2.test.unit.TestTraceSystem; import org.h2.test.unit.TestTraceSystem;
import org.h2.test.unit.TestUtils; import org.h2.test.unit.TestUtils;
import org.h2.test.unit.TestValue; import org.h2.test.unit.TestValue;
import org.h2.test.unit.TestValueHashMap;
import org.h2.test.unit.TestValueMemory; import org.h2.test.unit.TestValueMemory;
import org.h2.test.utils.OutputCatcher; import org.h2.test.utils.OutputCatcher;
import org.h2.test.utils.SelfDestructor; import org.h2.test.utils.SelfDestructor;
...@@ -994,7 +993,6 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1` ...@@ -994,7 +993,6 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
addTest(new TestStringUtils()); addTest(new TestStringUtils());
addTest(new TestTraceSystem()); addTest(new TestTraceSystem());
addTest(new TestUtils()); addTest(new TestUtils());
addTest(new TestValueHashMap());
addTest(new TestLocalResultFactory()); addTest(new TestLocalResultFactory());
runAddedTests(); runAddedTests();
......
...@@ -8,3 +8,100 @@ CREATE TABLE TEST(I NUMERIC(-1)); ...@@ -8,3 +8,100 @@ CREATE TABLE TEST(I NUMERIC(-1));
CREATE TABLE TEST(I NUMERIC(-1, -1)); CREATE TABLE TEST(I NUMERIC(-1, -1));
> exception INVALID_VALUE_2 > exception INVALID_VALUE_2
CREATE TABLE TEST (N NUMERIC) AS VALUES (0), (0.0), (NULL);
> ok
SELECT * FROM TEST;
> N
> ----
> 0
> 0.0
> null
> rows: 3
SELECT DISTINCT * FROM TEST;
> N
> ----
> 0
> null
> rows: 2
DROP TABLE TEST;
> ok
CREATE TABLE TEST (N NUMERIC) AS VALUES (0), (0.0), (2), (NULL);
> ok
CREATE INDEX TEST_IDX ON TEST(N);
> ok
SELECT N FROM TEST WHERE N IN (0.000, 0.00, 1.0);
> N
> ---
> 0
> 0.0
> rows: 2
SELECT N FROM TEST WHERE N IN (SELECT DISTINCT ON(B) A FROM VALUES (0.000, 1), (0.00, 2), (1.0, 3) T(A, B));
> N
> ---
> 0
> 0.0
> rows: 2
DROP INDEX TEST_IDX;
> ok
CREATE UNIQUE INDEX TEST_IDX ON TEST(N);
> exception DUPLICATE_KEY_1
DROP TABLE TEST;
> ok
CREATE MEMORY TABLE TEST(N NUMERIC) AS VALUES (0), (0.0), (2), (NULL);
> ok
CREATE HASH INDEX TEST_IDX ON TEST(N);
> ok
SELECT N FROM TEST WHERE N = 0;
> N
> ---
> 0
> 0.0
> rows: 2
DROP INDEX TEST_IDX;
> ok
CREATE UNIQUE HASH INDEX TEST_IDX ON TEST(N);
> exception DUPLICATE_KEY_1
DELETE FROM TEST WHERE N = 0 LIMIT 1;
> update count: 1
CREATE UNIQUE HASH INDEX TEST_IDX ON TEST(N);
> ok
SELECT 1 FROM TEST WHERE N = 0;
>> 1
INSERT INTO TEST VALUES (NULL);
> update count: 1
SELECT N FROM TEST WHERE N IS NULL;
> N
> ----
> null
> null
> rows: 2
DELETE FROM TEST WHERE N IS NULL LIMIT 1;
> update count: 1
SELECT N FROM TEST WHERE N IS NULL;
>> null
DROP TABLE TEST;
> ok
...@@ -550,3 +550,62 @@ VALUES 1, 2; ...@@ -550,3 +550,62 @@ VALUES 1, 2;
> 1 > 1
> 2 > 2
> rows: 2 > rows: 2
SELECT * FROM (VALUES (1::BIGINT, 2)) T (A, B) WHERE (A, B) IN (VALUES(1, 2));
> A B
> - -
> 1 2
> rows: 1
SELECT * FROM (VALUES (1000000000000, 2)) T (A, B) WHERE (A, B) IN (VALUES(1, 2));
> A B
> - -
> rows: 0
SELECT * FROM (VALUES (1, 2)) T (A, B) WHERE (A, B) IN (VALUES(1::BIGINT, 2));
> A B
> - -
> 1 2
> rows: 1
SELECT * FROM (VALUES (1, 2)) T (A, B) WHERE (A, B) IN (VALUES(1000000000000, 2));
> A B
> - -
> rows: 0
CREATE TABLE TEST(A BIGINT, B INT) AS VALUES (1::BIGINT, 2);
> ok
SELECT * FROM TEST WHERE (A, B) IN ((1, 2), (3, 4));
> A B
> - -
> 1 2
> rows: 1
UPDATE TEST SET A = 1000000000000;
> update count: 1
SELECT * FROM TEST WHERE (A, B) IN ((1, 2), (3, 4));
> A B
> - -
> rows: 0
DROP TABLE TEST;
> ok
CREATE TABLE TEST(A BIGINT, B INT) AS VALUES (1, 2);
> ok
SELECT * FROM TEST WHERE (A, B) IN ((1::BIGINT, 2), (3, 4));
> A B
> - -
> 1 2
> rows: 1
SELECT * FROM TEST WHERE (A, B) IN ((1000000000000, 2), (3, 4));
> A B
> - -
> rows: 0
DROP TABLE TEST;
> ok
...@@ -104,3 +104,53 @@ SELECT X, COUNT(*) OVER (ORDER BY X) C FROM VALUES (1), (1), (2), (2), (3) V(X); ...@@ -104,3 +104,53 @@ SELECT X, COUNT(*) OVER (ORDER BY X) C FROM VALUES (1), (1), (2), (2), (3) V(X);
> 2 4 > 2 4
> 3 5 > 3 5
> rows: 5 > rows: 5
CREATE TABLE TEST (N NUMERIC) AS VALUES (0), (0.0), (NULL);
> ok
SELECT COUNT(*) FROM TEST;
>> 3
SELECT COUNT(N) FROM TEST;
>> 2
SELECT COUNT(DISTINCT N) FROM TEST;
>> 1
SELECT COUNT(*) FROM TEST GROUP BY N;
> COUNT(*)
> --------
> 1
> 2
> rows: 2
SELECT COUNT(N) OVER (PARTITION BY N) C FROM TEST;
> C
> -
> 0
> 2
> 2
> rows: 3
DROP TABLE TEST;
> ok
CREATE TABLE TEST(A INT, B INT) AS (VALUES (1, NULL), (1, NULL), (2, NULL));
> ok
SELECT COUNT((A, B)) C, COUNT(DISTINCT (A, B)) CD FROM TEST;
> C CD
> - --
> 3 2
> rows: 1
SELECT COUNT(*) OVER (PARTITION BY A, B) C1, COUNT(*) OVER (PARTITION BY (A, B)) C2 FROM TEST;
> C1 C2
> -- --
> 1 1
> 2 2
> 2 2
> rows: 3
DROP TABLE TEST;
> ok
...@@ -57,3 +57,12 @@ SELECT MODE() WITHIN GROUP(ORDER BY V DESC) FROM TEST; ...@@ -57,3 +57,12 @@ SELECT MODE() WITHIN GROUP(ORDER BY V DESC) FROM TEST;
DROP TABLE TEST; DROP TABLE TEST;
> ok > ok
CREATE TABLE TEST (N NUMERIC) AS VALUES (0), (0.0), (NULL);
> ok
SELECT MODE(N) FROM TEST;
>> 0
DROP TABLE TEST;
> ok
/*
* Copyright 2004-2019 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.test.unit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Random;
import org.h2.api.JavaObjectSerializer;
import org.h2.store.DataHandler;
import org.h2.store.FileStore;
import org.h2.store.LobStorageBackend;
import org.h2.test.TestBase;
import org.h2.util.SmallLRUCache;
import org.h2.util.TempFileDeleter;
import org.h2.util.ValueHashMap;
import org.h2.value.CompareMode;
import org.h2.value.Value;
import org.h2.value.ValueDouble;
import org.h2.value.ValueInt;
/**
* Tests the value hash map.
*/
public class TestValueHashMap extends TestBase implements DataHandler {
CompareMode compareMode = CompareMode.getInstance(null, 0);
/**
* Run just this test.
*
* @param a ignored
*/
public static void main(String... a) throws Exception {
TestBase.createCaller().init().test();
}
@Override
public void test() {
testNotANumber();
testRandomized();
}
private void testNotANumber() {
ValueHashMap<Integer> map = new ValueHashMap<>();
for (int i = 1; i < 100; i++) {
double d = Double.longBitsToDouble(0x7ff0000000000000L | i);
ValueDouble v = ValueDouble.get(d);
map.put(v, null);
assertEquals(1, map.size());
}
}
private void testRandomized() {
ValueHashMap<Value> map = new ValueHashMap<>();
HashMap<Value, Value> hash = new HashMap<>();
Random random = new Random(1);
Comparator<Value> vc = new Comparator<Value>() {
@Override
public int compare(Value v1, Value v2) {
return v1.compareTo(v2, null, compareMode);
}
};
for (int i = 0; i < 10000; i++) {
int op = random.nextInt(10);
Value key = ValueInt.get(random.nextInt(100));
Value value = ValueInt.get(random.nextInt(100));
switch (op) {
case 0:
map.put(key, value);
hash.put(key, value);
break;
case 1:
map.remove(key);
hash.remove(key);
break;
case 2:
Value v1 = map.get(key);
Value v2 = hash.get(key);
assertTrue(v1 == null ? v2 == null : v1.equals(v2));
break;
case 3: {
ArrayList<Value> a1 = new ArrayList<>();
for (Value v : map.keys()) {
a1.add(v);
}
ArrayList<Value> a2 = new ArrayList<>(hash.keySet());
assertEquals(a1.size(), a2.size());
Collections.sort(a1, vc);
Collections.sort(a2, vc);
for (int j = 0; j < a1.size(); j++) {
assertTrue(a1.get(j).equals(a2.get(j)));
}
break;
}
case 4:
ArrayList<Value> a1 = map.values();
ArrayList<Value> a2 = new ArrayList<>(hash.values());
assertEquals(a1.size(), a2.size());
Collections.sort(a1, vc);
Collections.sort(a2, vc);
for (int j = 0; j < a1.size(); j++) {
assertTrue(a1.get(j).equals(a2.get(j)));
}
break;
default:
}
}
}
@Override
public String getDatabasePath() {
return null;
}
@Override
public FileStore openFile(String name, String mode, boolean mustExist) {
return null;
}
@Override
public void checkPowerOff() {
// nothing to do
}
@Override
public void checkWritingAllowed() {
// nothing to do
}
@Override
public int getMaxLengthInplaceLob() {
return 0;
}
@Override
public String getLobCompressionAlgorithm(int type) {
return null;
}
@Override
public Object getLobSyncObject() {
return this;
}
@Override
public SmallLRUCache<String, String[]> getLobFileListCache() {
return null;
}
@Override
public TempFileDeleter getTempFileDeleter() {
return TempFileDeleter.getInstance();
}
@Override
public LobStorageBackend getLobStorage() {
return null;
}
@Override
public int readLob(long lobId, byte[] hmac, long offset, byte[] buff,
int off, int length) {
return -1;
}
@Override
public JavaObjectSerializer getJavaObjectSerializer() {
return null;
}
@Override
public CompareMode getCompareMode() {
return compareMode;
}
}