Unverified commit 5b87daab, authored by Evgenij Ryazanov, committed by GitHub

Merge pull request #1464 from katzyn/window

Assorted minor changes in window processing code
......@@ -15,6 +15,8 @@ import java.util.Map.Entry;
import org.h2.engine.Session;
import org.h2.expression.Expression;
import org.h2.expression.aggregate.DataAnalysisOperation;
import org.h2.expression.aggregate.PartitionData;
import org.h2.util.ValueHashMap;
import org.h2.value.Value;
import org.h2.value.ValueArray;
......@@ -62,12 +64,6 @@ public abstract class SelectGroups {
*/
private Iterator<Entry<ValueArray, Object[]>> cursor;
/**
* The key for the default group.
*/
// Can be static, but TestClearReferences complains about it
private final ValueArray defaultGroup = ValueArray.get(new Value[0]);
Grouped(Session session, ArrayList<Expression> expressions, int[] groupIndex) {
super(session, expressions);
this.groupIndex = groupIndex;
......@@ -84,7 +80,7 @@ public abstract class SelectGroups {
@Override
public void nextSource() {
if (groupIndex == null) {
currentGroupsKey = defaultGroup;
currentGroupsKey = ValueArray.getEmpty();
} else {
Value[] keyValues = new Value[groupIndex.length];
// update group
......@@ -97,7 +93,7 @@ public abstract class SelectGroups {
}
Object[] values = groupByData.get(currentGroupsKey);
if (values == null) {
values = new Object[Math.max(exprToIndexInGroupByData.size(), expressions.size())];
values = createRow();
groupByData.put(currentGroupsKey, values);
}
currentGroupByExprData = values;
......@@ -118,8 +114,7 @@ public abstract class SelectGroups {
public void done() {
super.done();
if (groupIndex == null && groupByData.size() == 0) {
groupByData.put(defaultGroup,
new Object[Math.max(exprToIndexInGroupByData.size(), expressions.size())]);
groupByData.put(ValueArray.getEmpty(), createRow());
}
cursor = groupByData.entrySet().iterator();
}
......@@ -164,7 +159,7 @@ public abstract class SelectGroups {
@Override
public void nextSource() {
Object[] values = new Object[Math.max(exprToIndexInGroupByData.size(), expressions.size())];
Object[] values = createRow();
rows.add(values);
currentGroupByExprData = values;
currentGroupRowId++;
......@@ -184,10 +179,9 @@ public abstract class SelectGroups {
@Override
public ValueArray next() {
if (cursor.hasNext()) {
Object[] values = cursor.next();
currentGroupByExprData = values;
currentGroupByExprData = cursor.next();
currentGroupRowId++;
return ValueArray.get(new Value[0]);
return ValueArray.getEmpty();
}
return null;
}
......@@ -206,12 +200,17 @@ public abstract class SelectGroups {
* Maps an expression object to an index, to use in accessing the Object[]
* pointed to by groupByData.
*/
final HashMap<Expression, Integer> exprToIndexInGroupByData = new HashMap<>();
private final HashMap<Expression, Integer> exprToIndexInGroupByData = new HashMap<>();
/**
* Maps an expression object to its data.
* Maps a window expression object to its data.
*/
private final HashMap<DataAnalysisOperation, Object> windowData = new HashMap<>();
private final HashMap<DataAnalysisOperation, PartitionData> windowData = new HashMap<>();
/**
* Maps a partitioned window expression object to its data.
*/
private final HashMap<DataAnalysisOperation, ValueHashMap<PartitionData>> windowPartitionData = new HashMap<>();
/**
* The id of the current group.
......@@ -286,15 +285,26 @@ public abstract class SelectGroups {
currentGroupByExprData[index] = obj;
}
final Object[] createRow() {
return new Object[Math.max(exprToIndexInGroupByData.size(), expressions.size())];
}
/**
* Get the window data for the specified expression.
*
* @param expr
* expression
* @param partitionKey
* the partition key, or null if the window has no PARTITION BY clause
* @return expression data or null
*/
public final Object getWindowExprData(DataAnalysisOperation expr) {
public final PartitionData getWindowExprData(DataAnalysisOperation expr, Value partitionKey) {
if (partitionKey == null) {
return windowData.get(expr);
} else {
ValueHashMap<PartitionData> map = windowPartitionData.get(expr);
return map != null ? map.get(partitionKey) : null;
}
}
/**
......@@ -302,12 +312,23 @@ public abstract class SelectGroups {
*
* @param expr
* expression
* @param partitionKey
* the partition key, or null if the window has no PARTITION BY clause
* @param object
* expression data to set
* window expression data to set
*/
public final void setWindowExprData(DataAnalysisOperation expr, Object obj) {
public final void setWindowExprData(DataAnalysisOperation expr, Value partitionKey, PartitionData obj) {
if (partitionKey == null) {
Object old = windowData.put(expr, obj);
assert old == null;
} else {
ValueHashMap<PartitionData> map = windowPartitionData.get(expr);
if (map == null) {
map = new ValueHashMap<>();
windowPartitionData.put(expr, map);
}
map.put(partitionKey, obj);
}
}
abstract void updateCurrentGroupExprData();
......@@ -329,6 +350,7 @@ public abstract class SelectGroups {
currentGroupByExprData = null;
exprToIndexInGroupByData.clear();
windowData.clear();
windowPartitionData.clear();
currentGroupRowId = 0;
}
......
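For context, the SelectGroups changes above replace the single untyped windowData map with two typed maps: windowData keeps one PartitionData per unpartitioned window expression, while windowPartitionData keeps a ValueHashMap of PartitionData per partitioned expression, so callers now pass the partition key (or null) instead of managing nested maps themselves. Below is a minimal standalone sketch of that two-level lookup, using plain java.util maps and placeholder types (String stands in for DataAnalysisOperation, the key Value and PartitionData); the names are illustrative, not H2 API.

import java.util.HashMap;
import java.util.Map;

class WindowStateSketch {
    // Data for windows without PARTITION BY, one entry per window expression.
    private final Map<String, Object> windowData = new HashMap<>();
    // Data for partitioned windows: expression -> (partition key -> data).
    private final Map<String, Map<String, Object>> windowPartitionData = new HashMap<>();

    Object getWindowExprData(String expr, String partitionKey) {
        if (partitionKey == null) {
            return windowData.get(expr);
        }
        Map<String, Object> map = windowPartitionData.get(expr);
        return map != null ? map.get(partitionKey) : null;
    }

    void setWindowExprData(String expr, String partitionKey, Object data) {
        if (partitionKey == null) {
            windowData.put(expr, data);
        } else {
            windowPartitionData.computeIfAbsent(expr, k -> new HashMap<>()).put(partitionKey, data);
        }
    }
}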
......@@ -6,6 +6,8 @@
package org.h2.expression.aggregate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import org.h2.command.dml.Select;
import org.h2.command.dml.SelectGroups;
......@@ -14,6 +16,7 @@ import org.h2.engine.Session;
import org.h2.expression.Expression;
import org.h2.table.ColumnResolver;
import org.h2.table.TableFilter;
import org.h2.value.Value;
/**
* A base class for aggregate functions.
......@@ -68,14 +71,67 @@ public abstract class AbstractAggregate extends DataAnalysisOperation {
super.setEvaluatable(tableFilter, b);
}
@Override
protected void getOrderedResultLoop(Session session, HashMap<Integer, Value> result, ArrayList<Value[]> ordered,
int rowIdColumn) {
WindowFrame frame = over.getWindowFrame();
if (frame == null || frame.isDefault()) {
// Aggregate all values up to and including the current row
Object aggregateData = createAggregateData();
for (Value[] row : ordered) {
// Collect values one by one
updateFromExpressions(session, aggregateData, row);
result.put(row[rowIdColumn].getInt(), getAggregatedValue(session, aggregateData));
}
} else if (frame.isFullPartition()) {
// Aggregate values from the whole partition
Object aggregateData = createAggregateData();
for (Value[] row : ordered) {
updateFromExpressions(session, aggregateData, row);
}
// All rows have the same value
Value value = getAggregatedValue(session, aggregateData);
for (Value[] row : ordered) {
result.put(row[rowIdColumn].getInt(), value);
}
} else {
// All other types of frames (slow)
int size = ordered.size();
for (int i = 0; i < size; i++) {
Object aggregateData = createAggregateData();
for (Iterator<Value[]> iter = frame.iterator(session, ordered, getOverOrderBySort(), i, false); iter
.hasNext();) {
updateFromExpressions(session, aggregateData, iter.next());
}
result.put(ordered.get(i)[rowIdColumn].getInt(), getAggregatedValue(session, aggregateData));
}
}
}
/**
* Updates the provided aggregate data from the remembered expressions.
*
* @param session
* the session
* @param aggregateData
* aggregate data
* @param array
* values of expressions
*/
protected abstract void updateFromExpressions(Session session, Object aggregateData, Value[] array);
@Override
protected void updateAggregate(Session session, SelectGroups groupData, int groupRowId) {
if (filterCondition == null || filterCondition.getBooleanValue(session)) {
ArrayList<SelectOrderBy> orderBy;
if (over != null && (orderBy = over.getOrderBy()) != null) {
if (over != null) {
if ((orderBy = over.getOrderBy()) != null) {
updateOrderedAggregate(session, groupData, groupRowId, orderBy);
} else {
updateAggregate(session, getData(session, groupData, false, false));
updateAggregate(session, getWindowData(session, groupData, false));
}
} else {
updateAggregate(session, getGroupData(groupData, false));
}
}
}
......
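The new getOrderedResultLoop in AbstractAggregate handles three frame shapes: the default frame aggregates all rows from the start of the partition up to and including the current row, a full-partition frame aggregates everything once and assigns the same value to every row, and any other frame re-aggregates per row over WindowFrame.iterator. A simplified, self-contained sketch of the first two cases, shown as a running SUM over plain ints rather than H2's Value and aggregate-data types:

import java.util.ArrayList;
import java.util.List;

public class FrameSketch {
    public static void main(String[] args) {
        List<Integer> ordered = List.of(1, 2, 3, 4);   // partition rows, already sorted

        // Default frame: running aggregate up to and including the current row.
        List<Integer> cumulative = new ArrayList<>();
        int acc = 0;
        for (int v : ordered) {
            acc += v;                     // corresponds to updateFromExpressions(...)
            cumulative.add(acc);          // corresponds to getAggregatedValue(...) per row
        }
        System.out.println(cumulative);   // [1, 3, 6, 10]

        // Full-partition frame: aggregate once, every row gets the same value.
        int total = ordered.stream().mapToInt(Integer::intValue).sum();
        System.out.println(total);        // 10 for every row of the partition
    }
}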
......@@ -38,7 +38,7 @@ class AggregateDataHistogram extends AggregateData {
@Override
void add(Database database, int dataType, Value v) {
if (distinctValues == null) {
distinctValues = ValueHashMap.newInstance();
distinctValues = new ValueHashMap<>();
}
LongDataCounter a = distinctValues.get(v);
if (a == null) {
......@@ -54,7 +54,7 @@ class AggregateDataHistogram extends AggregateData {
@Override
Value getValue(Database database, int dataType) {
if (distinctValues == null) {
return ValueArray.get(new Value[0]).convertTo(dataType);
return ValueArray.getEmpty().convertTo(dataType);
}
ValueArray[] values = new ValueArray[distinctValues.size()];
int i = 0;
......
......@@ -25,7 +25,7 @@ class AggregateDataMode extends AggregateData {
return;
}
if (distinctValues == null) {
distinctValues = ValueHashMap.newInstance();
distinctValues = new ValueHashMap<>();
}
LongDataCounter a = distinctValues.get(v);
if (a == null) {
......
......@@ -8,7 +8,6 @@ package org.h2.expression.aggregate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import org.h2.api.ErrorCode;
import org.h2.command.dml.Select;
......@@ -21,9 +20,7 @@ import org.h2.message.DbException;
import org.h2.result.SortOrder;
import org.h2.table.ColumnResolver;
import org.h2.table.TableFilter;
import org.h2.util.ValueHashMap;
import org.h2.value.Value;
import org.h2.value.ValueArray;
import org.h2.value.ValueInt;
/**
......@@ -184,64 +181,29 @@ public abstract class DataAnalysisOperation extends Expression {
*/
protected abstract void rememberExpressions(Session session, Value[] array);
/**
* Updates the provided aggregate data from the remembered expressions.
*
* @param session
* the session
* @param aggregateData
* aggregate data
* @param array
* values of expressions
*/
protected abstract void updateFromExpressions(Session session, Object aggregateData, Value[] array);
protected Object getData(Session session, SelectGroups groupData, boolean ifExists, boolean forOrderBy) {
protected Object getWindowData(Session session, SelectGroups groupData, boolean forOrderBy) {
Object data;
if (over != null) {
ValueArray key = over.getCurrentKey(session);
if (key != null) {
@SuppressWarnings("unchecked")
ValueHashMap<Object> map = (ValueHashMap<Object>) groupData.getWindowExprData(this);
if (map == null) {
if (ifExists) {
return null;
}
map = new ValueHashMap<>();
groupData.setWindowExprData(this, map);
}
PartitionData partition = (PartitionData) map.get(key);
Value key = over.getCurrentKey(session);
PartitionData partition = groupData.getWindowExprData(this, key);
if (partition == null) {
if (ifExists) {
return null;
}
data = forOrderBy ? new ArrayList<>() : createAggregateData();
map.put(key, new PartitionData(data));
} else {
data = partition.getData();
}
} else {
PartitionData partition = (PartitionData) groupData.getWindowExprData(this);
if (partition == null) {
if (ifExists) {
return null;
}
data = forOrderBy ? new ArrayList<>() : createAggregateData();
groupData.setWindowExprData(this, new PartitionData(data));
groupData.setWindowExprData(this, key, new PartitionData(data));
} else {
data = partition.getData();
}
return data;
}
} else {
protected Object getGroupData(SelectGroups groupData, boolean ifExists) {
Object data;
data = groupData.getCurrentGroupExprData(this);
if (data == null) {
if (ifExists) {
return null;
}
data = forOrderBy ? new ArrayList<>() : createAggregateData();
data = createAggregateData();
groupData.setCurrentGroupExprData(this, data);
}
}
return data;
}
......@@ -277,43 +239,38 @@ public abstract class DataAnalysisOperation extends Expression {
if (groupData == null) {
throw DbException.get(ErrorCode.INVALID_USE_OF_AGGREGATE_FUNCTION_1, getSQL());
}
return over == null ? getAggregatedValue(session, getData(session, groupData, true, false))
return over == null ? getAggregatedValue(session, getGroupData(groupData, true))
: getWindowResult(session, groupData);
}
/**
* Returns the result of this window function or window aggregate. This method
* is not used for plain aggregates.
*
* @param session
* the session
* @param groupData
* the group data
* @return result of this function
*/
private Value getWindowResult(Session session, SelectGroups groupData) {
PartitionData partition;
Object data;
boolean forOrderBy = over.getOrderBy() != null;
ValueArray key = over.getCurrentKey(session);
if (key != null) {
@SuppressWarnings("unchecked")
ValueHashMap<Object> map = (ValueHashMap<Object>) groupData.getWindowExprData(this);
if (map == null) {
map = new ValueHashMap<>();
groupData.setWindowExprData(this, map);
}
partition = (PartitionData) map.get(key);
Value key = over.getCurrentKey(session);
partition = groupData.getWindowExprData(this, key);
if (partition == null) {
// Window aggregates with FILTER clause may have no collected values
data = forOrderBy ? new ArrayList<>() : createAggregateData();
partition = new PartitionData(data);
map.put(key, partition);
groupData.setWindowExprData(this, key, partition);
} else {
data = partition.getData();
}
} else {
partition = (PartitionData) groupData.getWindowExprData(this);
if (partition == null) {
data = forOrderBy ? new ArrayList<>() : createAggregateData();
partition = new PartitionData(data);
groupData.setWindowExprData(this, partition);
} else {
data = partition.getData();
}
}
if (over.getOrderBy() != null || !isAggregate()) {
if (forOrderBy || !isAggregate()) {
return getOrderedResult(session, groupData, partition, data);
}
// Window aggregate without ORDER BY clause in window specification
Value result = partition.getResult();
if (result == null) {
result = getAggregatedValue(session, data);
......@@ -346,7 +303,7 @@ public abstract class DataAnalysisOperation extends Expression {
}
array[ne] = ValueInt.get(groupRowId);
@SuppressWarnings("unchecked")
ArrayList<Value[]> data = (ArrayList<Value[]>) getData(session, groupData, false, true);
ArrayList<Value[]> data = (ArrayList<Value[]>) getWindowData(session, groupData, true);
data.add(array);
}
......@@ -378,36 +335,8 @@ public abstract class DataAnalysisOperation extends Expression {
* @param rowIdColumn
* the index of row id value
*/
protected void getOrderedResultLoop(Session session, HashMap<Integer, Value> result, ArrayList<Value[]> ordered,
int rowIdColumn) {
WindowFrame frame = over.getWindowFrame();
if (frame == null || frame.isDefault()) {
Object aggregateData = createAggregateData();
for (Value[] row : ordered) {
updateFromExpressions(session, aggregateData, row);
result.put(row[rowIdColumn].getInt(), getAggregatedValue(session, aggregateData));
}
} else if (frame.isFullPartition()) {
Object aggregateData = createAggregateData();
for (Value[] row : ordered) {
updateFromExpressions(session, aggregateData, row);
}
Value value = getAggregatedValue(session, aggregateData);
for (Value[] row : ordered) {
result.put(row[rowIdColumn].getInt(), value);
}
} else {
int size = ordered.size();
for (int i = 0; i < size; i++) {
Object aggregateData = createAggregateData();
for (Iterator<Value[]> iter = frame.iterator(session, ordered, getOverOrderBySort(), i, false); iter
.hasNext();) {
updateFromExpressions(session, aggregateData, iter.next());
}
result.put(ordered.get(i)[rowIdColumn].getInt(), getAggregatedValue(session, aggregateData));
}
}
}
protected abstract void getOrderedResultLoop(Session session, HashMap<Integer, Value> result,
ArrayList<Value[]> ordered, int rowIdColumn);
protected StringBuilder appendTailConditions(StringBuilder builder) {
if (over != null) {
......
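The DataAnalysisOperation changes split the old getData(session, groupData, ifExists, forOrderBy) into getWindowData (window path, delegating the per-partition lookup to SelectGroups via the partition key) and getGroupData (plain GROUP BY path), and getWindowResult now creates an empty PartitionData on demand for window aggregates whose FILTER clause rejected every row. A rough sketch of the create-on-first-access idiom both paths share, with invented names and plain collections standing in for the H2 types:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

class LazyWindowDataSketch {
    private final Map<String, Object> partitions = new HashMap<>();

    // forOrderBy: rows are remembered in a list and aggregated later in the ordered loop;
    // otherwise an accumulator (here a long[1] running sum) is updated immediately.
    Object getWindowData(String partitionKey, boolean forOrderBy) {
        Object data = partitions.get(partitionKey);
        if (data == null) {
            data = forOrderBy ? new ArrayList<Object[]>() : new long[1];
            partitions.put(partitionKey, data);
        }
        return data;
    }
}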
......@@ -12,7 +12,7 @@ import org.h2.value.Value;
/**
* Partition data of a window aggregate.
*/
final class PartitionData {
public final class PartitionData {
/**
* Aggregate data.
......
......@@ -185,11 +185,14 @@ public final class Window {
* session
* @return key for the current group, or null
*/
public ValueArray getCurrentKey(Session session) {
public Value getCurrentKey(Session session) {
if (partitionBy == null) {
return null;
}
int len = partitionBy.size();
if (len == 1) {
return partitionBy.get(0).getValue(session);
} else {
Value[] keyValues = new Value[len];
// update group
for (int i = 0; i < len; i++) {
......@@ -198,6 +201,7 @@ public final class Window {
}
return ValueArray.get(keyValues);
}
}
/**
* Returns SQL representation.
......
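Window.getCurrentKey now returns the single partition expression's Value directly when there is exactly one PARTITION BY expression and only builds a ValueArray for composite keys, which is why its return type widens from ValueArray to Value. A small sketch of the same idea over strings (illustrative only, not the H2 code):

import java.util.List;

public class PartitionKeySketch {
    // Returns the lone key directly, or a combined key for a composite PARTITION BY.
    static Object currentKey(List<String> partitionValues) {
        if (partitionValues == null || partitionValues.isEmpty()) {
            return null;                      // no PARTITION BY clause
        }
        if (partitionValues.size() == 1) {
            return partitionValues.get(0);    // avoid wrapping a single value
        }
        return List.copyOf(partitionValues);  // composite key, compared element-wise
    }

    public static void main(String[] args) {
        System.out.println(currentKey(List.of("a")));        // a
        System.out.println(currentKey(List.of("a", "b")));   // [a, b]
    }
}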
......@@ -174,11 +174,6 @@ public class WindowFunction extends DataAnalysisOperation {
}
}
@Override
protected void updateFromExpressions(Session session, Object aggregateData, Value[] array) {
throw DbException.getUnsupportedException("Window function");
}
@Override
protected Object createAggregateData() {
throw DbException.getUnsupportedException("Window function");
......@@ -188,62 +183,58 @@ public class WindowFunction extends DataAnalysisOperation {
protected void getOrderedResultLoop(Session session, HashMap<Integer, Value> result, ArrayList<Value[]> ordered,
int rowIdColumn) {
switch (type) {
case ROW_NUMBER:
for (int i = 0, size = ordered.size(); i < size;) {
result.put(ordered.get(i)[rowIdColumn].getInt(), ValueInt.get(++i));
}
break;
case RANK:
case DENSE_RANK:
case PERCENT_RANK:
getRank(result, ordered, rowIdColumn);
break;
case CUME_DIST:
getCumeDist(session, result, ordered, rowIdColumn);
return;
break;
case NTILE:
getNtile(session, result, ordered, rowIdColumn);
return;
break;
case LEAD:
case LAG:
getLeadLag(session, result, ordered, rowIdColumn);
return;
break;
case FIRST_VALUE:
case LAST_VALUE:
case NTH_VALUE:
getNth(session, result, ordered, rowIdColumn);
return;
break;
default:
throw DbException.throwInternalError("type=" + type);
}
}
private void getRank(HashMap<Integer, Value> result, ArrayList<Value[]> ordered, int rowIdColumn) {
int size = ordered.size();
int number = 0;
for (int i = 0; i < size; i++) {
Value[] row = ordered.get(i);
int rowId = row[rowIdColumn].getInt();
Value v;
switch (type) {
case ROW_NUMBER:
v = ValueInt.get(i + 1);
break;
case RANK:
case DENSE_RANK:
case PERCENT_RANK: {
if (i == 0) {
number = 1;
} else if (getOverOrderBySort().compare(ordered.get(i - 1), row) != 0) {
if (type == WindowFunctionType.DENSE_RANK) {
number++;
} else {
if (getOverOrderBySort().compare(ordered.get(i - 1), row) != 0) {
switch (type) {
case RANK:
case PERCENT_RANK:
number = i + 1;
break;
default: // DENSE_RANK
number++;
}
}
}
Value v;
if (type == WindowFunctionType.PERCENT_RANK) {
int nm = number - 1;
v = nm == 0 ? ValueDouble.ZERO : ValueDouble.get((double) nm / (size - 1));
} else {
v = ValueInt.get(number);
}
break;
}
default:
throw DbException.throwInternalError("type=" + type);
}
result.put(rowId, v);
result.put(row[rowIdColumn].getInt(), v);
}
}
......
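The WindowFunction refactoring folds ROW_NUMBER into getRank's single pass over the sorted partition: the rank counter advances only when the ORDER BY key changes between adjacent rows, RANK (and PERCENT_RANK's base) jumps to the 1-based row position while DENSE_RANK increments by one, and PERCENT_RANK rescales to (rank - 1) / (size - 1). A standalone sketch over a sorted int array, simplified and not the H2 implementation:

public class RankSketch {
    public static void main(String[] args) {
        int[] keys = {10, 10, 20, 30};            // ORDER BY values of one partition, sorted
        int size = keys.length;
        int number = 0;
        for (int i = 0; i < size; i++) {
            if (i == 0) {
                number = 1;
            } else if (keys[i - 1] != keys[i]) {  // peer group changed
                number = i + 1;                   // RANK; DENSE_RANK would use number++ instead
            }
            double percentRank = number == 1 ? 0.0 : (double) (number - 1) / (size - 1);
            System.out.println("row " + (i + 1) + ": rank=" + number + " percent_rank=" + percentRank);
        }
        // Output: ranks 1, 1, 3, 4 and percent_rank 0.0, 0.0, 0.666..., 1.0
    }
}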
......@@ -40,7 +40,7 @@ public class HashIndex extends BaseIndex {
}
private void reset() {
rows = ValueHashMap.newInstance();
rows = new ValueHashMap<>();
}
@Override
......
......@@ -45,7 +45,7 @@ public class NonUniqueHashIndex extends BaseIndex {
}
private void reset() {
rows = ValueHashMap.newInstance();
rows = new ValueHashMap<>();
rowCount = 0;
}
......
......@@ -147,7 +147,7 @@ public class LocalResultImpl implements LocalResult {
public void setDistinct() {
assert distinctIndexes == null;
distinct = true;
distinctRows = ValueHashMap.newInstance();
distinctRows = new ValueHashMap<>();
}
/**
......@@ -159,7 +159,7 @@ public class LocalResultImpl implements LocalResult {
public void setDistinct(int[] distinctIndexes) {
assert !distinct;
this.distinctIndexes = distinctIndexes;
distinctRows = ValueHashMap.newInstance();
distinctRows = new ValueHashMap<>();
}
/**
......@@ -202,7 +202,7 @@ public class LocalResultImpl implements LocalResult {
return external.contains(values);
}
if (distinctRows == null) {
distinctRows = ValueHashMap.newInstance();
distinctRows = new ValueHashMap<>();
for (Value[] row : rows) {
ValueArray array = getArrayOfDistinct(row);
distinctRows.put(array, array.getList());
......
......@@ -38,15 +38,6 @@ public class ValueHashMap<V> extends HashBase {
Value[] keys;
V[] values;
/**
* Create a new value hash map.
*
* @return the object
*/
public static <T> ValueHashMap<T> newInstance() {
return new ValueHashMap<>();
}
@Override
@SuppressWarnings("unchecked")
protected void reset(int newLevel) {
......
......@@ -20,6 +20,11 @@ import org.h2.util.StatementBuilder;
*/
public class ValueArray extends Value {
/**
* Empty array.
*/
private static final Object EMPTY = get(new Value[0]);
private final Class<?> componentType;
private final Value[] values;
private int hash;
......@@ -52,6 +57,15 @@ public class ValueArray extends Value {
return new ValueArray(componentType, list);
}
/**
* Returns empty array.
*
* @return empty array
*/
public static ValueArray getEmpty() {
return (ValueArray) EMPTY;
}
@Override
public int hashCode() {
if (hash != 0) {
......
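ValueArray now caches one shared empty instance so that call sites such as SelectGroups and AggregateDataHistogram can use ValueArray.getEmpty() instead of allocating ValueArray.get(new Value[0]) each time. A minimal sketch of the shared empty-instance pattern, using an unrelated IntList type rather than H2's Value hierarchy:

public final class IntList {
    // One shared immutable instance for the common empty case.
    private static final IntList EMPTY = new IntList(new int[0]);

    private final int[] values;

    private IntList(int[] values) {
        this.values = values;
    }

    public static IntList getEmpty() {
        return EMPTY;
    }

    public static IntList get(int[] values) {
        return values.length == 0 ? EMPTY : new IntList(values);
    }

    public int size() {
        return values.length;
    }
}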
......@@ -83,3 +83,14 @@ SELECT I, V, COUNT(V) OVER W C, COUNT(DISTINCT V) OVER W D FROM
> 6 2 6 2
> 7 3 7 3
> rows (ordered): 7
SELECT I, C, COUNT(I) OVER (PARTITION BY C) CNT FROM
VALUES (1, 1), (2, 1), (3, 2), (4, 2), (5, 2) T(I, C);
> I C CNT
> - - ---
> 1 1 2
> 2 1 2
> 3 2 3
> 4 2 3
> 5 2 3
> rows: 5
......@@ -143,3 +143,11 @@ SELECT LAG(VALUE) OVER (ORDER BY ID RANGE CURRENT ROW) FROM TEST;
DROP TABLE TEST;
> ok
SELECT C, SUM(I) S, LEAD(SUM(I)) OVER (ORDER /**/ BY SUM(I)) L FROM
VALUES (1, 1), (2, 1), (4, 2), (8, 2) T(I, C) GROUP BY C;
> C S L
> - -- ----
> 1 3 12
> 2 12 null
> rows: 2
......@@ -133,7 +133,7 @@ INSERT INTO TEST VALUES
(4, 'b', 8);
> update count: 4
SELECT ROW_NUMBER() OVER(ORDER /**/ BY TYPE) RN, TYPE, SUM(CNT) SUM FROM TEST GROUP BY TYPE;
SELECT ROW_NUMBER() OVER (ORDER /**/ BY TYPE) RN, TYPE, SUM(CNT) SUM FROM TEST GROUP BY TYPE;
> RN TYPE SUM
> -- ---- ---
> 1 a 1
......@@ -141,6 +141,16 @@ SELECT ROW_NUMBER() OVER(ORDER /**/ BY TYPE) RN, TYPE, SUM(CNT) SUM FROM TEST GR
> 3 c 4
> rows: 3
SELECT A, B, C, ROW_NUMBER() OVER (PARTITION BY A, B) N FROM
VALUES (1, 1, 1), (1, 1, 2), (1, 2, 3), (2, 1, 4) T(A, B, C);
> A B C N
> - - - -
> 1 1 1 1
> 1 1 2 2
> 1 2 3 1
> 2 1 4 1
> rows: 4
SELECT RANK () OVER () FROM TEST;
> exception SYNTAX_ERROR_2
......@@ -149,3 +159,9 @@ SELECT DENSE_RANK () OVER () FROM TEST;
DROP TABLE TEST;
> ok
SELECT ROW_NUMBER() OVER () FROM VALUES (1);
> ROW_NUMBER() OVER ()
> --------------------
> 1
> rows: 1
......@@ -47,7 +47,7 @@ public class TestValueHashMap extends TestBase implements DataHandler {
}
private void testNotANumber() {
ValueHashMap<Integer> map = ValueHashMap.newInstance();
ValueHashMap<Integer> map = new ValueHashMap<>();
for (int i = 1; i < 100; i++) {
double d = Double.longBitsToDouble(0x7ff0000000000000L | i);
ValueDouble v = ValueDouble.get(d);
......@@ -57,7 +57,7 @@ public class TestValueHashMap extends TestBase implements DataHandler {
}
private void testRandomized() {
ValueHashMap<Value> map = ValueHashMap.newInstance();
ValueHashMap<Value> map = new ValueHashMap<>();
HashMap<Value, Value> hash = new HashMap<>();
Random random = new Random(1);
Comparator<Value> vc = new Comparator<Value>() {
......