Commit a50f8f5c
Authored Jan 29, 2019 by Evgenij Ryazanov

Add PERCENTILE_DISC inverse distribution function

Parent: e1e6689d

Showing 9 changed files with 419 additions and 65 deletions
help.csv                       h2/src/docsrc/help/help.csv                                     +16   -0
Parser.java                    h2/src/main/org/h2/command/Parser.java                          +6    -0
Aggregate.java                 h2/src/main/org/h2/expression/aggregate/Aggregate.java          +44   -3
AggregateData.java             h2/src/main/org/h2/expression/aggregate/AggregateData.java      +1    -0
AggregateDataCollecting.java   .../org/h2/expression/aggregate/AggregateDataCollecting.java    +27   -0
AggregateType.java             h2/src/main/org/h2/expression/aggregate/AggregateType.java      +5    -0
Percentile.java                h2/src/main/org/h2/expression/aggregate/Percentile.java         +79   -25
TestScript.java                h2/src/test/org/h2/test/scripts/TestScript.java                 +1    -1
percentile.sql                 ...st/org/h2/test/scripts/functions/aggregate/percentile.sql    +240  -36
h2/src/docsrc/help/help.csv

@@ -3674,6 +3674,22 @@ Aggregates are only allowed in select statements.
 VAR_SAMP(X)
 "
 
+"Functions (Aggregate)","PERCENTILE_DISC","
+PERCENTILE_DISC(numeric) WITHIN GROUP (ORDER BY value [ASC|DESC])
+[FILTER (WHERE expression)] [OVER windowNameOrSpecification]
+","
+Return percentile of values from the group.
+Interpolation is not performed.
+Argument must be between 0 and 1 inclusive.
+Argument must be the same for all rows in the same group.
+If argument is NULL, the result is NULL.
+NULL values are ignored in the calculation.
+If no rows are selected, the result is NULL.
+Aggregates are only allowed in select statements.
+","
+PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY V)
+"
+
 "Functions (Aggregate)","MEDIAN","
 MEDIAN( [ DISTINCT|ALL ] value )
 [FILTER (WHERE expression)] [OVER windowNameOrSpecification]
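As a quick orientation for readers who want to try the documented syntax, here is a minimal JDBC sketch against an in-memory H2 database (assuming a build that already contains this commit; the table and column names are only illustrative):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class PercentileDiscExample {
    public static void main(String[] args) throws Exception {
        // Private in-memory database; requires an H2 build that includes PERCENTILE_DISC.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(V INT)");
            stat.execute("INSERT INTO TEST VALUES (20), (20), (10), (NULL)");
            // NULL is ignored and no interpolation is performed, so the result is 20.
            try (ResultSet rs = stat.executeQuery(
                    "SELECT PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY V) FROM TEST")) {
                rs.next();
                System.out.println(rs.getInt(1));
            }
        }
    }
}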
h2/src/main/org/h2/command/Parser.java

@@ -3084,6 +3084,12 @@ public class Parser {
             }
             break;
         }
+        case PERCENTILE_DISC: {
+            Expression num = readExpression();
+            read(CLOSE_PAREN);
+            r = readWithinGroup(aggregateType, num);
+            break;
+        }
         case MODE: {
             if (readIf(CLOSE_PAREN)) {
                 r = readWithinGroup(AggregateType.MODE, null);
h2/src/main/org/h2/expression/aggregate/Aggregate.java

@@ -111,6 +111,7 @@ public class Aggregate extends AbstractAggregate {
         addAggregate("HISTOGRAM", AggregateType.HISTOGRAM);
         addAggregate("BIT_OR", AggregateType.BIT_OR);
         addAggregate("BIT_AND", AggregateType.BIT_AND);
+        addAggregate("PERCENTILE_DISC", AggregateType.PERCENTILE_DISC);
         addAggregate("MEDIAN", AggregateType.MEDIAN);
         addAggregate("ARRAY_AGG", AggregateType.ARRAY_AGG);
         addAggregate("MODE", AggregateType.MODE);

@@ -198,6 +199,10 @@ public class Aggregate extends AbstractAggregate {
                 v = updateCollecting(session, v, remembered);
             }
             break;
+        case PERCENTILE_DISC:
+            ((AggregateDataCollecting) data).setSharedArgument(v);
+            v = remembered != null ? remembered[1] : orderByList.get(0).expression.getValue(session);
+            break;
         case MODE:
             v = remembered != null ? remembered[0] : orderByList.get(0).expression.getValue(session);
             break;

@@ -309,8 +314,21 @@ public class Aggregate extends AbstractAggregate {
            }
            return v;
        }
+       case PERCENTILE_DISC: {
+           Value v = on.getValue(session);
+           if (v == ValueNull.INSTANCE) {
+               return ValueNull.INSTANCE;
+           }
+           double arg = v.getDouble();
+           if (arg >= 0d && arg <= 1d) {
+               return Percentile.getFromIndex(session, orderByList.get(0).expression, type.getValueType(),
+                       orderByList, arg, false);
+           } else {
+               throw DbException.getInvalidValueException("PERCENTILE_DISC argument", arg);
+           }
+       }
        case MEDIAN:
-           return AggregateMedian.medianFromIndex(session, on, type.getValueType());
+           return Percentile.getFromIndex(session, on, type.getValueType(), orderByList, 0.5d, true);
        case ENVELOPE:
            return ((MVSpatialIndex) AggregateDataEnvelope.getGeometryColumnIndex(on)).getBounds(session);
        default:

@@ -369,12 +387,29 @@ public class Aggregate extends AbstractAggregate {
            }
            return ValueArray.get(array);
        }
+       case PERCENTILE_DISC: {
+           AggregateDataCollecting collectingData = (AggregateDataCollecting) data;
+           Value[] array = collectingData.getArray();
+           if (array == null) {
+               return ValueNull.INSTANCE;
+           }
+           Value v = collectingData.getSharedArgument();
+           if (v == ValueNull.INSTANCE) {
+               return ValueNull.INSTANCE;
+           }
+           double arg = v.getDouble();
+           if (arg >= 0d && arg <= 1d) {
+               return Percentile.getValue(session.getDatabase(), array, type.getValueType(), orderByList,
+                       arg, false);
+           } else {
+               throw DbException.getInvalidValueException("PERCENTILE_DISC argument", arg);
+           }
+       }
        case MEDIAN: {
            Value[] array = ((AggregateDataCollecting) data).getArray();
            if (array == null) {
                return ValueNull.INSTANCE;
            }
-           return AggregateMedian.median(session.getDatabase(), array, type.getValueType());
+           return Percentile.getValue(session.getDatabase(), array, type.getValueType(), orderByList, 0.5d, true);
        }
        case MODE:
            return getMode(session, data);

@@ -549,6 +584,7 @@ public class Aggregate extends AbstractAggregate {
        case MAX:
        case MEDIAN:
            break;
+       case PERCENTILE_DISC:
        case MODE:
            type = orderByList.get(0).expression.getType();
            break;

@@ -675,6 +711,9 @@ public class Aggregate extends AbstractAggregate {
        case BIT_OR:
            text = "BIT_OR";
            break;
+       case PERCENTILE_DISC:
+           text = "PERCENTILE_DISC";
+           break;
        case MEDIAN:
            text = "MEDIAN";
            break;

@@ -746,11 +785,13 @@ public class Aggregate extends AbstractAggregate {
        case MAX:
            Index index = getMinMaxColumnIndex();
            return index != null;
+       case PERCENTILE_DISC:
+           return on.isConstant() && Percentile.getColumnIndex(orderByList.get(0).expression) != null;
        case MEDIAN:
            if (distinct) {
                return false;
            }
-           return AggregateMedian.getMedianColumnIndex(on) != null;
+           return Percentile.getColumnIndex(on) != null;
        case ENVELOPE:
            return AggregateDataEnvelope.getGeometryColumnIndex(on) != null;
        default:
h2/src/main/org/h2/expression/aggregate/AggregateData.java

@@ -34,6 +34,7 @@ abstract class AggregateData {
             break;
         case GROUP_CONCAT:
         case ARRAY_AGG:
+        case PERCENTILE_DISC:
         case MEDIAN:
             break;
         case MIN:
h2/src/main/org/h2/expression/aggregate/AggregateDataCollecting.java

@@ -11,7 +11,9 @@ import java.util.Collections;
 import java.util.Iterator;
 import java.util.TreeSet;
 
+import org.h2.api.ErrorCode;
 import org.h2.engine.Database;
+import org.h2.message.DbException;
 import org.h2.value.Value;
 import org.h2.value.ValueNull;

@@ -31,6 +33,8 @@ class AggregateDataCollecting extends AggregateData implements Iterable<Value> {
     Collection<Value> values;
 
+    private Value shared;
+
     /**
      * Creates new instance of data for collecting aggregates.
      *

@@ -84,4 +88,27 @@ class AggregateDataCollecting extends AggregateData implements Iterable<Value> {
         return values != null ? values.iterator() : Collections.<Value> emptyIterator();
     }
 
+    /**
+     * Sets value of a shared argument.
+     *
+     * @param shared the shared value
+     */
+    void setSharedArgument(Value shared) {
+        if (this.shared == null) {
+            this.shared = shared;
+        } else if (!this.shared.equals(shared)) {
+            throw DbException.get(ErrorCode.INVALID_VALUE_2, "Inverse distribution function argument",
+                    this.shared.getTraceSQL() + "<>" + shared.getTraceSQL());
+        }
+    }
+
+    /**
+     * Returns value of a shared argument.
+     *
+     * @return value of a shared argument
+     */
+    Value getSharedArgument() {
+        return shared;
+    }
 }
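The new setSharedArgument()/getSharedArgument() pair is what enforces the rule from help.csv that the percentile argument must be the same for all rows of a group. A hedged JDBC sketch of how a violation surfaces to a client (illustrative names; the expected behaviour is taken from the test script later in this commit):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class SharedArgumentExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(V INT)");
            stat.execute("INSERT INTO TEST VALUES (1), (2), (3)");
            try (ResultSet rs = stat.executeQuery(
                    "SELECT PERCENTILE_DISC(V) WITHIN GROUP (ORDER BY V) FROM TEST")) {
                rs.next(); // not reached
            } catch (SQLException e) {
                // The argument V differs between rows, so the shared-argument check
                // rejects the query (exception INVALID_VALUE_2 in percentile.sql).
                System.out.println("Rejected: " + e.getMessage());
            }
        }
    }
}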
h2/src/main/org/h2/expression/aggregate/AggregateType.java

@@ -95,6 +95,11 @@ public enum AggregateType {
      */
     HISTOGRAM,
 
+    /**
+     * The aggregate type for PERCENTILE_DISC(expression).
+     */
+    PERCENTILE_DISC,
+
     /**
      * The aggregate type for MEDIAN(expression).
      */
h2/src/main/org/h2/expression/aggregate/AggregateMedian.java → h2/src/main/org/h2/expression/aggregate/Percentile.java

@@ -10,6 +10,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 
 import org.h2.api.IntervalQualifier;
+import org.h2.command.dml.SelectOrderBy;
 import org.h2.engine.Database;
 import org.h2.engine.Mode;
 import org.h2.engine.Session;

@@ -41,25 +42,26 @@ import org.h2.value.ValueTimestamp;
 import org.h2.value.ValueTimestampTimeZone;
 
 /**
- * MEDIAN aggregate.
+ * PERCENTILE_DISC and MEDIAN inverse distribution functions.
  */
-final class AggregateMedian {
+final class Percentile {
 
     private static boolean isNullsLast(Index index) {
         IndexColumn ic = index.getIndexColumns()[0];
         int sortType = ic.sortType;
         return (sortType & SortOrder.NULLS_LAST) != 0
                 || (sortType & SortOrder.NULLS_FIRST) == 0
-                        && ((sortType & SortOrder.DESCENDING) != 0 ^ SysProperties.SORT_NULLS_HIGH);
+                        && (sortType & SortOrder.DESCENDING) != 0 ^ SysProperties.SORT_NULLS_HIGH;
     }
 
     /**
-     * Get the index (if any) for the column specified in the median aggregate.
+     * Get the index (if any) for the column specified in the inverse
+     * distribution function.
      *
      * @param on the expression (usually a column expression)
     * @return the index, or null
     */
-    static Index getMedianColumnIndex(Expression on) {
+    static Index getColumnIndex(Expression on) {
         if (on instanceof ExpressionColumn) {
             ExpressionColumn col = (ExpressionColumn) on;
             Column column = col.getColumn();

@@ -92,35 +94,64 @@ final class AggregateMedian {
     }
 
     /**
-     * Get the median from the array of values.
+     * Get the result from the array of values.
      *
      * @param database the database
      * @param array array with values
      * @param dataType the data type
+     * @param orderByList ORDER BY list
+     * @param percentile argument of percentile function, or 0.5d for median
+     * @param interpolate whether value should be interpolated
      * @return the result
      */
-    static Value median(Database database, Value[] array, int dataType) {
+    static Value getValue(Database database, Value[] array, int dataType, ArrayList<SelectOrderBy> orderByList,
+            double percentile, boolean interpolate) {
         final CompareMode compareMode = database.getCompareMode();
         Arrays.sort(array, compareMode);
-        int len = array.length;
-        int idx = len / 2;
-        Value v1 = array[idx];
-        if ((len & 1) == 1) {
-            return v1.convertTo(dataType);
+        int count = array.length;
+        boolean reverseIndex = orderByList != null && (orderByList.get(0).sortType & SortOrder.DESCENDING) != 0;
+        double fpRow = (count - 1) * percentile;
+        int rowIdx1 = (int) fpRow;
+        double factor = fpRow - rowIdx1;
+        int rowIdx2;
+        if (factor == 0d) {
+            interpolate = false;
+            rowIdx2 = rowIdx1;
+        } else {
+            rowIdx2 = rowIdx1 + 1;
+            if (!interpolate) {
+                if (factor > 0.5d) {
+                    rowIdx1 = rowIdx2;
+                } else {
+                    rowIdx2 = rowIdx1;
+                }
+            }
         }
-        return getMedian(array[idx - 1], v1, dataType, database.getMode(), compareMode);
+        if (reverseIndex) {
+            rowIdx1 = count - 1 - rowIdx1;
+            rowIdx2 = count - 1 - rowIdx2;
+        }
+        Value v = array[rowIdx1];
+        if (!interpolate) {
+            return v.convertTo(dataType);
+        }
+        return getMedian(v, array[rowIdx2], dataType, database.getMode(), compareMode);
     }
 
     /**
-     * Get the median from the index.
+     * Get the result from the index.
      *
      * @param session the session
-     * @param on the expression
+     * @param expression the expression
      * @param dataType the data type
+     * @param orderByList ORDER BY list
+     * @param percentile argument of percentile function, or 0.5d for median
+     * @param interpolate whether value should be interpolated
      * @return the result
      */
-    static Value medianFromIndex(Session session, Expression on, int dataType) {
-        Index index = getMedianColumnIndex(on);
+    static Value getFromIndex(Session session, Expression expression, int dataType,
+            ArrayList<SelectOrderBy> orderByList, double percentile, boolean interpolate) {
+        Index index = getColumnIndex(expression);
         long count = index.getRowCount(session);
         if (count == 0) {
             return ValueNull.INSTANCE;

@@ -128,7 +159,7 @@ final class AggregateMedian {
         Cursor cursor = index.find(session, null, null);
         cursor.next();
         int columnId = index.getColumns()[0].getColumnId();
-        ExpressionColumn expr = (ExpressionColumn) on;
+        ExpressionColumn expr = (ExpressionColumn) expression;
         if (expr.getColumn().isNullable()) {
             boolean hasNulls = false;
             SearchRow row;

@@ -165,7 +196,26 @@ final class AggregateMedian {
                 }
             }
         }
-        long skip = (count - 1) / 2;
+        boolean reverseIndex = (orderByList != null ? orderByList.get(0).sortType & SortOrder.DESCENDING : 0)
+                != (index.getIndexColumns()[0].sortType & SortOrder.DESCENDING);
+        double fpRow = (count - 1) * percentile;
+        long rowIdx1 = (long) fpRow;
+        double factor = fpRow - rowIdx1;
+        long rowIdx2;
+        if (factor == 0d) {
+            interpolate = false;
+            rowIdx2 = rowIdx1;
+        } else {
+            rowIdx2 = rowIdx1 + 1;
+            if (!interpolate) {
+                if (factor > 0.5d) {
+                    rowIdx1 = rowIdx2;
+                } else {
+                    rowIdx2 = rowIdx1;
+                }
+            }
+        }
+        long skip = reverseIndex ? count - 1 - rowIdx2 : rowIdx1;
         for (int i = 0; i < skip; i++) {
             cursor.next();
         }

@@ -177,7 +227,7 @@ final class AggregateMedian {
         if (v == ValueNull.INSTANCE) {
             return v;
         }
-        if ((count & 1) == 0) {
+        if (interpolate) {
             cursor.next();
             row = cursor.getSearchRow();
             if (row == null) {

@@ -188,14 +238,18 @@ final class AggregateMedian {
                 return v;
             }
             Database database = session.getDatabase();
+            if (reverseIndex) {
+                Value t = v;
+                v = v2;
+                v2 = t;
+            }
             return getMedian(v, v2, dataType, database.getMode(), database.getCompareMode());
         }
         return v;
     }
 
     private static Value getMedian(Value v0, Value v1, int dataType, Mode databaseMode, CompareMode compareMode) {
-        int cmp = v0.compareTo(v1, databaseMode, compareMode);
-        if (cmp == 0) {
+        if (v0.compareTo(v1, databaseMode, compareMode) == 0) {
            return v0.convertTo(dataType);
         }
         switch (dataType) {

@@ -273,12 +327,12 @@ final class AggregateMedian {
                     IntervalUtils.intervalToAbsolute((ValueInterval) v0)
                             .add(IntervalUtils.intervalToAbsolute((ValueInterval) v1)).shiftRight(1));
         default:
-            // Just return smaller
-            return (cmp < 0 ? v0 : v1).convertTo(dataType);
+            // Just return first
+            return v0.convertTo(dataType);
         }
     }
 
-    private AggregateMedian() {
+    private Percentile() {
     }
 }
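To make the row-selection arithmetic in getValue() easier to follow, here is a small self-contained sketch (not H2 code; names are illustrative) of the non-interpolating branch, applied to the data used at the start of percentile.sql:

import java.util.Arrays;

public class DiscretePercentileSketch {
    // Non-interpolating selection, as in Percentile.getValue() with interpolate == false:
    // take the row nearest to (count - 1) * percentile, and mirror the index when the
    // WITHIN GROUP ordering is descending.
    static int pick(int[] sorted, double percentile, boolean descending) {
        int count = sorted.length;
        double fpRow = (count - 1) * percentile;
        int rowIdx = (int) fpRow;
        if (fpRow - rowIdx > 0.5d) {
            rowIdx++;
        }
        if (descending) {
            rowIdx = count - 1 - rowIdx;
        }
        return sorted[rowIdx];
    }

    public static void main(String[] args) {
        int[] values = {20, 20, 10, 10}; // the test table after all inserts
        Arrays.sort(values);             // {10, 10, 20, 20}
        System.out.println(pick(values, 0.5, false)); // 10, the D50A column in percentile.sql
        System.out.println(pick(values, 0.5, true));  // 20, the D50D column in percentile.sql
    }
}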
h2/src/test/org/h2/test/scripts/TestScript.java

@@ -165,7 +165,7 @@ public class TestScript extends TestDb {
             testScript("other/" + s + ".sql");
         }
         for (String s : new String[] { "any", "array-agg", "avg", "bit-and", "bit-or", "count", "envelope",
-                "every", "group-concat", "histogram", "max", "median", "min", "mode", "selectivity", "stddev-pop",
+                "every", "group-concat", "histogram", "max", "min", "mode", "percentile", "selectivity", "stddev-pop",
                 "stddev-samp", "sum", "var-pop", "var-samp" }) {
             testScript("functions/aggregate/" + s + ".sql");
         }
h2/src/test/org/h2/test/scripts/functions/aggregate/median.sql → h2/src/test/org/h2/test/scripts/functions/aggregate/percentile.sql

@@ -13,14 +13,26 @@ create index test_idx on test(v asc);
 insert into test values (20), (20), (10);
 > update count: 3
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 insert into test values (null);
 > update count: 1
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 select median(distinct v) from test;
 >> 15

@@ -28,8 +40,14 @@ select median(distinct v) from test;
 insert into test values (10);
 > update count: 1
 
-select median(v) from test;
->> 15
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 10   20   15
+> rows: 1
 
 drop table test;
 > ok

@@ -44,14 +62,26 @@ create index test_idx on test(v asc nulls first);
 insert into test values (20), (20), (10);
 > update count: 3
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 insert into test values (null);
 > update count: 1
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 select median(distinct v) from test;
 >> 15

@@ -59,8 +89,14 @@ select median(distinct v) from test;
 insert into test values (10);
 > update count: 1
 
-select median(v) from test;
->> 15
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 10   20   15
+> rows: 1
 
 drop table test;
 > ok

@@ -75,14 +111,26 @@ create index test_idx on test(v asc nulls last);
 insert into test values (20), (20), (10);
 > update count: 3
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 insert into test values (null);
 > update count: 1
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 select median(distinct v) from test;
 >> 15

@@ -90,8 +138,14 @@ select median(distinct v) from test;
 insert into test values (10);
 > update count: 1
 
-select median(v) from test;
->> 15
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 10   20   15
+> rows: 1
 
 drop table test;
 > ok

@@ -106,14 +160,26 @@ create index test_idx on test(v desc);
 insert into test values (20), (20), (10);
 > update count: 3
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 insert into test values (null);
 > update count: 1
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 select median(distinct v) from test;
 >> 15

@@ -121,8 +187,14 @@ select median(distinct v) from test;
 insert into test values (10);
 > update count: 1
 
-select median(v) from test;
->> 15
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 10   20   15
+> rows: 1
 
 drop table test;
 > ok

@@ -137,14 +209,26 @@ create index test_idx on test(v desc nulls first);
 insert into test values (20), (20), (10);
 > update count: 3
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 insert into test values (null);
 > update count: 1
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 select median(distinct v) from test;
 >> 15

@@ -152,8 +236,14 @@ select median(distinct v) from test;
 insert into test values (10);
 > update count: 1
 
-select median(v) from test;
->> 15
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 10   20   15
+> rows: 1
 
 drop table test;
 > ok

@@ -168,14 +258,26 @@ create index test_idx on test(v desc nulls last);
 insert into test values (20), (20), (10);
 > update count: 3
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 insert into test values (null);
 > update count: 1
 
-select median(v) from test;
->> 20
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 20   20   20
+> rows: 1
 
 select median(distinct v) from test;
 >> 15

@@ -183,8 +285,14 @@ select median(distinct v) from test;
 insert into test values (10);
 > update count: 1
 
-select median(v) from test;
->> 15
+select
+    percentile_disc(0.5) within group (order by v) d50a,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    median(v) m from test;
+> D50A D50D M
+> ---- ---- --
+> 10   20   15
+> rows: 1
 
 drop table test;
 > ok

@@ -665,3 +773,99 @@ select dept, median(amount) filter (where amount >= 20) from test
 
 drop table test;
 > ok
+
+create table test(g int, v int);
+> ok
+
+insert into test values (1, 1), (1, 2), (1, 3), (1, 4), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9), (1, 10),
+    (2, 10), (2, 20), (2, 30), (2, 100);
+> update count: 14
+
+select
+    percentile_disc(0.05) within group (order by v) d05a,
+    percentile_disc(0.05) within group (order by v desc) d05d,
+    percentile_disc(0.5) within group (order by v) d50,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    percentile_disc(0.95) within group (order by v) d95a,
+    percentile_disc(0.95) within group (order by v desc) d95d,
+    g from test group by g;
+> D05A D05D D50 D50D D95A D95D G
+> ---- ---- --- ---- ---- ---- -
+> 1    10   5   6    10   1    1
+> 10   100  20  30   100  10   2
+> rows: 2
+
+select
+    percentile_disc(0.05) within group (order by v) over (partition by g order by v) d05a,
+    percentile_disc(0.05) within group (order by v desc) over (partition by g order by v) d05d,
+    percentile_disc(0.5) within group (order by v) over (partition by g order by v) d50,
+    percentile_disc(0.5) within group (order by v desc) over (partition by g order by v) d50d,
+    percentile_disc(0.95) within group (order by v) over (partition by g order by v) d95a,
+    percentile_disc(0.95) within group (order by v desc) over (partition by g order by v) d95d,
+    g, v from test order by g, v;
+> D05A D05D D50 D50D D95A D95D G V
+> ---- ---- --- ---- ---- ---- - ---
+> 1    1    1   1    1    1    1 1
+> 1    2    1   2    2    1    1 2
+> 1    3    2   2    3    1    1 3
+> 1    4    2   3    4    1    1 4
+> 1    5    3   3    5    1    1 5
+> 1    6    3   4    6    1    1 6
+> 1    7    4   4    7    1    1 7
+> 1    8    4   5    8    1    1 8
+> 1    9    5   5    9    1    1 9
+> 1    10   5   6    10   1    1 10
+> 10   10   10  10   10   10   2 10
+> 10   20   10  20   20   10   2 20
+> 10   30   20  20   30   10   2 30
+> 10   100  20  30   100  10   2 100
+> rows (ordered): 14
+
+delete from test where g <> 1;
+> update count: 4
+
+create index test_idx on test(v);
+> ok
+
+select
+    percentile_disc(0.05) within group (order by v) d05a,
+    percentile_disc(0.05) within group (order by v desc) d05d,
+    percentile_disc(0.5) within group (order by v) d50,
+    percentile_disc(0.5) within group (order by v desc) d50d,
+    percentile_disc(0.95) within group (order by v) d95a,
+    percentile_disc(0.95) within group (order by v desc) d95d
+    from test;
+> D05A D05D D50 D50D D95A D95D
+> ---- ---- --- ---- ---- ----
+> 1    10   5   6    10   1
+> rows: 1
+
+SELECT percentile_disc(null) within group (order by v) from test;
+>> null
+
+SELECT percentile_disc(-0.01) within group (order by v) from test;
+> exception INVALID_VALUE_2
+
+SELECT percentile_disc(1.01) within group (order by v) from test;
+> exception INVALID_VALUE_2
+
+SELECT percentile_disc(v) within group (order by v) from test;
+> exception INVALID_VALUE_2
+
+drop index test_idx;
+> ok
+
+SELECT percentile_disc(null) within group (order by v) from test;
+>> null
+
+SELECT percentile_disc(-0.01) within group (order by v) from test;
+> exception INVALID_VALUE_2
+
+SELECT percentile_disc(1.01) within group (order by v) from test;
+> exception INVALID_VALUE_2
+
+SELECT percentile_disc(v) within group (order by v) from test;
+> exception INVALID_VALUE_2
+
+drop table test;
+> ok
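The final block of the script also exercises PERCENTILE_DISC as a window function. A hedged JDBC sketch of the windowed form (illustrative data and names; assuming an H2 build that contains this change):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class PercentileDiscWindowExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(G INT, V INT)");
            stat.execute("INSERT INTO TEST VALUES (1, 1), (1, 2), (1, 3), (2, 10), (2, 20), (2, 30)");
            // With ORDER BY V in the window, the default frame is cumulative, so the
            // discrete median is recomputed over the rows seen so far in each partition,
            // as in the OVER (PARTITION BY g ORDER BY v) queries of the test script.
            String sql = "SELECT G, V, "
                    + "PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY V) "
                    + "OVER (PARTITION BY G ORDER BY V) D50 "
                    + "FROM TEST ORDER BY G, V";
            try (ResultSet rs = stat.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getInt("G") + " " + rs.getInt("V") + " " + rs.getInt("D50"));
                }
            }
        }
    }
}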