h2database / Commits / cefce952

Commit cefce952, authored Aug 03, 2012 by Thomas Mueller

A persistent tree map (work in progress).

Parent: 4a0120b8

Showing 8 changed files with 331 additions and 168 deletions.
h2/src/test/org/h2/test/TestAll.java                       +4   -1
h2/src/test/org/h2/test/store/TestCacheLIRS.java           +1   -5
h2/src/test/org/h2/test/store/TestDataUtils.java           +54  -0
h2/src/tools/org/h2/dev/store/btree/BtreeMap.java          +25  -17
h2/src/tools/org/h2/dev/store/btree/BtreeMapStore.java     +96  -93
h2/src/tools/org/h2/dev/store/btree/Chunk.java             +4   -4
h2/src/tools/org/h2/dev/store/btree/DataUtils.java         +12  -0
h2/src/tools/org/h2/dev/store/btree/Page.java              +135 -48
h2/src/test/org/h2/test/TestAll.java

@@ -103,6 +103,8 @@ import org.h2.test.server.TestAutoServer;
 import org.h2.test.server.TestNestedLoop;
 import org.h2.test.server.TestWeb;
 import org.h2.test.server.TestInit;
+import org.h2.test.store.TestCacheLIRS;
+import org.h2.test.store.TestDataUtils;
 import org.h2.test.store.TestTreeMapStore;
 import org.h2.test.synth.TestBtreeIndex;
 import org.h2.test.synth.TestCrashAPI;

@@ -662,8 +664,9 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
     private void testUnit() {
         // store
+        new TestCacheLIRS().runTest(this);
         new TestTreeMapStore().runTest(this);
-        new TestCache().runTest(this);
+        new TestDataUtils().runTest(this);

         // unit
         new TestAutoReconnect().runTest(this);
h2/src/test/org/h2/test/store/TestCache.java → h2/src/test/org/h2/test/store/TestCacheLIRS.java

@@ -12,13 +12,12 @@ import java.util.Map.Entry;
 import java.util.Random;

 import org.h2.dev.store.btree.CacheLIRS;
 import org.h2.test.TestBase;
-import org.h2.upgrade.v1_1.util.Profiler;
 import org.h2.util.New;

 /**
  * Tests the cache algorithm.
  */
-public class TestCache extends TestBase {
+public class TestCacheLIRS extends TestBase {

     /**
      * Run just this test.

@@ -30,8 +29,6 @@ public class TestCache extends TestBase {
     }

     public void test() throws Exception {
-        Profiler p = new Profiler();
-        p.startCollecting();
         testEdgeCases();
         testSize();
         testClear();

@@ -42,7 +39,6 @@ public class TestCache extends TestBase {
         testBadHashMethod();
         testScanResistance();
         testRandomOperations();
-        System.out.println(p.getTop(5));
     }

     private void testEdgeCases() {
h2/src/test/org/h2/test/store/TestDataUtils.java (new file, 0 → 100644)

/*
 * Copyright 2004-2011 H2 Group. Multiple-Licensed under the H2 License, Version
 * 1.0, and under the Eclipse Public License, Version 1.0
 * (http://h2database.com/html/license.html). Initial Developer: H2 Group
 */
package org.h2.test.store;

import org.h2.dev.store.btree.Page;
import org.h2.test.TestBase;

/**
 * Test utility classes.
 */
public class TestDataUtils extends TestBase {

    /**
     * Run just this test.
     *
     * @param a ignored
     */
    public static void main(String... a) throws Exception {
        TestBase.createCaller().init().test();
    }

    public void test() throws Exception {
        testPagePos();
    }

    private void testPagePos() {
        int lastCode = 0;
        assertEquals(0, Page.encodeLength(32));
        assertEquals(1, Page.encodeLength(33));
        assertEquals(1, Page.encodeLength(48));
        assertEquals(2, Page.encodeLength(49));
        assertEquals(30, Page.encodeLength(1024 * 1024));
        assertEquals(31, Page.encodeLength(1024 * 1024 + 1));
        for (int i = 1024 * 1024 + 1; i < 100 * 1024 * 1024; i += 1024) {
            int code = Page.encodeLength(i);
            assertEquals(31, code);
        }
        for (int i = 0; i < 1024 * 1024; i++) {
            int code = Page.encodeLength(i);
            assertTrue(code <= 31 && code >= 0);
            assertTrue(code >= lastCode);
            if (code > lastCode) {
                lastCode = code;
            }
            int max = Page.getMaxLength(code);
            assertTrue(max >= i && max >= 32);
        }
    }
}
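The assertions above pin down the length-code scheme added to Page.java later in this commit: any length up to 32 bytes gets code 0, 33..48 gets code 1, 49..64 gets code 2, and so on, until everything above 1 MB saturates at code 31. A small standalone sketch of the same round trip; the encodeLength/getMaxLength bodies are copied from the Page.java hunk below, while the class name and the sample lengths are only for illustration:

    public class LengthCodeDemo {

        // Copied from the new Page.encodeLength in this commit: map a byte length
        // to a 5-bit code (0..31); 31 means "more than 1 MB".
        static int encodeLength(int len) {
            if (len <= 32) {
                return 0;
            }
            int x = len, shift = 0;
            while (x > 3) {
                shift++;
                x = (x >> 1) + (x & 1);
            }
            shift = Math.max(0, shift - 4);
            return Math.min(31, (shift << 1) + (x & 1));
        }

        // Copied from the new Page.getMaxLength; the low 5 bits of a position are the code.
        static int getMaxLength(long pos) {
            int code = (int) (pos & 31);
            if (code == 31) {
                return Integer.MAX_VALUE;
            }
            return (2 + (code & 1)) << ((code >> 1) + 4);
        }

        public static void main(String[] args) {
            // Prints: 32 -> 0 (max 32), 33 -> 1 (max 48), 49 -> 2 (max 64),
            //         1048576 -> 30 (max 1048576), 1048577 -> 31 (max 2147483647)
            for (int len : new int[] { 32, 33, 49, 1024 * 1024, 1024 * 1024 + 1 }) {
                int code = encodeLength(len);
                System.out.println(len + " -> " + code + " (max " + getMaxLength(code) + ")");
            }
        }
    }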
h2/src/tools/org/h2/dev/store/btree/BtreeMap.java

@@ -17,13 +17,13 @@ import java.util.Iterator;
 public class BtreeMap<K, V> {

     private final BtreeMapStore store;
-    private final long id;
+    private final int id;
     private final String name;
     private final DataType keyType;
     private final DataType valueType;
     private Page root;

-    private BtreeMap(BtreeMapStore store, long id, String name, DataType keyType, DataType valueType) {
+    private BtreeMap(BtreeMapStore store, int id, String name, DataType keyType, DataType valueType) {
         this.store = store;
         this.id = id;
         this.name = name;

@@ -43,7 +43,7 @@ public class BtreeMap<K, V> {
      * @param valueClass the value class
      * @return the map
      */
-    static <K, V> BtreeMap<K, V> open(BtreeMapStore store, long id, String name, DataType keyType, DataType valueType) {
+    static <K, V> BtreeMap<K, V> open(BtreeMapStore store, int id, String name, DataType keyType, DataType valueType) {
         return new BtreeMap<K, V>(store, id, name, keyType, valueType);
     }

@@ -96,6 +96,14 @@ public class BtreeMap<K, V> {
         }
     }

+    /**
+     * Remove all entries, and remove the map.
+     */
+    public void remove() {
+        clear();
+        store.removeMap(id);
+    }
+
     /**
      * Remove a key-value pair.
      *

@@ -114,7 +122,7 @@ public class BtreeMap<K, V> {
      * @return true if yes
      */
     boolean isChanged() {
-        return root != null && root.getId() < 0;
+        return root != null && root.getPos() < 0;
     }

     private void markChanged() {

@@ -167,22 +175,22 @@ public class BtreeMap<K, V> {
     }

     /**
-     * Read a node.
+     * Read a page.
      *
-     * @param id the node id
-     * @return the node
+     * @param pos the position of the page
+     * @return the page
      */
-    Page readPage(long id) {
-        return store.readPage(this, id);
+    Page readPage(long pos) {
+        return store.readPage(this, pos);
     }

     /**
-     * Remove a node.
+     * Remove a page.
      *
-     * @param id the node id
+     * @param pos the position of the page
      */
-    void removePage(long id) {
-        store.removePage(id);
+    void removePage(long pos) {
+        store.removePage(pos);
     }

     /**

@@ -190,7 +198,7 @@ public class BtreeMap<K, V> {
      *
      * @param rootPos the position
      */
-    void setRoot(long rootPos) {
+    void setRootPos(long rootPos) {
         root = readPage(rootPos);
     }

@@ -205,9 +213,9 @@ public class BtreeMap<K, V> {
     }

     /**
-     * Get the root node.
+     * Get the root page.
      *
-     * @return the root node
+     * @return the root page
      */
     Page getRoot() {
         return root;

@@ -226,7 +234,7 @@ public class BtreeMap<K, V> {
         return store.getMaxPageSize();
     }

-    long getId() {
+    int getId() {
         return id;
     }
h2/src/tools/org/h2/dev/store/btree/BtreeMapStore.java

@@ -40,21 +40,31 @@ chunk:
 1 byte: 'c'
 4 bytes: length
 4 bytes: chunk id (an incrementing number)
-4 bytes: metaRootPos (relative to the chunk start)
+8 bytes: metaRootPos
 data ...

-todo:
-- use page checksums
+Limits: there are at most 67 million chunks (each chunk is at most 2 GB large).
+
+TODO:
+- use partial page checksums
 - compress chunks
-- possibly encode the length in pos (1=32, 2=128, 3=512,...)
 - rollback feature
+- support range deletes
+- keep page type (leaf/node) in pos to speed up large deletes
 - floating header (avoid duplicate header)
     for each chunk, store chunk (a counter)
     for each page, store chunk id and offset to root
     for each chunk, store position of expected next chunks
+- support reading metadata to copy all data,
+- support quota (per map, per storage)
+- support r-tree, kd-tree
+- map ids should be per chunk, to ensure uniqueness

 */

 /**

@@ -73,7 +83,7 @@ public class BtreeMapStore {
     private FileChannel file;
     private int blockSize = 4 * 1024;
-    private long rootChunkPos;
+    private long rootChunkStart;
     private int tempPageId;

     private Map<Long, Page> cache = CacheLIRS.newInstance(readCacheSize, 2048);

@@ -84,16 +94,14 @@ public class BtreeMapStore {
     // TODO use bit set, and integer instead of long
     private BtreeMap<String, String> meta;
-    private long lastMapId;
     private HashMap<String, BtreeMap<?, ?>> maps = New.hashMap();
     private HashMap<String, BtreeMap<?, ?>> mapsChanged = New.hashMap();
+    private int mapIdMin;
+    private BitSet mapIds = new BitSet();

     // TODO use an int instead? (with rollover to 0)
     private long transaction;

-    // TODO support reading metadata to support quota (per map, per storage)
-    // TODO support r-tree
-
     private BtreeMapStore(String fileName, DataTypeFactory typeFactory) {
         this.fileName = fileName;
         this.typeFactory = typeFactory;

@@ -137,17 +145,17 @@ public class BtreeMapStore {
         BtreeMap<K, V> m = (BtreeMap<K, V>) maps.get(name);
         if (m == null) {
             String identifier = meta.get("map." + name);
-            long id;
+            int id;
             String root;
             if (identifier == null) {
-                id = ++lastMapId;
+                id = nextMapId();
                 String types = id + "/" + keyType.asString() + "/" + valueType.asString();
                 meta.put("map." + name, types);
                 root = null;
             } else {
                 String types = meta.get("map." + name);
                 String[] idTypeList = StringUtils.arraySplit(types, '/', false);
-                id = Long.parseLong(idTypeList[0]);
+                id = Integer.parseInt(idTypeList[0]);
                 keyType = getDataType(idTypeList[1]);
                 valueType = getDataType(idTypeList[2]);
                 root = meta.get("root." + id);

@@ -155,12 +163,26 @@ public class BtreeMapStore {
             m = BtreeMap.open(this, id, name, keyType, valueType);
             maps.put(name, m);
             if (root != null && !"0".equals(root)) {
-                m.setRoot(Long.parseLong(root));
+                m.setRootPos(Long.parseLong(root));
             }
         }
         return m;
     }

+    private int nextMapId() {
+        int result;
+        while (true) {
+            result = mapIds.nextClearBit(mapIdMin);
+            mapIds.set(result);
+            // TODO need to check in oldest
+            if (meta.get("root." + result) == null) {
+                break;
+            }
+        }
+        mapIdMin = result;
+        return result;
+    }
+
     /**
      * Open a map.
      *

@@ -177,6 +199,11 @@ public class BtreeMapStore {
         return openMap(name, keyType, valueType);
     }

+    void removeMap(int id) {
+        mapIds.clear(id);
+        mapIdMin = Math.min(id, mapIdMin);
+    }
+
     private DataType getDataType(Class<?> clazz) {
         if (clazz == String.class) {
             return STRING_TYPE;

@@ -228,10 +255,10 @@ public class BtreeMapStore {
     }

     private void readMeta() {
-        Chunk header = readChunkHeader(rootChunkPos);
+        Chunk header = readChunkHeader(rootChunkStart);
         lastChunkId = header.id;
         chunks.put(header.id, header);
-        meta.setRoot(getId(header.id, header.metaRootOffset));
+        meta.setRootPos(header.metaRootPos);
         Iterator<String> it = meta.keyIterator("chunk.");
         while (it.hasNext()) {
             String s = it.next();

@@ -242,7 +269,7 @@ public class BtreeMapStore {
             if (c.id == header.id) {
                 c.start = header.start;
                 c.length = header.length;
-                c.metaRootOffset = header.metaRootOffset;
+                c.metaRootPos = header.metaRootPos;
             }
             lastChunkId = Math.max(c.id, lastChunkId);
             chunks.put(c.id, c);

@@ -256,8 +283,8 @@ public class BtreeMapStore {
                 "versionRead:1\n" +
                 "versionWrite:1\n" +
                 "blockSize:" + blockSize + "\n" +
-                "rootChunk:" + rootChunkPos + "\n" +
-                "lastMapId:" + lastMapId + "\n" +
+                "rootChunk:" + rootChunkStart + "\n" +
+                "lastMapId:" + mapIdMin + "\n" +
                 "transaction:" + transaction + "\n").getBytes("UTF-8"));
             file.position(0);
             file.write(header);

@@ -276,9 +303,9 @@ public class BtreeMapStore {
             file.read(ByteBuffer.wrap(header));
             Properties prop = new Properties();
             prop.load(new StringReader(new String(header, "UTF-8")));
-            rootChunkPos = Long.parseLong(prop.get("rootChunk").toString());
+            rootChunkStart = Long.parseLong(prop.get("rootChunk").toString());
             transaction = Long.parseLong(prop.get("transaction").toString());
-            lastMapId = Long.parseLong(prop.get("lastMapId").toString());
+            mapIdMin = Integer.parseInt(prop.get("lastMapId").toString());
         } catch (Exception e) {
             throw convert(e);
         }

@@ -303,18 +330,18 @@ public class BtreeMapStore {
         }
     }

-    private long getPosition(long posId) {
-        Chunk c = getChunk(posId);
-        if (c == null) {
-            throw new RuntimeException("Chunk " + getChunkId(posId) + " not found");
-        }
-        long pos = c.start;
-        pos += (int) (posId & Integer.MAX_VALUE);
-        return pos;
+    private Chunk getChunk(long pos) {
+        return chunks.get(Page.getChunkId(pos));
     }

-    private static long getId(int chunkId, int offset) {
-        return ((long) chunkId << 32) | offset;
+    private long getFilePosition(long pos) {
+        Chunk c = getChunk(pos);
+        if (c == null) {
+            throw new RuntimeException("Chunk " + Page.getChunkId(pos) + " not found");
+        }
+        long filePos = c.start;
+        filePos += Page.getOffset(pos);
+        return filePos;
     }

     /**

@@ -351,7 +378,7 @@ public class BtreeMapStore {
             chunks.remove(x);
         }
         int count = 0;
-        int maxLength = 1 + 4 + 4 + 4;
+        int maxLength = 1 + 4 + 4 + 8;
         for (BtreeMap<?, ?> m : mapsChanged.values()) {
             Page p = m.getRoot();
             if (p != null) {

@@ -370,12 +397,11 @@ public class BtreeMapStore {
         buff.put((byte) 'c');
         buff.putInt(0);
         buff.putInt(0);
-        buff.putInt(0);
-        long idOffset = getId(chunkId, 0);
+        buff.putLong(0);
         for (BtreeMap<?, ?> m : mapsChanged.values()) {
             Page p = m.getRoot();
             if (p != null) {
-                long root = p.writeTempRecursive(buff, idOffset);
+                long root = p.writeTempRecursive(buff, chunkId);
                 meta.put("root." + m.getId(), "" + root);
             }
         }

@@ -385,33 +411,32 @@ public class BtreeMapStore {
         c.liveCount = count;
         meta.put("chunk." + c.id, c.toString());
-        meta.getRoot().writeTempRecursive(buff, idOffset);
+        meta.getRoot().writeTempRecursive(buff, chunkId);
         buff.flip();
         int length = buff.limit();
-        long storePos = allocateChunk(length);
-        int rootOffset = (int) (meta.getRoot().getId() - idOffset);
+        long filePos = allocateChunk(length);
         buff.rewind();
         buff.put((byte) 'c');
         buff.putInt(length);
         buff.putInt(chunkId);
-        buff.putInt(rootOffset);
+        buff.putLong(meta.getRoot().getPos());
         buff.rewind();
         try {
-            file.position(storePos);
+            file.position(filePos);
             file.write(buff);
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
-        rootChunkPos = storePos;
+        rootChunkStart = filePos;
         writeHeader();
         mapsChanged.clear();
         temp.clear();
         tempPageId = 0;

         // update the start position and length
-        c.start = storePos;
+        c.start = filePos;
         c.length = length;
         meta.put("chunk." + c.id, c.toString());

@@ -478,22 +503,22 @@ public class BtreeMapStore {
         return ++transaction;
     }

-    private Chunk readChunkHeader(long pos) {
+    private Chunk readChunkHeader(long start) {
         try {
-            file.position(pos);
-            ByteBuffer buff = ByteBuffer.wrap(new byte[16]);
-            file.read(buff);
+            file.position(start);
+            ByteBuffer buff = ByteBuffer.wrap(new byte[32]);
+            DataUtils.readFully(file, buff);
             buff.rewind();
             if (buff.get() != 'c') {
                 throw new RuntimeException("File corrupt");
             }
             int length = buff.getInt();
             int chunkId = buff.getInt();
-            int offset = buff.getInt();
+            long metaRootPos = buff.getLong();
             Chunk c = new Chunk(chunkId);
-            c.start = pos;
+            c.start = start;
             c.length = length;
-            c.metaRootOffset = offset;
+            c.metaRootPos = metaRootPos;
             return c;
         } catch (IOException e) {
             throw new RuntimeException(e);

@@ -552,9 +577,9 @@ public class BtreeMapStore {
             }
         }
         Chunk header = readChunkHeader(move.start);
-        log(" meta:" + move.id + "/" + header.metaRootOffset + " start: " + move.start);
+        log(" meta:" + move.id + "/" + header.metaRootPos + " start: " + move.start);
         BtreeMap<String, String> oldMeta = BtreeMap.open(this, 0, "old-meta", STRING_TYPE, STRING_TYPE);
-        oldMeta.setRoot(getId(header.id, header.metaRootOffset));
+        oldMeta.setRootPos(header.metaRootPos);
         Iterator<String> it = oldMeta.keyIterator(null);
         ArrayList<Integer> oldChunks = New.arrayList();
         while (it.hasNext()) {

@@ -577,7 +602,7 @@ public class BtreeMapStore {
                 continue;
             }
             String[] idTypesList = StringUtils.arraySplit(s, '/', false);
-            long id = Long.parseLong(idTypesList[0]);
+            int id = Integer.parseInt(idTypesList[0]);
             DataType kt = getDataType(idTypesList[1]);
             DataType vt = getDataType(idTypesList[2]);
             long oldDataRoot = Long.parseLong(oldMeta.get("root." + id));

@@ -585,7 +610,7 @@ public class BtreeMapStore {
             if (oldDataRoot == 0) {
                 // no rows
             } else {
-                oldData.setRoot(oldDataRoot);
+                oldData.setRootPos(oldDataRoot);
                 @SuppressWarnings("unchecked")
                 BtreeMap<Object, Object> data = (BtreeMap<Object, Object>) maps.get(k);
                 Iterator<?> dataIt = oldData.keyIterator(null);

@@ -594,11 +619,11 @@ public class BtreeMapStore {
                     Page p = data.getPage(o);
                     if (p == null) {
                         // was removed later - ignore
-                    } else if (p.getId() < 0) {
+                    } else if (p.getPos() < 0) {
                         // temporarily changed - ok
-                        // TODO move old data if changed temporarily?
+                        // TODO move old data if there is an uncommitted change?
                     } else {
-                        Chunk c = getChunk(p.getId());
+                        Chunk c = getChunk(p.getPos());
                         if (old.contains(c)) {
                             log(" move key:" + o + " chunk:" + c.id);
                             Object value = data.get(o);

@@ -618,32 +643,18 @@ public class BtreeMapStore {
      * Read a page.
      *
      * @param map the map
-     * @param id the page id
+     * @param pos the page position
      * @return the page
      */
-    Page readPage(BtreeMap<?, ?> map, long id) {
-        if (id < 0) {
-            return temp.get(id);
+    Page readPage(BtreeMap<?, ?> map, long pos) {
+        if (pos < 0) {
+            return temp.get(pos);
         }
-        Page p = cache.get(id);
+        Page p = cache.get(pos);
         if (p == null) {
-            try {
-                long pos = getPosition(id);
-                file.position(pos);
-                ByteBuffer buff = ByteBuffer.wrap(new byte[8 * 1024]);
-                // TODO read fully; read only required bytes
-                do {
-                    int len = file.read(buff);
-                    if (len < 0) {
-                        break;
-                    }
-                } while (buff.remaining() > 0);
-                buff.rewind();
-                p = Page.read(map, id, buff);
-            } catch (Exception e) {
-                throw new RuntimeException(e);
-            }
-            cache.put(id, p);
+            long filePos = getFilePosition(pos);
+            p = Page.read(file, map, filePos, pos);
+            cache.put(pos, p);
         }
         return p;
     }

@@ -651,31 +662,23 @@ public class BtreeMapStore {
     /**
      * Remove a page.
      *
-     * @param id the page id
+     * @param pos the position of the page
     */
-    void removePage(long id) {
-        if (id > 0) {
-            cache.remove(id);
-            if (getChunk(id).liveCount == 0) {
-                throw new RuntimeException("Negative live count: " + id);
+    void removePage(long pos) {
+        if (pos > 0) {
+            cache.remove(pos);
+            if (getChunk(pos).liveCount == 0) {
+                throw new RuntimeException("Negative live count: " + pos);
             }
-            getChunk(id).liveCount--;
+            getChunk(pos).liveCount--;
         } else {
-            temp.remove(id);
+            temp.remove(pos);
             if (temp.size() == 0) {
                 tempPageId = 0;
             }
         }
     }

-    private static int getChunkId(long pos) {
-        return (int) (pos >>> 32);
-    }
-
-    private Chunk getChunk(long pos) {
-        return chunks.get(getChunkId(pos));
-    }
-
     /**
      * Log the string, if logging is enabled.
      *
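With metaRootPos widened from a 4-byte offset to an 8-byte position, the fixed chunk header written by store() and parsed by readChunkHeader() is now the marker byte 'c', a 4-byte length, a 4-byte chunk id and an 8-byte metaRootPos (17 bytes of payload, read into a 32-byte buffer). A minimal sketch of that layout with a plain ByteBuffer, mirroring the put/get sequence in the diff; the concrete values and the class name are made up for illustration:

    import java.nio.ByteBuffer;

    public class ChunkHeaderDemo {

        public static void main(String[] args) {
            // Write the header the way BtreeMapStore.store() does.
            ByteBuffer buff = ByteBuffer.wrap(new byte[32]);
            buff.put((byte) 'c');          // chunk marker
            buff.putInt(8192);             // chunk length in bytes (example value)
            buff.putInt(3);                // chunk id, an incrementing number (example value)
            buff.putLong(0x6000000083L);   // metaRootPos: a full page position, not an offset

            // Parse it back the way readChunkHeader() does.
            buff.rewind();
            if (buff.get() != 'c') {
                throw new RuntimeException("File corrupt");
            }
            int length = buff.getInt();
            int chunkId = buff.getInt();
            long metaRootPos = buff.getLong();
            System.out.println("length=" + length + " chunkId=" + chunkId + " metaRootPos=" + metaRootPos);
        }
    }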
h2/src/tools/org/h2/dev/store/btree/Chunk.java

@@ -46,9 +46,9 @@ class Chunk {
     int collectPriority;

     /**
-     * The offset of the meta root.
+     * The position of the meta root.
     */
-    int metaRootOffset;
+    long metaRootPos;

     Chunk(int id) {
         this.id = id;

@@ -70,7 +70,7 @@ class Chunk {
             c.length = Long.parseLong(prop.get("length").toString());
             c.entryCount = Integer.parseInt(prop.get("entryCount").toString());
             c.liveCount = Integer.parseInt(prop.get("liveCount").toString());
-            c.metaRootOffset = Integer.parseInt(prop.get("metaRoot").toString());
+            c.metaRootPos = Long.parseLong(prop.get("metaRoot").toString());
             return c;
         } catch (IOException e) {
             throw new RuntimeException(e);

@@ -96,7 +96,7 @@ class Chunk {
                 "length:" + length + "\n" +
                 "entryCount:" + entryCount + "\n" +
                 "liveCount:" + liveCount + "\n" +
-                "metaRoot:" + metaRootOffset + "\n";
+                "metaRoot:" + metaRootPos + "\n";
     }

 }
h2/src/tools/org/h2/dev/store/btree/DataUtils.java

@@ -6,7 +6,9 @@
  */
 package org.h2.dev.store.btree;

+import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;

 /**
  * Utility methods

@@ -172,4 +174,14 @@ public class DataUtils {
         }
     }

+    static void readFully(FileChannel file, ByteBuffer buff) throws IOException {
+        do {
+            int len = file.read(buff);
+            if (len < 0) {
+                break;
+            }
+        } while (buff.remaining() > 0);
+        buff.rewind();
+    }
+
 }
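The new DataUtils.readFully keeps calling FileChannel.read until the buffer is full (or the channel reports end-of-file) and then rewinds it, so readChunkHeader() above and the new Page.read() below can parse a fixed-size record with a single call. A minimal usage sketch; the file name is invented, and readFully is inlined here because the real method is package-private:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.FileChannel;
    import java.nio.file.Paths;
    import java.nio.file.StandardOpenOption;

    public class ReadFullyDemo {

        // Same loop as the readFully added to DataUtils in this commit.
        static void readFully(FileChannel file, ByteBuffer buff) throws IOException {
            do {
                int len = file.read(buff);
                if (len < 0) {
                    break;
                }
            } while (buff.remaining() > 0);
            buff.rewind();
        }

        public static void main(String[] args) throws IOException {
            try (FileChannel file = FileChannel.open(Paths.get("data.store"), StandardOpenOption.READ)) {
                file.position(0);
                ByteBuffer buff = ByteBuffer.wrap(new byte[32]);
                // After this call the buffer is filled and rewound, ready to be parsed.
                readFully(file, buff);
                System.out.println("marker byte: " + (char) buff.get());
            }
        }
    }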
h2/src/tools/org/h2/dev/store/btree/Page.java

@@ -6,7 +6,9 @@
  */
 package org.h2.dev.store.btree;

+import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
 import java.util.ArrayList;

 /**

@@ -18,7 +20,7 @@ import java.util.ArrayList;
 public class Page {

     private final BtreeMap<?, ?> map;
-    private long id;
+    private long pos;
     private long transaction;
     private Object[] keys;
     private Object[] values;

@@ -44,7 +46,7 @@ public class Page {
         p.values = values;
         p.children = children;
         p.transaction = map.getTransaction();
-        p.id = map.registerTempPage(p);
+        p.pos = map.registerTempPage(p);
         return p;
     }

@@ -52,25 +54,38 @@ public class Page {
      * Read a page.
      *
      * @param map the map
-     * @param id the page id
+     * @param pos the page position
      * @param buff the source buffer
      * @return the page
      */
-    static Page read(BtreeMap<?, ?> map, long id, ByteBuffer buff) {
+    static Page read(FileChannel file, BtreeMap<?, ?> map, long filePos, long pos) {
+        int maxLength = Page.getMaxLength(pos), length = maxLength;
+        ByteBuffer buff;
+        try {
+            file.position(filePos);
+            if (maxLength == Integer.MAX_VALUE) {
+                buff = ByteBuffer.wrap(new byte[128]);
+                DataUtils.readFully(file, buff);
+                maxLength = buff.getInt();
+                file.position(filePos);
+            }
+            buff = ByteBuffer.wrap(new byte[length]);
+            DataUtils.readFully(file, buff);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
         Page p = new Page(map);
-        p.id = id;
-        p.read(buff);
+        p.pos = pos;
+        p.read(buff, maxLength);
         return p;
     }

     private Page copyOnWrite() {
+        // TODO avoid creating objects (arrays) that are then not used
+        // possibly add shortcut for copy with add / copy with remove
         long t = map.getTransaction();
         if (transaction == t) {
             return this;
         }
-        map.removePage(id);
+        map.removePage(pos);
         Page newPage = create(map, keys, values, children);
         newPage.transaction = t;
         newPage.cachedCompare = cachedCompare;

@@ -79,7 +94,7 @@ public class Page {
     public String toString() {
         StringBuilder buff = new StringBuilder();
-        buff.append("nodeId: ").append(id).append("\n");
+        buff.append("pos: ").append(pos).append("\n");
         for (int i = 0; i <= keys.length; i++) {
             if (i > 0) {
                 buff.append(" ");

@@ -99,12 +114,12 @@ public class Page {
     }

     /**
-     * Get the page id.
+     * Get the position of the page
      *
-     * @return the page id
+     * @return the position
      */
-    long getId() {
-        return id;
+    long getPos() {
+        return pos;
     }

     /**

@@ -236,7 +251,7 @@ public class Page {
             return null;
         }
         while (true) {
-            // TODO avoid remove/add pairs if possible
+            // TODO performance: avoid remove/add pairs if possible
             CursorPos p = parents.remove(parents.size() - 1);
             int index = p.index++;
             if (index < p.page.keys.length) {

@@ -323,7 +338,7 @@ public class Page {
         int parentIndex = 0;
         while (true) {
             if (parent != null) {
-                parent.setChild(parentIndex, p.id);
+                parent.setChild(parentIndex, p.pos);
             }
             if (!p.isLeaf()) {
                 if (p.keyCount() >= map.getMaxPageSize()) {

@@ -333,11 +348,11 @@ public class Page {
                     Page split = p.splitNode(pos);
                     if (parent == null) {
                         Object[] keys = { k };
-                        long[] children = { p.getId(), split.getId() };
+                        long[] children = { p.getPos(), split.getPos() };
                         top = create(map, keys, null, children);
                         p = top;
                     } else {
-                        parent.insert(parentIndex, k, null, split.getId());
+                        parent.insert(parentIndex, k, null, split.getPos());
                         p = parent;
                     }
                 }

@@ -356,10 +371,10 @@ public class Page {
                     Page split = p.splitLeaf(pos);
                     if (parent == null) {
                         Object[] keys = { k };
-                        long[] children = { p.getId(), split.getId() };
+                        long[] children = { p.getPos(), split.getPos() };
                         top = create(map, keys, null, children);
                     } else {
-                        parent.insert(parentIndex, k, null, split.getId());
+                        parent.insert(parentIndex, k, null, split.getPos());
                     }
                 }
                 break;

@@ -402,22 +417,22 @@ public class Page {
                 map.readPage(c).removeAllRecursive();
             }
         }
-        map.removePage(id);
+        map.removePage(pos);
     }

     /**
      * Remove a key-value pair.
      *
-     * @param p the root node
+     * @param p the root page
      * @param key the key
-     * @return the new root node
+     * @return the new root page
      */
     static Page remove(Page p, Object key) {
         int index = p.findKey(key);
         if (p.isLeaf()) {
             if (index >= 0) {
                 if (p.keyCount() == 1) {
-                    p.map.removePage(p.id);
+                    p.map.removePage(p.pos);
                     return null;
                 }
                 p = p.copyOnWrite();

@@ -442,12 +457,12 @@ public class Page {
                 p = p.copyOnWrite();
                 p.remove(index);
                 if (p.keyCount() == 0) {
-                    p.map.removePage(p.id);
+                    p.map.removePage(p.pos);
                     p = p.map.readPage(p.children[0]);
                 }
             } else {
                 p = p.copyOnWrite();
-                p.setChild(index, c2.id);
+                p.setChild(index, c2.pos);
             }
             return p;
         }

@@ -488,15 +503,17 @@ public class Page {
         }
     }

-    private void read(ByteBuffer buff) {
-        // len
-        buff.getInt();
-        long id = buff.getLong();
-        if (id != map.getId()) {
-            throw new RuntimeException("Page map id mismatch, expected " + map.getId() + " got " + id);
+    private void read(ByteBuffer buff, int maxLength) {
+        int len = buff.getInt();
+        if (len > maxLength) {
+            throw new RuntimeException("Length too large, expected < " + maxLength + " got " + len);
+        }
+        int mapId = DataUtils.readVarInt(buff);
+        if (mapId != map.getId()) {
+            throw new RuntimeException("Page pos mismatch, expected " + map.getId() + " got " + mapId);
         }
         boolean node = buff.get() == 1;
-        int len = DataUtils.readVarInt(buff);
+        len = DataUtils.readVarInt(buff);
         if (node) {
             children = new long[len];
             keys = new Object[len - 1];

@@ -517,14 +534,15 @@ public class Page {
     }

     /**
-     * Store the page.
+     * Store the page and update the position.
      *
      * @param buff the target buffer
+     * @param chunkId the chunk id
     */
-    private void write(ByteBuffer buff) {
-        int pos = buff.position();
+    private void write(ByteBuffer buff, int chunkId) {
+        int offset = buff.position();
         buff.putInt(0);
-        buff.putLong(map.getId());
+        DataUtils.writeVarInt(buff, map.getId());
         if (children != null) {
             buff.put((byte) 1);
             int len = children.length;

@@ -544,8 +562,9 @@ public class Page {
                 map.getValueType().write(buff, values[i]);
             }
         }
-        int len = buff.position() - pos;
-        buff.putInt(pos, len);
+        int len = buff.position() - offset;
+        buff.putInt(offset, len);
+        this.pos = Page.getPos(chunkId, offset, len);
     }

     /**

@@ -554,7 +573,7 @@ public class Page {
      * @return the next page id
     */
     int getMaxLengthTempRecursive() {
-        int maxLength = 4 + 8 + 1;
+        int maxLength = 4 + DataUtils.MAX_VAR_INT_LEN + 1;
         if (children != null) {
             int len = children.length;
             maxLength += DataUtils.MAX_VAR_INT_LEN;

@@ -580,26 +599,25 @@ public class Page {
     }

     /**
-     * Store this page and all children that are changed, in reverse order, and update the id and child ids.
+     * Store this page and all children that are changed, in reverse order, and
+     * update the position and the children.
      *
      * @param buff the target buffer
-     * @param idOffset the offset of the id
+     * @param posOffset the offset of the id
      * @return the page id
     */
-    long writeTempRecursive(ByteBuffer buff, long idOffset) {
+    long writeTempRecursive(ByteBuffer buff, int chunkId) {
         if (children != null) {
             int len = children.length;
             for (int i = 0; i < len; i++) {
                 long c = children[i];
                 if (c < 0) {
-                    children[i] = map.readPage(c).writeTempRecursive(buff, idOffset);
+                    children[i] = map.readPage(c).writeTempRecursive(buff, chunkId);
                 }
             }
         }
-        this.id = idOffset + buff.position();
-        write(buff);
-        return id;
+        write(buff, chunkId);
+        return pos;
     }

     /**

@@ -621,4 +639,73 @@ public class Page {
         return count;
     }

+    /**
+     * Get the chunk id from the position.
+     *
+     * @param pos the position
+     * @return the chunk id
+     */
+    static int getChunkId(long pos) {
+        return (int) (pos >>> 37);
+    }
+
+    /**
+     * Get the offset from the position.
+     *
+     * @param pos the position
+     * @return the offset
+     */
+    public static long getOffset(long pos) {
+        return (int) (pos >> 5);
+    }
+
+    /**
+     * Get the position of this page. The following information is encoded in
+     * the position: the chunk id, the offset, and the maximum length.
+     *
+     * @param chunkId the chunk id
+     * @param offset the offset
+     * @param length the length
+     * @return the position
+     */
+    static long getPos(int chunkId, int offset, int length) {
+        return ((long) chunkId << 37) | ((long) offset << 5) | encodeLength(length);
+    }
+
+    /**
+     * Convert the length to a length code 0..31. 31 means more than 1 MB.
+     *
+     * @param len the length
+     * @return the length code
+     */
+    public static int encodeLength(int len) {
+        if (len <= 32) {
+            return 0;
+        }
+        int x = len;
+        int shift = 0;
+        while (x > 3) {
+            shift++;
+            x = (x >> 1) + (x & 1);
+        }
+        shift = Math.max(0, shift - 4);
+        int code = (shift << 1) + (x & 1);
+        return Math.min(31, code);
+    }
+
+    /**
+     * Get the maximum length for the given code.
+     * For the code 31, Integer.MAX_VALUE is returned.
+     *
+     * @param pos the position
+     * @return the maximum length
+     */
+    public static int getMaxLength(long pos) {
+        int code = (int) (pos & 31);
+        if (code == 31) {
+            return Integer.MAX_VALUE;
+        }
+        return (2 + (code & 1)) << ((code >> 1) + 4);
+    }
 }
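The new static helpers above define the page position as a single packed long: the chunk id in the bits above bit 37, the byte offset within the chunk shifted left by 5, and the 5-bit length code from encodeLength in the lowest bits. That layout is also where the "at most 67 million chunks" limit in the BtreeMapStore comment comes from, since 2^26 chunk ids fit above bit 37 of a positive long. A small sketch packing and unpacking a position with the same shifts; the chunk id, offset and length code are example values only:

    public class PagePosDemo {

        public static void main(String[] args) {
            int chunkId = 3;       // example chunk id
            int offset = 4096;     // example byte offset within the chunk
            int lengthCode = 4;    // Page.encodeLength(100) == 4, i.e. "at most 128 bytes"

            // Pack, as in Page.getPos(chunkId, offset, length).
            long pos = ((long) chunkId << 37) | ((long) offset << 5) | lengthCode;

            // Unpack, as in Page.getChunkId, Page.getOffset and Page.getMaxLength.
            int chunkIdBack = (int) (pos >>> 37);
            int offsetBack = (int) (pos >> 5);          // cast to int drops the chunk id bits
            int codeBack = (int) (pos & 31);
            int maxLength = (2 + (codeBack & 1)) << ((codeBack >> 1) + 4);

            // Prints: chunk=3 offset=4096 code=4 maxLength=128
            System.out.println("chunk=" + chunkIdBack + " offset=" + offsetBack
                    + " code=" + codeBack + " maxLength=" + maxLength);
        }
    }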