Commit 16d95eef
Authored Feb 04, 2014 by Thomas Mueller
Parent: 115f51dc

MVStore: the file format was changed slightly.

Showing 9 changed files with 296 additions and 245 deletions (+296 −245)
Chunk.java          h2/src/main/org/h2/mvstore/Chunk.java              +45  −39
DataUtils.java      h2/src/main/org/h2/mvstore/DataUtils.java          +40  −25
MVMap.java          h2/src/main/org/h2/mvstore/MVMap.java              +7   −7
MVStore.java        h2/src/main/org/h2/mvstore/MVStore.java            +163 −145
MVStoreTool.java    h2/src/main/org/h2/mvstore/MVStoreTool.java        +7   −16
Page.java           h2/src/main/org/h2/mvstore/Page.java               +1   −1
SequenceMap.java    h2/src/test/org/h2/test/store/SequenceMap.java     +1   −1
TestDataUtils.java  h2/src/test/org/h2/test/store/TestDataUtils.java   +1   −1
TestMVStore.java    h2/src/test/org/h2/test/store/TestMVStore.java     +31  −10
h2/src/main/org/h2/mvstore/Chunk.java

@@ -36,7 +36,7 @@ public class Chunk {
     /**
      * The length in number of blocks.
      */
-    public int blocks;
+    public int len;
 
     /**
      * The total number of pages in this chunk.
@@ -56,7 +56,7 @@ public class Chunk {
     /**
      * The sum of the max length of all pages that are in use.
      */
-    public long maxLengthLive;
+    public long maxLenLive;
 
     /**
      * The garbage collection priority.
@@ -78,6 +78,11 @@ public class Chunk {
      */
     public long time;
 
+    /**
+     * The last used map id.
+     */
+    public int mapId;
+
     Chunk(int id) {
         this.id = id;
     }
@@ -91,22 +96,23 @@ public class Chunk {
      */
     static Chunk fromHeader(ByteBuffer buff, long start) {
         int pos = buff.position();
-        if (buff.get() != '{') {
-            throw DataUtils.newIllegalStateException(
-                    DataUtils.ERROR_FILE_CORRUPT,
-                    "File corrupt reading chunk at position {0}", start);
-        }
         byte[] data = new byte[Math.min(buff.remaining(), MAX_HEADER_LENGTH)];
-        // set the position to the start of the first page
         buff.get(data);
-        for (int i = 0; i < data.length; i++) {
-            if (data[i] == '\n') {
-                buff.position(pos + i + 2);
-                break;
+        try {
+            for (int i = 0; i < data.length; i++) {
+                if (data[i] == '\n') {
+                    // set the position to the start of the first page
+                    buff.position(pos + i + 1);
+                    String s = new String(data, 0, i, DataUtils.LATIN).trim();
+                    return fromString(s);
+                }
             }
+        } catch (Exception e) {
+            // there could be various reasons
         }
-        String s = new String(data, 0, data.length, DataUtils.UTF8);
-        return fromString(s);
+        throw DataUtils.newIllegalStateException(
+                DataUtils.ERROR_FILE_CORRUPT,
+                "File corrupt reading chunk at position {0}", start);
     }
 
     /**
@@ -116,9 +122,7 @@ public class Chunk {
      */
     void writeHeader(WriteBuffer buff, int minLength) {
         long pos = buff.position();
-        buff.put((byte) '{');
-        buff.put(asString().getBytes(DataUtils.UTF8));
-        buff.put((byte) '}');
+        buff.put(asString().getBytes(DataUtils.LATIN));
         while (buff.position() - pos < minLength - 1) {
             buff.put((byte) ' ');
         }
@@ -137,22 +141,23 @@ public class Chunk {
      */
     public static Chunk fromString(String s) {
         HashMap<String, String> map = DataUtils.parseMap(s);
-        int id = Integer.parseInt(map.get("chunk"), 16);
+        int id = DataUtils.readHexInt(map, "chunk", 0);
         Chunk c = new Chunk(id);
-        c.block = Long.parseLong(map.get("block"), 16);
-        c.blocks = Integer.parseInt(map.get("blocks"), 16);
-        c.pageCount = Integer.parseInt(map.get("pages"), 16);
-        c.pageCountLive = DataUtils.parseHexInt(map.get("livePages"), c.pageCount);
-        c.maxLength = Long.parseLong(map.get("max"), 16);
-        c.maxLengthLive = DataUtils.parseHexLong(map.get("liveMax"), c.maxLength);
-        c.metaRootPos = Long.parseLong(map.get("root"), 16);
-        c.time = Long.parseLong(map.get("time"), 16);
-        c.version = Long.parseLong(map.get("version"), 16);
+        c.block = DataUtils.readHexLong(map, "block", 0);
+        c.len = DataUtils.readHexInt(map, "len", 0);
+        c.pageCount = DataUtils.readHexInt(map, "pages", 0);
+        c.pageCountLive = DataUtils.readHexInt(map, "livePages", c.pageCount);
+        c.mapId = Integer.parseInt(map.get("map"), 16);
+        c.maxLength = DataUtils.readHexLong(map, "max", 0);
+        c.maxLenLive = DataUtils.readHexLong(map, "liveMax", c.maxLength);
+        c.metaRootPos = DataUtils.readHexLong(map, "root", 0);
+        c.time = DataUtils.readHexLong(map, "time", 0);
+        c.version = DataUtils.readHexLong(map, "version", 0);
         return c;
     }
 
     public int getFillRate() {
-        return (int) (maxLength == 0 ? 0 : 100 * maxLengthLive / maxLength);
+        return (int) (maxLength == 0 ? 0 : 100 * maxLenLive / maxLength);
     }
 
     @Override
@@ -172,20 +177,21 @@ public class Chunk {
      */
     public String asString() {
         StringBuilder buff = new StringBuilder();
-        buff.append("chunk:").append(Integer.toHexString(id)).
-            append(",block:").append(Long.toHexString(block)).
-            append(",blocks:").append(Integer.toHexString(blocks));
-        if (maxLength != maxLengthLive) {
-            buff.append(",liveMax:").append(Long.toHexString(maxLengthLive));
+        DataUtils.appendMap(buff, "chunk", id);
+        DataUtils.appendMap(buff, "block", block);
+        DataUtils.appendMap(buff, "len", len);
+        if (maxLength != maxLenLive) {
+            DataUtils.appendMap(buff, "liveMax", maxLenLive);
         }
         if (pageCount != pageCountLive) {
-            buff.append(",livePages:").append(Integer.toHexString(pageCountLive));
+            DataUtils.appendMap(buff, "livePages", pageCountLive);
         }
-        buff.append(",max:").append(Long.toHexString(maxLength)).
-            append(",pages:").append(Integer.toHexString(pageCount)).
-            append(",root:").append(Long.toHexString(metaRootPos)).
-            append(",time:").append(Long.toHexString(time)).
-            append(",version:").append(Long.toHexString(version));
+        DataUtils.appendMap(buff, "map", mapId);
+        DataUtils.appendMap(buff, "max", maxLength);
+        DataUtils.appendMap(buff, "pages", pageCount);
+        DataUtils.appendMap(buff, "root", metaRootPos);
+        DataUtils.appendMap(buff, "time", time);
+        DataUtils.appendMap(buff, "version", version);
         return buff.toString();
     }
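The renamed fields above (len, maxLenLive, plus the new mapId) all surface in the serialized chunk metadata exchanged by asString() and fromString(). The following is an editorial sketch, not part of the commit, of a round trip through that format; the sample line mirrors the chunk header example added to the MVStore.java class comment in this commit, and it relies only on the public members visible in this diff.

    // Sketch: round trip of the new chunk metadata format. All numeric values
    // are hex-encoded, and "len" replaces the old "blocks" key.
    import org.h2.mvstore.Chunk;

    public class ChunkHeaderRoundTrip {
        public static void main(String[] args) {
            String s = "chunk:1,block:2,len:1,map:6,max:1c0,pages:2," +
                    "root:4000004c8c,time:20a,version:1";
            Chunk c = Chunk.fromString(s);
            System.out.println(c.len);        // 1
            // "liveMax" is absent, so maxLenLive defaults to maxLength (0x1c0 = 448)
            System.out.println(c.maxLenLive);
            // re-serializes via DataUtils.appendMap, in the same key:hexValue form
            System.out.println(c.asString());
        }
    }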
h2/src/main/org/h2/mvstore/DataUtils.java

@@ -131,9 +131,14 @@ public class DataUtils {
     public static final int PAGE_MEMORY_CHILD = 16;
 
     /**
-     * Name of the character encoding format.
+     * The UTF-8 character encoding format.
      */
     public static final Charset UTF8 = Charset.forName("UTF-8");
 
+    /**
+     * The ISO Latin character encoding format.
+     */
+    public static final Charset LATIN = Charset.forName("ISO-8859-1");
+
     /**
      * An 0-size byte array.
@@ -554,7 +559,7 @@ public class DataUtils {
         }
         return buff;
     }
 
     /**
      * Append a key-value pair to the string builder. Keys may not contain a
      * colon. Values that contain a comma or a double quote are enclosed in
@@ -569,9 +574,16 @@ public class DataUtils {
             buff.append(',');
         }
         buff.append(key).append(':');
-        String v = value.toString();
-        if (v.indexOf(',') < 0 && v.indexOf('\"') < 0 && v.indexOf('}') < 0) {
-            buff.append(value);
+        String v;
+        if (value instanceof Long) {
+            v = Long.toHexString((Long) value);
+        } else if (value instanceof Integer) {
+            v = Integer.toHexString((Integer) value);
+        } else {
+            v = value.toString();
+        }
+        if (v.indexOf(',') < 0 && v.indexOf('\"') < 0) {
+            buff.append(v);
         } else {
             buff.append('\"');
             for (int i = 0, size = v.length(); i < size; i++) {
@@ -595,9 +607,6 @@ public class DataUtils {
     public static HashMap<String, String> parseMap(String s) {
         HashMap<String, String> map = New.hashMap();
         for (int i = 0, size = s.length(); i < size;) {
-            if (s.charAt(i) == '}') {
-                break;
-            }
             int startKey = i;
             i = s.indexOf(':', i);
             if (i < 0) {
@@ -610,9 +619,6 @@ public class DataUtils {
             char c = s.charAt(i++);
             if (c == ',') {
                 break;
-            } else if (c == '}') {
-                i--;
-                break;
             } else if (c == '\"') {
                 while (i < size) {
                     c = s.charAt(i++);
@@ -828,42 +834,51 @@ public class DataUtils {
     }
 
     /**
-     * Parse a string as a hexadecimal number.
+     * Read a hex long value from a map.
      *
-     * @param x the number
-     * @param defaultValue if x is null
+     * @param map the map
+     * @param key the key
+     * @param defaultValue if the value is null
      * @return the parsed value
      * @throws IllegalStateException if parsing fails
      */
-    public static long parseHexLong(String x, long defaultValue) {
-        if (x == null) {
+    public static long readHexLong(HashMap<String, ? extends Object> map, String key, long defaultValue) {
+        Object v = map.get(key);
+        if (v == null) {
             return defaultValue;
+        } else if (v instanceof Long) {
+            return (Long) v;
         }
         try {
-            return Long.parseLong(x, 16);
+            return Long.parseLong((String) v, 16);
         } catch (NumberFormatException e) {
             throw newIllegalStateException(ERROR_FILE_CORRUPT,
-                    "Error parsing the value {0}", x, e);
+                    "Error parsing the value {0}", v, e);
         }
     }
 
     /**
-     * Parse a string as a hexadecimal number.
+     * Read a hex int value from a map.
      *
-     * @param x the number
-     * @param defaultValue if x is null
+     * @param map the map
+     * @param key the key
+     * @param defaultValue if the value is null
      * @return the parsed value
      * @throws IllegalStateException if parsing fails
      */
-    public static int parseHexInt(String x, int defaultValue) {
-        if (x == null) {
+    public static int readHexInt(HashMap<String, ? extends Object> map, String key, int defaultValue) {
+        Object v = map.get(key);
+        if (v == null) {
             return defaultValue;
+        } else if (v instanceof Integer) {
+            return (Integer) v;
         }
         try {
-            return Integer.parseInt(x, 16);
+            // support unsigned hex value
+            return (int) Long.parseLong((String) v, 16);
         } catch (NumberFormatException e) {
             throw newIllegalStateException(ERROR_FILE_CORRUPT,
-                    "Error parsing the value {0}", x, e);
+                    "Error parsing the value {0}", v, e);
         }
     }
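The two new readers replace parseHexLong/parseHexInt and pair with the typed appendMap: Integer and Long values are written out as hex, and can be read back either as the original boxed number or as the parsed string. A minimal usage sketch, assuming only the public DataUtils methods that appear in this diff:

    // Sketch: writing and re-reading hex-encoded key:value pairs with the new helpers.
    import java.util.HashMap;
    import org.h2.mvstore.DataUtils;

    public class HexMapDemo {
        public static void main(String[] args) {
            StringBuilder buff = new StringBuilder();
            DataUtils.appendMap(buff, "chunk", 26);     // written as "chunk:1a"
            DataUtils.appendMap(buff, "block", 4096L);   // written as "block:1000"

            HashMap<String, String> map = DataUtils.parseMap(buff.toString());
            int chunk = DataUtils.readHexInt(map, "chunk", 0);        // 26
            long block = DataUtils.readHexLong(map, "block", 0);      // 4096
            long missing = DataUtils.readHexLong(map, "version", -1); // default used
            System.out.println(chunk + " " + block + " " + missing);
        }
    }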
h2/src/main/org/h2/mvstore/MVMap.java

@@ -70,10 +70,10 @@ public class MVMap<K, V> extends AbstractMap<K, V>
      * @param store the store
      * @param config the configuration
      */
-    protected void init(MVStore store, HashMap<String, String> config) {
+    protected void init(MVStore store, HashMap<String, Object> config) {
         this.store = store;
-        this.id = Integer.parseInt(config.get("id"), 16);
-        this.createVersion = DataUtils.parseHexLong(config.get("createVersion"), 0);
+        this.id = DataUtils.readHexInt(config, "id", 0);
+        this.createVersion = DataUtils.readHexLong(config, "createVersion", 0);
         this.writeVersion = store.getCurrentVersion();
     }
@@ -1036,9 +1036,9 @@ public class MVMap<K, V> extends AbstractMap<K, V>
     MVMap<K, V> openReadOnly() {
         MVMap<K, V> m = new MVMap<K, V>(keyType, valueType);
         m.readOnly = true;
-        HashMap<String, String> config = New.hashMap();
-        config.put("id", Integer.toHexString(id));
-        config.put("createVersion", Long.toHexString(createVersion));
+        HashMap<String, Object> config = New.hashMap();
+        config.put("id", id);
+        config.put("createVersion", createVersion);
         m.init(store, config);
         m.root = root;
         return m;
@@ -1097,7 +1097,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
             DataUtils.appendMap(buff, "name", name);
         }
         if (createVersion != 0) {
-            DataUtils.appendMap(buff, "createVersion", Long.toHexString(createVersion));
+            DataUtils.appendMap(buff, "createVersion", createVersion);
         }
         String type = getType();
         if (type != null) {
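With init() now taking HashMap<String, Object>, callers such as openReadOnly() and MVStore.openMap() put typed values ("id" as an Integer, "createVersion" as a Long) next to plain string entries parsed from the meta map. The following is a hypothetical sketch of that mixed configuration, assuming org.h2.util.New and the DataUtils readers from this diff:

    // Sketch only: a mixed map configuration as MVMap.init() now receives it.
    import java.util.HashMap;
    import org.h2.mvstore.DataUtils;
    import org.h2.util.New;

    public class MapConfigSketch {
        public static void main(String[] args) {
            HashMap<String, Object> config = New.hashMap();
            // entries parsed from the meta map ("map.<id>") are plain strings
            config.putAll(DataUtils.parseMap("name:data,createVersion:11"));
            config.put("id", 3);  // was: Integer.toHexString(id)
            // readHexInt/readHexLong accept either form
            System.out.println(DataUtils.readHexInt(config, "id", 0));             // 3
            System.out.println(DataUtils.readHexLong(config, "createVersion", 0)); // 17 (hex "11")
        }
    }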
h2/src/main/org/h2/mvstore/MVStore.java

@@ -37,6 +37,12 @@ store header: (blockSize) bytes
 (there are two headers for security at the beginning of the file,
 and there is a store header at the end of each chunk)
 
+H:2,block:0,blockSize:1000,chunk:0,created:143fd8e5767,format:1,fletcher:a3acedfb
+
+chunk:1,block:2,len:1,map:6,max:1c0,pages:2,root:4000004c8c,time:20a,version:1
+
+chunk:2,block:3,fletcher:ca8cb347
+
+maybe split chunk metadata into static and variable
 TODO:
 
 Documentation
@@ -59,7 +65,6 @@ TransactionStore:
 MVStore:
 - maybe reduce size of store header
-- maybe make the free space bitset operate on blocks
 - maybe let a chunk point to a list of potential next chunks
   (so no fixed location header is needed), similar to a skip list
 - document and review the file format
@@ -120,6 +125,7 @@ MVStore:
 - StreamStore: split blocks similar to rsync crypto, where the split is made
   "if the sum of the past 8196 bytes divides by 4096 with zero remainder"
 - Compression: try using a bloom filter (64 bit) before trying to match
+- LIRS cache: maybe remove 'mask' field, and dynamically grow the arrays
 
 */
@@ -142,7 +148,7 @@ public class MVStore {
     /**
      * The maximum length of the store header.
      */
-    static final int STORE_HEADER_LENGTH = 256;
+    static final int CHUNK_FOOTER_LENGTH = 64;
 
     private static final int FORMAT_WRITE = 1;
     private static final int FORMAT_READ = 1;
@@ -192,7 +198,7 @@ public class MVStore {
     private final ConcurrentHashMap<Integer, MVMap<?, ?>> maps =
             new ConcurrentHashMap<Integer, MVMap<?, ?>>();
 
-    private HashMap<String, String> storeHeader = New.hashMap();
+    private HashMap<String, Object> fileHeader = New.hashMap();
 
     private WriteBuffer writeBuffer;
@@ -274,9 +280,9 @@ public class MVStore {
         this.backgroundExceptionHandler = (UncaughtExceptionHandler) o;
         meta = new MVMapConcurrent<String, String>(StringDataType.INSTANCE,
                 StringDataType.INSTANCE);
-        HashMap<String, String> c = New.hashMap();
-        c.put("id", "0");
-        c.put("createVersion", Long.toString(currentVersion));
+        HashMap<String, Object> c = New.hashMap();
+        c.put("id", 0);
+        c.put("createVersion", currentVersion);
         meta.init(this, c);
         fileStore = (FileStore) config.get("fileStore");
         if (fileName == null && fileStore == null) {
@@ -311,13 +317,13 @@ public class MVStore {
             creationTime = 0;
             creationTime = getTime();
             lastCommitTime = creationTime;
-            storeHeader.put("blockSize", Integer.toHexString(BLOCK_SIZE));
-            storeHeader.put("format", Integer.toHexString(FORMAT_WRITE));
-            storeHeader.put("created", Long.toHexString(creationTime));
-            writeStoreHeader();
+            fileHeader.put("blockSize", BLOCK_SIZE);
+            fileHeader.put("format", FORMAT_WRITE);
+            fileHeader.put("created", creationTime);
+            writeFileHeader();
         } else {
-            readStoreHeader();
-            long format = DataUtils.parseHexLong(storeHeader.get("format"), 0);
+            readFileHeader();
+            long format = DataUtils.readHexLong(fileHeader, "format", 1);
             if (format > FORMAT_WRITE && !fileStore.isReadOnly()) {
                 throw DataUtils.newIllegalStateException(
                         DataUtils.ERROR_UNSUPPORTED_FORMAT,
@@ -325,7 +331,7 @@ public class MVStore {
                         "and the file was not opened in read-only mode",
                         format, FORMAT_WRITE);
             }
-            format = DataUtils.parseHexLong(storeHeader.get("formatRead"), format);
+            format = DataUtils.readHexLong(fileHeader, "formatRead", format);
             if (format > FORMAT_READ) {
                 throw DataUtils.newIllegalStateException(
                         DataUtils.ERROR_UNSUPPORTED_FORMAT,
@@ -382,8 +388,7 @@ public class MVStore {
     <T extends MVMap<?, ?>> T openMapVersion(long version, int mapId,
             MVMap<?, ?> template) {
         MVMap<String, String> oldMeta = getMetaMap(version);
-        String r = oldMeta.get("root." + Integer.toHexString(mapId));
-        long rootPos = DataUtils.parseHexLong(r, 0);
+        long rootPos = getRootPos(oldMeta, mapId);
         MVMap<?, ?> m = template.openReadOnly();
         m.setRootPos(rootPos, version);
         return (T) m;
@@ -419,7 +424,7 @@ public class MVStore {
         String x = meta.get("name." + name);
         int id;
         long root;
-        HashMap<String, String> c;
+        HashMap<String, Object> c;
         M map;
         if (x != null) {
             id = Integer.parseInt(x, 16);
@@ -430,20 +435,20 @@ public class MVStore {
             }
             map = builder.create();
             String config = meta.get("map." + x);
-            c = DataUtils.parseMap(config);
-            c.put("id", x);
+            c = New.hashMap();
+            c.putAll(DataUtils.parseMap(config));
+            c.put("id", id);
             map.init(this, c);
-            String r = meta.get("root." + x);
-            root = DataUtils.parseHexLong(r, 0);
+            root = getRootPos(meta, id);
         } else {
             c = New.hashMap();
             id = ++lastMapId;
-            x = Integer.toHexString(id);
-            c.put("id", x);
-            c.put("createVersion", Long.toHexString(currentVersion));
+            c.put("id", id);
+            c.put("createVersion", currentVersion);
             map = builder.create();
             map.init(this, c);
             markMetaChanged();
+            x = Integer.toHexString(id);
             meta.put("map." + x, map.asString(name));
             meta.put("name." + name, x);
             root = 0;
@@ -540,6 +545,9 @@ public class MVStore {
                     "Chunk {0} is invalid", header.id);
         }
         lastChunkId = header.id;
+        lastMapId = header.mapId;
+        currentVersion = header.version;
+        setWriteVersion(currentVersion);
         chunks.put(header.id, header);
         meta.setRootPos(header.metaRootPos, -1);
         chunks.put(header.id, header);
@@ -569,100 +577,106 @@ public class MVStore {
                 registerFreePage(currentVersion, c.id, 0, 0);
             }
             long start = c.block * BLOCK_SIZE;
-            int len = c.blocks * BLOCK_SIZE;
-            fileStore.markUsed(start, len);
+            int length = c.len * BLOCK_SIZE;
+            fileStore.markUsed(start, length);
         }
     }
 
-    private void readStoreHeader() {
-        currentVersion = -1;
-        // we don't know yet which chunk is the newest
-        long newestChunk = -1;
-        // read the last block of the file, and then the two first blocks
-        ByteBuffer buffLastBlock = fileStore.readFully(fileStore.size()
-                - BLOCK_SIZE, BLOCK_SIZE);
-        ByteBuffer buffFirst2Blocks = fileStore.readFully(0, BLOCK_SIZE * 2);
-        ByteBuffer buff = ByteBuffer.allocate(3 * BLOCK_SIZE);
-        buff.put(buffLastBlock);
-        buff.put(buffFirst2Blocks);
-        for (int i = 0; i < 3 * BLOCK_SIZE; i += BLOCK_SIZE) {
-            int start = i;
-            if (i == 0) {
-                start = BLOCK_SIZE - STORE_HEADER_LENGTH;
-            }
-            if (buff.array()[start] != '{') {
-                continue;
-            }
-            String s = new String(buff.array(), start, STORE_HEADER_LENGTH,
-                    DataUtils.UTF8).trim();
-            HashMap<String, String> m;
-            try {
-                m = DataUtils.parseMap(s);
-            } catch (IllegalStateException e) {
-                continue;
-            }
-            String f = m.remove("fletcher");
-            if (f == null) {
-                continue;
-            }
-            int check;
-            try {
-                check = (int) Long.parseLong(f, 16);
-            } catch (NumberFormatException e) {
-                continue;
-            }
-            s = s.substring(0, s.lastIndexOf("fletcher") - 1);
-            byte[] bytes = s.getBytes(DataUtils.UTF8);
-            int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
-            if (check != checksum) {
-                continue;
-            }
-            long chunk = Long.parseLong(m.get("chunk"), 16);
-            if (chunk > newestChunk) {
-                newestChunk = chunk;
-                storeHeader = m;
-                lastChunkBlock = Long.parseLong(m.get("block"), 16);
-                creationTime = Long.parseLong(m.get("created"), 16);
-                lastMapId = Integer.parseInt(m.get("map"), 16);
-                currentVersion = Long.parseLong(m.get("version"), 16);
-            }
-        }
-        if (currentVersion < 0) {
-            throw DataUtils.newIllegalStateException(
-                    DataUtils.ERROR_FILE_CORRUPT,
-                    "Store header is corrupt: {0}", fileStore);
-        }
-        setWriteVersion(currentVersion);
-        lastStoredVersion = -1;
-    }
+    private void readFileHeader() {
+        // we don't have a valid header yet
+        boolean validHeader = false;
+        // we don't know which chunk is the newest
+        long newestChunk = -1;
+        // read the last block of the file, and then the two first blocks
+        ByteBuffer fileHeaderBlocks = fileStore.readFully(0, 2 * BLOCK_SIZE);
+        byte[] buff = new byte[BLOCK_SIZE];
+        for (int i = 0; i <= BLOCK_SIZE; i += BLOCK_SIZE) {
+            fileHeaderBlocks.get(buff);
+            // the following can fail for various reasons
+            try {
+                String s = new String(buff, 0, BLOCK_SIZE, DataUtils.LATIN).trim();
+                HashMap<String, String> m = DataUtils.parseMap(s);
+                int check = DataUtils.readHexInt(m, "fletcher", 0);
+                m.remove("fletcher");
+                s = s.substring(0, s.lastIndexOf("fletcher") - 1);
+                byte[] bytes = s.getBytes(DataUtils.LATIN);
+                int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
+                if (check != checksum) {
+                    continue;
+                }
+                long chunk = DataUtils.readHexLong(m, "chunk", 0);
+                if (chunk > newestChunk) {
+                    newestChunk = chunk;
+                    fileHeader.putAll(m);
+                    lastChunkBlock = DataUtils.readHexLong(m, "block", 0);
+                    creationTime = DataUtils.readHexLong(m, "created", 0);
+                    validHeader = true;
+                }
+            } catch (Exception e) {
+                continue;
+            }
+        }
+        if (!validHeader) {
+            throw DataUtils.newIllegalStateException(
+                    DataUtils.ERROR_FILE_CORRUPT,
+                    "Store header is corrupt: {0}", fileStore);
+        }
+        ByteBuffer lastBlock = fileStore.readFully(fileStore.size()
+                - CHUNK_FOOTER_LENGTH, CHUNK_FOOTER_LENGTH);
+        buff = new byte[CHUNK_FOOTER_LENGTH];
+        lastBlock.get(buff);
+        // the following can fail for various reasons
+        try {
+            String s = new String(buff, DataUtils.LATIN).trim();
+            HashMap<String, String> m = DataUtils.parseMap(s);
+            int check = DataUtils.readHexInt(m, "fletcher", 0);
+            m.remove("fletcher");
+            s = s.substring(0, s.lastIndexOf("fletcher") - 1);
+            byte[] bytes = s.getBytes(DataUtils.LATIN);
+            int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
+            if (check == checksum) {
+                long chunk = DataUtils.readHexLong(m, "chunk", 0);
+                if (chunk > newestChunk) {
+                    fileHeader.putAll(m);
+                    lastChunkBlock = DataUtils.readHexLong(m, "block", 0);
+                    validHeader = true;
+                }
+            }
+        } catch (Exception e) {
+            // ignore
+        }
+        lastStoredVersion = -1;
+    }
 
-    private byte[] getStoreHeaderBytes(int minLength) {
-        StringBuilder buff = new StringBuilder("{H:2");
-        storeHeader.put("map", Integer.toHexString(lastMapId));
-        storeHeader.put("chunk", Integer.toHexString(lastChunkId));
-        storeHeader.put("block", Long.toHexString(lastChunkBlock));
-        storeHeader.put("version", Long.toHexString(currentVersion));
-        DataUtils.appendMap(buff, storeHeader);
-        byte[] bytes = buff.toString().getBytes(DataUtils.UTF8);
-        int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
-        DataUtils.appendMap(buff, "fletcher", Integer.toHexString(checksum));
-        buff.append("}");
-        if (buff.length() >= STORE_HEADER_LENGTH - 1) {
-            throw DataUtils.newIllegalStateException(
-                    DataUtils.ERROR_UNSUPPORTED_FORMAT,
-                    "Store header too large: {0}", buff);
-        }
-        while (buff.length() < minLength - 1) {
-            buff.append(' ');
-        }
-        buff.append("\n");
-        return buff.toString().getBytes(DataUtils.UTF8);
-    }
+    private byte[] getFileHeaderBytes() {
+        StringBuilder buff = new StringBuilder("H:2");
+        fileHeader.put("block", lastChunkBlock);
+        fileHeader.put("chunk", lastChunkId);
+        DataUtils.appendMap(buff, fileHeader);
+        byte[] bytes = buff.toString().getBytes(DataUtils.LATIN);
+        int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
+        DataUtils.appendMap(buff, "fletcher", checksum);
+        buff.append("\n");
+        return buff.toString().getBytes(DataUtils.LATIN);
+    }
+
+    private byte[] getChunkFooterBytes() {
+        StringBuilder buff = new StringBuilder();
+        fileHeader.put("chunk", lastChunkId);
+        fileHeader.put("block", lastChunkBlock);
+        DataUtils.appendMap(buff, "chunk", lastChunkId);
+        DataUtils.appendMap(buff, "block", lastChunkBlock);
+        byte[] bytes = buff.toString().getBytes(DataUtils.LATIN);
+        int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
+        DataUtils.appendMap(buff, "fletcher", checksum);
+        while (buff.length() < CHUNK_FOOTER_LENGTH - 1) {
+            buff.append(' ');
+        }
+        buff.append("\n");
+        return buff.toString().getBytes(DataUtils.LATIN);
+    }
 
-    private void writeStoreHeader() {
-        byte[] bytes = getStoreHeaderBytes(0);
+    private void writeFileHeader() {
+        byte[] bytes = getFileHeaderBytes();
         ByteBuffer header = ByteBuffer.allocate(2 * BLOCK_SIZE);
         header.put(bytes);
         header.position(BLOCK_SIZE);
@@ -869,12 +883,13 @@ public class MVStore {
         c.pageCount = Integer.MAX_VALUE;
         c.pageCountLive = Integer.MAX_VALUE;
         c.maxLength = Long.MAX_VALUE;
-        c.maxLengthLive = Long.MAX_VALUE;
+        c.maxLenLive = Long.MAX_VALUE;
         c.metaRootPos = Long.MAX_VALUE;
         c.block = Long.MAX_VALUE;
-        c.blocks = Integer.MAX_VALUE;
+        c.len = Integer.MAX_VALUE;
         c.time = time;
         c.version = version;
+        c.mapId = lastMapId;
         chunks.put(c.id, c);
         // force a metadata update
         meta.put(Chunk.getMetaKey(c.id), c.asString());
@@ -913,7 +928,7 @@ public class MVStore {
         c.pageCount = 0;
         c.pageCountLive = 0;
         c.maxLength = 0;
-        c.maxLengthLive = 0;
+        c.maxLenLive = 0;
         for (MVMap<?, ?> m : changed) {
             Page p = m.getRoot();
             if (p.getTotalCount() > 0) {
@@ -932,13 +947,13 @@ public class MVStore {
         // add the store header and round to the next block
         int length = MathUtils.roundUpInt(chunkLength +
-                STORE_HEADER_LENGTH, BLOCK_SIZE);
+                CHUNK_FOOTER_LENGTH, BLOCK_SIZE);
         buff.limit(length);
 
         // free up the space of unused chunks now
         for (Chunk x : removedChunks) {
             long start = x.block * BLOCK_SIZE;
-            int len = x.blocks * BLOCK_SIZE;
+            int len = x.len * BLOCK_SIZE;
             fileStore.free(start, len);
         }
@@ -956,16 +971,15 @@ public class MVStore {
         boolean storeAtEndOfFile = filePos + length >= fileStore.size();
 
         c.block = filePos / BLOCK_SIZE;
-        c.blocks = length / BLOCK_SIZE;
+        c.len = length / BLOCK_SIZE;
         c.metaRootPos = metaRoot.getPos();
         buff.position(0);
         c.writeHeader(buff, headerLength);
         lastChunkBlock = filePos / BLOCK_SIZE;
         revertTemp(storeVersion);
 
-        buff.position(buff.limit() - STORE_HEADER_LENGTH);
-        byte[] header = getStoreHeaderBytes(STORE_HEADER_LENGTH);
-        buff.put(header);
+        buff.position(buff.limit() - CHUNK_FOOTER_LENGTH);
+        buff.put(getChunkFooterBytes());
 
         buff.position(0);
         write(filePos, buff.getBuffer());
@@ -973,7 +987,7 @@ public class MVStore {
         // overwrite the header if required
         if (!storeAtEndOfFile) {
-            writeStoreHeader();
+            writeFileHeader();
             shrinkFileIfPossible(1);
         }
@@ -1068,29 +1082,29 @@ public class MVStore {
             }
             // no need to synchronize, as old entries
             // are not concurrently modified
-            c.maxLengthLive += f.maxLengthLive;
+            c.maxLenLive += f.maxLenLive;
             c.pageCountLive += f.pageCountLive;
             if (c.pageCountLive < 0) {
                 throw DataUtils.newIllegalStateException(
                         DataUtils.ERROR_INTERNAL,
                         "Corrupt page count {0}", c.pageCountLive);
             }
-            if (c.maxLengthLive < 0) {
+            if (c.maxLenLive < 0) {
                 throw DataUtils.newIllegalStateException(
                         DataUtils.ERROR_INTERNAL,
-                        "Corrupt max length {0}", c.maxLengthLive);
+                        "Corrupt max length {0}", c.maxLenLive);
             }
-            if (c.pageCount == 0 && c.maxLengthLive > 0) {
+            if (c.pageCount == 0 && c.maxLenLive > 0) {
                 throw DataUtils.newIllegalStateException(
                         DataUtils.ERROR_INTERNAL,
-                        "Corrupt max length {0}", c.maxLengthLive);
+                        "Corrupt max length {0}", c.maxLenLive);
             }
             modified.add(c);
         }
         it.remove();
     }
     for (Chunk c : modified) {
-        if (c.maxLengthLive == 0) {
+        if (c.maxLenLive == 0) {
             if (canOverwriteChunk(c, time)) {
                 removedChunks.add(c);
                 chunks.remove(c.id);
@@ -1141,8 +1155,8 @@ public class MVStore {
     private long getFileLengthInUse() {
         long size = 2 * BLOCK_SIZE;
         for (Chunk c : chunks.values()) {
-            if (c.blocks != Integer.MAX_VALUE) {
-                long x = (c.block + c.blocks) * BLOCK_SIZE;
+            if (c.len != Integer.MAX_VALUE) {
+                long x = (c.block + c.len) * BLOCK_SIZE;
                 size = Math.max(size, x);
             }
         }
@@ -1195,7 +1209,7 @@ public class MVStore {
         long time = getTime();
         ArrayList<Chunk> free = New.arrayList();
         for (Chunk c : chunks.values()) {
-            if (c.maxLengthLive == 0) {
+            if (c.maxLenLive == 0) {
                 if (canOverwriteChunk(c, time)) {
                     free.add(c);
                 }
@@ -1206,7 +1220,7 @@ public class MVStore {
             markMetaChanged();
             meta.remove(Chunk.getMetaKey(c.id));
             long start = c.block * BLOCK_SIZE;
-            int length = c.blocks * BLOCK_SIZE;
+            int length = c.len * BLOCK_SIZE;
             fileStore.free(start, length);
         }
         if (fileStore.getFillRate() == 100) {
@@ -1222,10 +1236,10 @@ public class MVStore {
         for (Chunk c : move) {
             WriteBuffer buff = getWriteBuffer();
             long start = c.block * BLOCK_SIZE;
-            int length = c.blocks * BLOCK_SIZE;
+            int length = c.len * BLOCK_SIZE;
             buff.limit(length);
             ByteBuffer readBuff = fileStore.readFully(start, length);
-            Chunk.fromHeader(readBuff, 0);
+            Chunk.fromHeader(readBuff, start);
             int chunkHeaderLen = readBuff.position();
             buff.position(chunkHeaderLen);
             buff.put(readBuff);
@@ -1235,9 +1249,8 @@ public class MVStore {
             c.block = end / BLOCK_SIZE;
             buff.position(0);
             c.writeHeader(buff, chunkHeaderLen);
-            buff.position(length - STORE_HEADER_LENGTH);
-            byte[] header = getStoreHeaderBytes(STORE_HEADER_LENGTH);
-            buff.put(header);
+            buff.position(length - CHUNK_FOOTER_LENGTH);
+            buff.put(getChunkFooterBytes());
             buff.position(0);
             write(end, buff.getBuffer());
             releaseWriteBuffer(buff);
@@ -1262,7 +1275,7 @@ public class MVStore {
         }
         WriteBuffer buff = getWriteBuffer();
         long start = c.block * BLOCK_SIZE;
-        int length = c.blocks * BLOCK_SIZE;
+        int length = c.len * BLOCK_SIZE;
         buff.limit(length);
         ByteBuffer readBuff = fileStore.readFully(start, length);
         Chunk.fromHeader(readBuff, 0);
@@ -1274,9 +1287,8 @@ public class MVStore {
         buff.position(0);
         c.block = pos / BLOCK_SIZE;
         c.writeHeader(buff, chunkHeaderLen);
-        buff.position(length - STORE_HEADER_LENGTH);
-        byte[] header = getStoreHeaderBytes(STORE_HEADER_LENGTH);
-        buff.put(header);
+        buff.position(length - CHUNK_FOOTER_LENGTH);
+        buff.put(getChunkFooterBytes());
         buff.position(0);
         write(pos, buff.getBuffer());
         releaseWriteBuffer(buff);
@@ -1324,7 +1336,7 @@ public class MVStore {
         long maxLengthSum = 0, maxLengthLiveSum = 0;
         for (Chunk c : chunks.values()) {
             maxLengthSum += c.maxLength;
-            maxLengthLiveSum += c.maxLengthLive;
+            maxLengthLiveSum += c.maxLenLive;
         }
         if (maxLengthSum <= 0) {
             // avoid division by 0
@@ -1369,10 +1381,10 @@ public class MVStore {
         long moved = 0;
         Chunk move = null;
         for (Chunk c : old) {
-            if (move != null && moved + c.maxLengthLive > averageMaxLength) {
+            if (move != null && moved + c.maxLenLive > averageMaxLength) {
                 break;
             }
-            moved += c.maxLengthLive;
+            moved += c.maxLenLive;
             move = c;
         }
@@ -1398,8 +1410,8 @@ public class MVStore {
     private void copyLive(Chunk chunk, ArrayList<Chunk> old) {
         long start = chunk.block * BLOCK_SIZE;
-        int chunkLength = chunk.blocks * BLOCK_SIZE;
-        ByteBuffer buff = fileStore.readFully(start, chunkLength);
+        int length = chunk.len * BLOCK_SIZE;
+        ByteBuffer buff = fileStore.readFully(start, length);
         Chunk.fromHeader(buff, start);
         int pagesRemaining = chunk.pageCount;
         markMetaChanged();
@@ -1423,7 +1435,7 @@ public class MVStore {
             }
             buff.position(offset);
             Page page = new Page(map, 0);
-            page.read(buff, chunk.id, buff.position(), chunkLength);
+            page.read(buff, chunk.id, buff.position(), length);
             for (int i = 0; i < page.getKeyCount(); i++) {
                 Object k = page.getKey(i);
                 Page p = map.getPage(k);
@@ -1535,7 +1547,7 @@ public class MVStore {
                     f = new Chunk(chunkId);
                     freed.put(chunkId, f);
                 }
-                f.maxLengthLive -= maxLengthLive;
+                f.maxLenLive -= maxLengthLive;
                 f.pageCountLive -= pageCount;
             }
         }
@@ -1793,18 +1805,21 @@ public class MVStore {
                 }
                 chunks.remove(lastChunkId);
                 long start = last.block * BLOCK_SIZE;
-                int len = last.blocks * BLOCK_SIZE;
-                fileStore.free(start, len);
-                // need to overwrite the last page,
-                // so that old end headers is not used
-                long pos = start + len - STORE_HEADER_LENGTH;
-                ByteBuffer header = ByteBuffer.allocate(STORE_HEADER_LENGTH);
-                write(pos, header);
+                int length = last.len * BLOCK_SIZE;
+                fileStore.free(start, length);
+                // need to overwrite the chunk,
+                // so it can not be used
+                WriteBuffer buff = getWriteBuffer();
+                buff.limit(length);
+                // buff.clear() does not set the data
+                Arrays.fill(buff.getBuffer().array(), (byte) 0);
+                write(start, buff.getBuffer());
+                releaseWriteBuffer(buff);
                 lastChunkId--;
             }
             lastChunkBlock = last.block;
-            writeStoreHeader();
-            readStoreHeader();
+            writeFileHeader();
+            readFileHeader();
             readMeta();
         }
         for (MVMap<?, ?> m : New.arrayList(maps.values())) {
@@ -1814,9 +1829,7 @@ public class MVStore {
                 maps.remove(id);
             } else {
                 if (loadFromFile) {
-                    String r = meta.get("root." + Integer.toHexString(id));
-                    long root = DataUtils.parseHexLong(r, 0);
-                    m.setRootPos(root, -1);
+                    m.setRootPos(getRootPos(meta, id), -1);
                 }
             }
@@ -1829,6 +1842,11 @@ public class MVStore {
         currentVersion = version;
         setWriteVersion(version);
     }
 
+    private static long getRootPos(MVMap<String, String> map, int mapId) {
+        String root = map.get("root." + Integer.toHexString(mapId));
+        return root == null ? 0 : Long.parseLong(root, 16);
+    }
+
     private void revertTemp(long storeVersion) {
         for (Iterator<Long> it = freedPageSpace.keySet().iterator(); it.hasNext();) {
@@ -1869,8 +1887,8 @@ public class MVStore {
      *
      * @return the store header
      */
-    public Map<String, String> getStoreHeader() {
-        return storeHeader;
+    public Map<String, Object> getStoreHeader() {
+        return fileHeader;
     }
 
     private void checkOpen() {
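The fixed-size chunk footer introduced here (CHUNK_FOOTER_LENGTH = 64) is a key:value line protected by a Fletcher-32 checksum, written by getChunkFooterBytes() and validated in readFileHeader(). The sketch below reproduces that write/verify cycle outside the store; it is illustrative only and assumes DataUtils.getFletcher32 and DataUtils.LATIN are accessible as shown in this diff.

    // Sketch: build a chunk footer and verify its Fletcher-32 checksum,
    // mirroring getChunkFooterBytes() and the read side in readFileHeader().
    import java.util.HashMap;
    import org.h2.mvstore.DataUtils;

    public class ChunkFooterDemo {
        static final int CHUNK_FOOTER_LENGTH = 64;  // mirrors MVStore.CHUNK_FOOTER_LENGTH

        public static void main(String[] args) {
            // write side: serialize chunk id and block, then append the checksum
            StringBuilder buff = new StringBuilder();
            DataUtils.appendMap(buff, "chunk", 2);
            DataUtils.appendMap(buff, "block", 3L);
            byte[] bytes = buff.toString().getBytes(DataUtils.LATIN);
            int checksum = DataUtils.getFletcher32(bytes, bytes.length / 2 * 2);
            DataUtils.appendMap(buff, "fletcher", checksum);
            while (buff.length() < CHUNK_FOOTER_LENGTH - 1) {
                buff.append(' ');
            }
            buff.append("\n");
            String footer = buff.toString();

            // read side: strip the checksum entry and recompute it
            String s = footer.trim();
            HashMap<String, String> m = DataUtils.parseMap(s);
            int check = DataUtils.readHexInt(m, "fletcher", 0);
            s = s.substring(0, s.lastIndexOf("fletcher") - 1);
            byte[] b = s.getBytes(DataUtils.LATIN);
            boolean valid = check == DataUtils.getFletcher32(b, b.length / 2 * 2);
            System.out.println("footer valid: " + valid);
        }
    }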
h2/src/main/org/h2/mvstore/MVStoreTool.java

@@ -73,13 +73,7 @@ public class MVStoreTool {
             block.rewind();
             DataUtils.readFully(file, pos, block);
             block.rewind();
-            if (block.get() != '{') {
-                block.position(MVStore.BLOCK_SIZE - MVStore.STORE_HEADER_LENGTH);
-                if (block.get() != '{') {
-                    continue;
-                }
-            }
-            byte headerType = block.get();
+            int headerType = block.get();
             if (headerType == 'H') {
                 pw.println("    store header at " + Long.toHexString(pos));
                 pw.println("    " + new String(block.array(), "UTF-8").trim());
@@ -92,12 +86,12 @@ public class MVStoreTool {
             }
             block.position(0);
             Chunk c = Chunk.fromHeader(block, pos);
-            int chunkLength = c.blocks * MVStore.BLOCK_SIZE;
+            int length = c.len * MVStore.BLOCK_SIZE;
             pw.println("    " + c.toString());
-            ByteBuffer chunk = ByteBuffer.allocate(chunkLength);
+            ByteBuffer chunk = ByteBuffer.allocate(length);
             DataUtils.readFully(file, pos, chunk);
             int p = block.position();
-            pos += chunkLength;
+            pos += length;
             int remaining = c.pageCount;
             while (remaining > 0) {
                 chunk.position(p);
@@ -153,12 +147,9 @@ public class MVStoreTool {
                     }
                 }
             }
-            chunk.position(chunk.limit() - MVStore.STORE_HEADER_LENGTH);
-            if (chunk.get() == '{' && chunk.get() == 'H') {
-                pw.println("    store header");
-                pw.println("    " + new String(chunk.array(), chunk.position() - 2,
-                        MVStore.STORE_HEADER_LENGTH, "UTF-8").trim());
-            }
+            chunk.position(chunk.limit() - MVStore.CHUNK_FOOTER_LENGTH);
+            pw.println("    store header");
+            pw.println("    " + new String(chunk.array(), chunk.position(),
+                    MVStore.CHUNK_FOOTER_LENGTH, "UTF-8").trim());
         } catch (IOException e) {
             pw.println("ERROR: " + e);
h2/src/main/org/h2/mvstore/Page.java

@@ -841,7 +841,7 @@ public class Page {
         store.cachePage(pos, this, getMemory());
         long max = DataUtils.getPageMaxLength(pos);
         chunk.maxLength += max;
-        chunk.maxLengthLive += max;
+        chunk.maxLenLive += max;
         chunk.pageCount++;
         chunk.pageCountLive++;
     }
h2/src/test/org/h2/test/store/SequenceMap.java

@@ -33,7 +33,7 @@ public class SequenceMap extends MVMap<Long, Long> {
     }
 
     @Override
-    public void init(MVStore store, HashMap<String, String> config) {
+    public void init(MVStore store, HashMap<String, Object> config) {
         super.init(store, config);
     }
h2/src/test/org/h2/test/store/TestDataUtils.java

@@ -80,7 +80,7 @@ public class TestDataUtils extends TestBase {
         DataUtils.appendMap(buff, "c", "1,2");
         DataUtils.appendMap(buff, "d", "\"test\"");
         DataUtils.appendMap(buff, "e", "}");
-        assertEquals(":,a:1,b:\",\",c:\"1,2\",d:\"\\\"test\\\"\",e:\"}\"", buff.toString());
+        assertEquals(":,a:1,b:\",\",c:\"1,2\",d:\"\\\"test\\\"\",e:}", buff.toString());
         HashMap<String, String> m = DataUtils.parseMap(buff.toString());
         assertEquals(6, m.size());
h2/src/test/org/h2/test/store/TestMVStore.java

@@ -212,14 +212,21 @@ public class TestMVStore extends TestBase {
                 encryptionKey("007".toCharArray()).
                 fileName(fileName).
                 open();
-        Map<String, String> header = s.getStoreHeader();
-        assertEquals("1", header.get("format"));
+        Map<String, Object> header = s.getStoreHeader();
+        assertEquals("1", header.get("format").toString());
         header.put("formatRead", "1");
         header.put("format", "2");
         MVMap<Integer, String> m = s.openMap("data");
+        // this is to ensure the file header is overwritten
+        for (int i = 0; i < 10; i++) {
+            m.put(0, "Hello World " + i);
+            s.commit();
+            if (i > 5) {
+                s.setRetentionTime(0);
+            }
+        }
         m.put(0, "Hello World");
         s.close();
         try {
             s = new MVStore.Builder().
                     encryptionKey("007".toCharArray()).
@@ -523,10 +530,17 @@ public class TestMVStore extends TestBase {
         s = openStore(fileName);
         m = s.openMap("test");
         m.put(1, 1);
-        Map<String, String> header = s.getStoreHeader();
-        int format = Integer.parseInt(header.get("format"));
+        Map<String, Object> header = s.getStoreHeader();
+        int format = Integer.parseInt(header.get("format").toString());
         assertEquals(1, format);
         header.put("format", Integer.toString(format + 1));
+        // ensure the file header is overwritten
+        s.commit();
+        m.put(1, 10);
+        s.commit();
+        m.put(1, 20);
+        s.setRetentionTime(0);
+        s.commit();
         s.close();
         try {
             openStore(fileName).close();
@@ -649,16 +663,23 @@ public class TestMVStore extends TestBase {
         String fileName = getBaseDir() + "/testFileHeader.h3";
         MVStore s = openStore(fileName);
         long time = System.currentTimeMillis();
-        assertEquals("1", s.getStoreHeader().get("format"));
-        long creationTime = Long.parseLong(
-                s.getStoreHeader().get("created"), 16);
+        Map<String, Object> m = s.getStoreHeader();
+        assertEquals("1", m.get("format").toString());
+        long creationTime = (Long) m.get("created");
         assertTrue(Math.abs(time - creationTime) < 100);
-        s.getStoreHeader().put("test", "123");
+        m.put("test", "123");
         MVMap<Integer, Integer> map = s.openMap("test");
         map.put(10, 100);
+        // ensure the file header is overwritten
+        s.commit();
+        map.put(10, 110);
+        s.commit();
+        map.put(1, 120);
+        s.setRetentionTime(0);
+        s.commit();
         s.close();
         s = openStore(fileName);
-        assertEquals("123", s.getStoreHeader().get("test"));
+        assertEquals("123", s.getStoreHeader().get("test").toString());
         s.close();
     }