Unverified commit f33de19b, authored by Andrei Tokar, committed by GitHub

Merge pull request #1560 from h2database/defrag_oom

Defrag OOM
@@ -1053,7 +1053,10 @@ public class MVMap<K, V> extends AbstractMap<K, V>
      * @return version
      */
     public final long getVersion() {
-        RootReference rootReference = getRoot();
+        return getVersion(getRoot());
+    }
+
+    private long getVersion(RootReference rootReference) {
         RootReference previous = rootReference.previous;
         return previous == null || previous.root != rootReference.root ||
                 previous.appendCounter != rootReference.appendCounter ?
@@ -1061,7 +1064,10 @@ public class MVMap<K, V> extends AbstractMap<K, V>
     }
 
     final boolean hasChangesSince(long version) {
-        return getVersion() > version;
+        RootReference rootReference = getRoot();
+        Page root = rootReference.root;
+        return !root.isSaved() && root.getTotalCount() > 0 ||
+                getVersion(rootReference) > version;
     }
 
     public boolean isSingleWriter() {
@@ -1153,28 +1159,33 @@ public class MVMap<K, V> extends AbstractMap<K, V>
         MVStore.TxCounter txCounter = store.registerVersionUsage();
         try {
             beforeWrite();
-            setRoot(copy(sourceMap.getRootPage()));
+            copy(sourceMap.getRootPage(), null, 0);
         } finally {
             store.deregisterVersionUsage(txCounter);
         }
     }
 
-    private Page copy(Page source) {
+    private Page copy(Page source, Page parent, int index) {
         Page target = source.copy(this);
-        store.registerUnsavedPage(target.getMemory());
+        if (parent == null) {
+            setRoot(target);
+        } else {
+            parent.setChild(index, target);
+        }
         if (!source.isLeaf()) {
             for (int i = 0; i < getChildPageCount(target); i++) {
                 if (source.getChildPagePos(i) != 0) {
                     // position 0 means no child
                     // (for example the last entry of an r-tree node)
                     // (the MVMap is also used for r-trees for compacting)
-                    Page child = copy(source.getChildPage(i));
-                    target.setChild(i, child);
+                    copy(source.getChildPage(i), target, i);
                 }
             }
-            setRoot(target);
-            beforeWrite();
+            target.setComplete();
         }
+        store.registerUnsavedPage(target.getMemory());
         if (store.isSaveNeeded()) {
             store.commit();
         }
         return target;
     }
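The reworked copy() wires each copied page into the target tree right away (as the new root, or via parent.setChild), marks a branch page complete only after all of its children have been copied, and lets the store commit whenever isSaveNeeded() reports pressure. That is what keeps an entire defragmented tree from having to stay in memory at once. Below is a minimal, standalone sketch of the same idea; Node, copyTree and maybeFlush are illustrative names for this sketch, not H2 API.

```java
// Simplified, standalone illustration of the copy strategy above:
// attach each copied node to its parent immediately, so a partially
// copied subtree can already be written out when memory runs low.
// Node, copyTree and maybeFlush are illustrative names, not H2 API.
import java.util.ArrayList;
import java.util.List;

class IncrementalTreeCopy {
    static class Node {
        final String value;
        final List<Node> children = new ArrayList<>();
        Node(String value) { this.value = value; }
    }

    private int unsavedNodes;

    Node copyTree(Node source, Node parent) {
        Node target = new Node(source.value);
        if (parent != null) {
            parent.children.add(target);   // link into the new tree right away
        }
        for (Node child : source.children) {
            copyTree(child, target);       // children attach themselves to target
        }
        unsavedNodes++;
        maybeFlush();                      // analogous to store.isSaveNeeded()/commit()
        return target;
    }

    private void maybeFlush() {
        if (unsavedNodes >= 1000) {        // arbitrary budget for this sketch
            // a real store would persist the completed pages here
            unsavedNodes = 0;
        }
    }

    public static void main(String[] args) {
        Node root = new Node("root");
        root.children.add(new Node("a"));
        root.children.add(new Node("b"));
        Node copy = new IncrementalTreeCopy().copyTree(root, null);
        System.out.println(copy.children.size()); // 2
    }
}
```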
@@ -1186,7 +1197,6 @@ public class MVMap<K, V> extends AbstractMap<K, V>
      * @return potentially updated RootReference
      */
     private RootReference flushAppendBuffer(RootReference rootReference) {
-        beforeWrite();
         int attempt = 0;
         int keyCount;
         while((keyCount = rootReference.getAppendCounter()) > 0) {
@@ -1276,6 +1286,7 @@ public class MVMap<K, V> extends AbstractMap<K, V>
         RootReference rootReference = getRootInternal();
         int appendCounter = rootReference.getAppendCounter();
         if (appendCounter >= keysPerPage) {
+            beforeWrite();
             rootReference = flushAppendBuffer(rootReference);
             appendCounter = rootReference.getAppendCounter();
             assert appendCounter < keysPerPage;
......
@@ -130,7 +130,7 @@ MVStore:
 /**
  * A persistent storage for maps.
  */
-public class MVStore {
+public class MVStore implements AutoCloseable {
 
     /**
      * The block size (physical sector size) of the disk. The store header is
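Since MVStore now implements AutoCloseable, callers can rely on try-with-resources instead of explicit try/finally blocks, which is exactly what the MVStoreTool.compact rewrite further down does. A minimal usage sketch; the file name and map name are placeholders:

```java
// try-with-resources closes the store even if an exception (e.g. OOM) is thrown.
// "data.mv.db" and "test" are placeholder names for this sketch.
import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;

public class StoreUsage {
    public static void main(String[] args) {
        try (MVStore store = new MVStore.Builder().fileName("data.mv.db").open()) {
            MVMap<Integer, String> map = store.openMap("test");
            map.put(1, "hello");
            store.commit();
        } // store.close() is called automatically here
    }
}
```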
@@ -1326,10 +1326,8 @@ public class MVStore {
             shrinkFileIfPossible(1);
         }
         for (Page p : changed) {
-            if (p.getTotalCount() > 0) {
             p.writeEnd();
-            }
         }
         metaRoot.writeEnd();
         // some pages might have been changed in the meantime (in the newest
@@ -1346,7 +1344,7 @@ public class MVStore {
      */
     private void freeUnusedIfNeeded(long time) {
         int freeDelay = retentionTime / 5;
-        if (time >= lastFreeUnusedChunks + freeDelay) {
+        if (time - lastFreeUnusedChunks >= freeDelay) {
             // set early in case it fails (out of memory or so)
             lastFreeUnusedChunks = time;
             freeUnusedChunks(true);
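The rewritten condition `time - lastFreeUnusedChunks >= freeDelay` is the overflow-safe form of an elapsed-time check: the sum `lastFreeUnusedChunks + freeDelay` can overflow, while the difference stays correct as long as the real elapsed time fits in a long (the same idiom recommended for comparing System.nanoTime() values). A small self-contained demonstration, with artificial values chosen only to expose the failure mode:

```java
// Demonstrates why "now - last >= delay" is safer than "now >= last + delay":
// the addition on the right-hand side can overflow; the subtraction cannot.
public class ElapsedTimeCheck {
    public static void main(String[] args) {
        long delay = 1_000;                        // required delay
        long last = Long.MAX_VALUE - 500;          // artificial timestamp near overflow
        long now  = last + 400;                    // only 400 units have elapsed

        System.out.println(now >= last + delay);   // true  -> wrong: last + delay overflowed
        System.out.println(now - last >= delay);   // false -> correct: 400 of 1000 elapsed
    }
}
```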
@@ -1391,6 +1389,7 @@ public class MVStore {
      * @param fast if true, simplified version is used, which assumes that recent chunks
      *             are still in-use and do not scan recent versions of the store.
      *             Also is this case only oldest available version of the store is scanned.
+     * @return set of chunk ids in-use, or null if all chunks should be considered in-use
      */
     private Set<Integer> collectReferencedChunks(boolean fast) {
         assert lastChunk != null;
@@ -1428,6 +1427,15 @@ public class MVStore {
         }
     }
 
+    /**
+     * Scans all map of a particular store version and marks visited chunks as in-use.
+     * @param rootReference of the meta map of the version
+     * @param collector to report visited chunks to
+     * @param executorService to use for parallel processing
+     * @param executingThreadCounter counter for threads already in use
+     * @param inspectedRoots set of page positions for map's roots already inspected
+     *                       or null if not to be used
+     */
     private void inspectVersion(MVMap.RootReference rootReference, ChunkIdsCollector collector,
                                 ThreadPoolExecutor executorService,
                                 AtomicInteger executingThreadCounter,
@@ -1443,12 +1451,11 @@ public class MVStore {
         }
         for (Cursor<String, String> c = new Cursor<>(rootPage, "root."); c.hasNext(); ) {
             String key = c.next();
-            assert key != null;
             if (!key.startsWith("root.")) {
                 break;
             }
             pos = DataUtils.parseHexLong(c.getValue());
-            assert DataUtils.isPageSaved(pos);
+            if (DataUtils.isPageSaved(pos)) {
             if (inspectedRoots == null || inspectedRoots.add(pos)) {
                 // to allow for something like "root.tmp.123" to be processed
                 int mapId = DataUtils.parseHexInt(key.substring(key.lastIndexOf('.') + 1));
@@ -1457,6 +1464,7 @@ public class MVStore {
                 }
             }
         }
+        }
     }
 
     final class ChunkIdsCollector {
@@ -1641,12 +1649,12 @@ public class MVStore {
                 }
                 freedPageSpace.clear();
             }
-            for (Chunk c : modified) {
-                meta.put(Chunk.getMetaKey(c.id), c.asString());
-            }
             if (modified.isEmpty()) {
                 break;
             }
+            for (Chunk c : modified) {
+                meta.put(Chunk.getMetaKey(c.id), c.asString());
+            }
             markMetaChanged();
         }
     }
......
@@ -481,30 +481,22 @@ public class MVStoreTool {
      * @param compress whether to compress the data
      */
     public static void compact(String sourceFileName, String targetFileName, boolean compress) {
-        MVStore source = new MVStore.Builder().
-                fileName(sourceFileName).
-                readOnly().
-                open();
+        try (MVStore source = new MVStore.Builder().
+                fileName(sourceFileName).readOnly().open()) {
             // Bugfix - Add double "try-finally" statements to close source and target stores for
             //releasing lock and file resources in these stores even if OOM occurs.
             // Fix issues such as "Cannot delete file "/h2/data/test.mv.db.tempFile" [90025-197]"
             //when client connects to this server and reopens this store database in this process.
             // @since 2018-09-13 little-pan
-        try{
             FileUtils.delete(targetFileName);
             MVStore.Builder b = new MVStore.Builder().
                     fileName(targetFileName);
             if (compress) {
                 b.compress();
             }
-            MVStore target = b.open();
-            try{
+            try (MVStore target = b.open()) {
                 compact(source, target);
-            }finally{
-                target.close();
             }
-        }finally{
-            source.close();
         }
     }
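With both stores managed by try-with-resources, the file handles and locks are released even when the copy fails with an OutOfMemoryError. Calling the offline compaction from code is unchanged; a minimal usage sketch, with placeholder file names:

```java
// Offline compaction of an MVStore file, as used by SHUTDOWN DEFRAG.
// "data.mv.db" and "data.compacted.mv.db" are placeholder file names.
import org.h2.mvstore.MVStoreTool;

public class CompactFile {
    public static void main(String[] args) {
        // copies all maps from the source file into a freshly written target file
        MVStoreTool.compact("data.mv.db", "data.compacted.mv.db", true);
    }
}
```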
@@ -515,6 +507,10 @@ public class MVStoreTool {
      * @param target the target store
      */
     public static void compact(MVStore source, MVStore target) {
+        int autoCommitDelay = target.getAutoCommitDelay();
+        int retentionTime = target.getRetentionTime();
+        target.setAutoCommitDelay(0);
+        target.setRetentionTime(Integer.MAX_VALUE); // disable unused chunks collection
         MVMap<String, String> sourceMeta = source.getMetaMap();
         MVMap<String, String> targetMeta = target.getMetaMap();
         for (Entry<String, String> m : sourceMeta.entrySet()) {
@@ -540,6 +536,8 @@ public class MVStoreTool {
             MVMap<Object, Object> targetMap = target.openMap(mapName, mp);
             targetMap.copyFrom(sourceMap);
         }
+        target.setRetentionTime(retentionTime);
+        target.setAutoCommitDelay(autoCommitDelay);
     }
 
     /**
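compact(MVStore, MVStore) now remembers the target's auto-commit delay and retention time, switches them to manual commits and "never drop chunks" for the duration of the bulk copy, and restores them at the end, so the background writer and the unused-chunk collector cannot interfere with the copy. A generic sketch of this save-override-restore pattern; BulkTarget and its accessors are illustrative placeholders, not H2 API, and the try/finally around the restore is an extra precaution this sketch adds for clarity:

```java
// Generic save/override/restore pattern used by the compact() change above.
// BulkTarget and its accessors are illustrative placeholders, not H2 API.
class BulkTarget {
    private int autoCommitDelay = 1000;
    private int retentionTime = 45_000;
    int getAutoCommitDelay() { return autoCommitDelay; }
    void setAutoCommitDelay(int millis) { autoCommitDelay = millis; }
    int getRetentionTime() { return retentionTime; }
    void setRetentionTime(int millis) { retentionTime = millis; }
    void copyEverythingFrom(Object source) { /* bulk copy happens here */ }
}

class BulkCopy {
    static void copy(Object source, BulkTarget target) {
        int autoCommitDelay = target.getAutoCommitDelay();  // remember current settings
        int retentionTime = target.getRetentionTime();
        target.setAutoCommitDelay(0);                  // no background commits during copy
        target.setRetentionTime(Integer.MAX_VALUE);    // keep all chunks until we finish
        try {
            target.copyEverythingFrom(source);
        } finally {
            target.setRetentionTime(retentionTime);    // restore original settings
            target.setAutoCommitDelay(autoCommitDelay);
        }
    }
}
```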
......
@@ -813,6 +813,12 @@ public abstract class Page implements Cloneable
         return mem;
     }
 
+    public boolean isComplete() {
+        return true;
+    }
+
+    public void setComplete() {}
+
     /**
      * Remove the page.
      */
@@ -883,16 +889,15 @@ public abstract class Page implements Cloneable
         void clearPageReference() {
             if (page != null) {
-                if (!page.isSaved()) {
-                    throw DataUtils.newIllegalStateException(
-                            DataUtils.ERROR_INTERNAL, "Page not written");
-                }
                 page.writeEnd();
+                assert page.isSaved() || !page.isComplete();
+                if (page.isSaved()) {
                     assert pos == page.getPos();
-                assert count == page.getTotalCount();
+                    assert count == page.getTotalCount() : count + " != " + page.getTotalCount();
                     page = null;
                 }
             }
+        }
 
         long getPos() {
             return pos;
@@ -900,7 +905,7 @@ public abstract class Page implements Cloneable
         void resetPos() {
             Page p = page;
-            if (p != null) {
+            if (p != null && p.isSaved()) {
                 pos = p.getPos();
                 assert count == p.getTotalCount();
             }
@@ -910,12 +915,12 @@ public abstract class Page implements Cloneable
         public String toString() {
             return "Cnt:" + count + ", pos:" + DataUtils.getPageChunkId(pos) +
                     "-" + DataUtils.getPageOffset(pos) + ":" + DataUtils.getPageMaxLength(pos) +
-                    (DataUtils.getPageType(pos) == 0 ? " leaf" : " node") + ", " + page;
+                    (page == null ? DataUtils.getPageType(pos) == 0 : page.isLeaf() ? " leaf" : " node") + ", " + page;
         }
     }
 
-    private static final class NonLeaf extends Page
+    private static class NonLeaf extends Page
     {
         /**
          * The child page references.
@@ -950,10 +955,7 @@ public abstract class Page implements Cloneable
         @Override
         public Page copy(MVMap<?, ?> map) {
-            // replace child pages with empty pages
-            PageReference[] children = new PageReference[this.children.length];
-            Arrays.fill(children, PageReference.EMPTY);
-            return new NonLeaf(map, this, children, 0);
+            return new IncompleteNonLeaf(map, this);
         }
 
         @Override
@@ -1012,7 +1014,7 @@ public abstract class Page implements Cloneable
         @Override
         public long getTotalCount() {
-            assert totalCount == calculateTotalCount() :
+            assert !isComplete() || totalCount == calculateTotalCount() :
                     "Total count: " + totalCount + " != " + calculateTotalCount();
             return totalCount;
         }
@@ -1026,6 +1028,10 @@ public abstract class Page implements Cloneable
             return check;
         }
 
+        protected void recalculateTotalCount() {
+            totalCount = calculateTotalCount();
+        }
+
         @Override
         long getCounts(int index) {
             return children[index].count;
@@ -1150,6 +1156,15 @@ public abstract class Page implements Cloneable
         void writeUnsavedRecursive(Chunk chunk, WriteBuffer buff) {
             if (!isSaved()) {
                 int patch = write(chunk, buff);
+                writeChildrenRecursive(chunk, buff);
+                int old = buff.position();
+                buff.position(patch);
+                writeChildren(buff, false);
+                buff.position(old);
+            }
+        }
+
+        void writeChildrenRecursive(Chunk chunk, WriteBuffer buff) {
             int len = getRawChildPageCount();
             for (int i = 0; i < len; i++) {
                 PageReference ref = children[i];
@@ -1159,11 +1174,6 @@ public abstract class Page implements Cloneable
                     ref.resetPos();
                 }
             }
-                int old = buff.position();
-                buff.position(patch);
-                writeChildren(buff, false);
-                buff.position(old);
-            }
         }
 
         @Override
@@ -1202,6 +1212,48 @@ public abstract class Page implements Cloneable
     }
 
+    private static class IncompleteNonLeaf extends NonLeaf {
+
+        private boolean complete;
+
+        IncompleteNonLeaf(MVMap<?, ?> map, NonLeaf source) {
+            super(map, source, constructEmptyPageRefs(source.getRawChildPageCount()), source.getTotalCount());
+        }
+
+        private static PageReference[] constructEmptyPageRefs(int size) {
+            // replace child pages with empty pages
+            PageReference[] children = new PageReference[size];
+            Arrays.fill(children, PageReference.EMPTY);
+            return children;
+        }
+
+        @Override
+        void writeUnsavedRecursive(Chunk chunk, WriteBuffer buff) {
+            if (complete) {
+                super.writeUnsavedRecursive(chunk, buff);
+            } else if (!isSaved()) {
+                writeChildrenRecursive(chunk, buff);
+            }
+        }
+
+        @Override
+        public boolean isComplete() {
+            return complete;
+        }
+
+        @Override
+        public void setComplete() {
+            recalculateTotalCount();
+            complete = true;
+        }
+
+        @Override
+        public void dump(StringBuilder buff) {
+            super.dump(buff);
+            buff.append(", complete:").append(complete);
+        }
+    }
+
     private static class Leaf extends Page
     {
         /**
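Together, the writeUnsavedRecursive/writeChildrenRecursive split and IncompleteNonLeaf implement a two-phase write: an incomplete branch page can flush its already-copied children to disk while its own child positions are still unknown, and once the subtree is complete the parent is written and the real child positions are patched back into the slots reserved in the write buffer (the buff.position(patch) ... writeChildren ... buff.position(old) sequence above). A standalone sketch of that reserve-write-patch technique using a plain ByteBuffer; the names are illustrative:

```java
// Standalone sketch of the "write children first, patch parent slots later"
// technique visible in writeUnsavedRecursive above. Names are illustrative.
import java.nio.ByteBuffer;

public class PatchBackWrite {
    public static void main(String[] args) {
        ByteBuffer buff = ByteBuffer.allocate(64);

        buff.putInt(2);                    // parent header: number of children
        int patch = buff.position();       // remember where the child offsets belong
        buff.putInt(0);                    // placeholder for child 0 offset
        buff.putInt(0);                    // placeholder for child 1 offset

        int child0 = buff.position();      // write the children after the parent header
        buff.putLong(11L);
        int child1 = buff.position();
        buff.putLong(22L);

        int old = buff.position();         // patch the real offsets back in
        buff.position(patch);
        buff.putInt(child0);
        buff.putInt(child1);
        buff.position(old);                // and restore the append position

        System.out.println("bytes written: " + buff.position());
    }
}
```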
......
@@ -133,6 +133,7 @@ import org.h2.test.store.TestCacheLIRS;
 import org.h2.test.store.TestCacheLongKeyLIRS;
 import org.h2.test.store.TestConcurrent;
 import org.h2.test.store.TestDataUtils;
+import org.h2.test.store.TestDefrag;
 import org.h2.test.store.TestFreeSpace;
 import org.h2.test.store.TestKillProcessWhileWriting;
 import org.h2.test.store.TestMVRTree;
@@ -922,6 +923,7 @@ kill -9 `jps -l | grep "org.h2.test." | cut -d " " -f 1`
         addTest(new TestFileLockSerialized());
         addTest(new TestFileLockProcess());
         addTest(new TestFileSystem());
+        addTest(new TestDefrag());
         addTest(new TestTools());
         addTest(new TestSampleApps());
......
/*
 * Copyright 2004-2018 H2 Group. Multiple-Licensed under the MPL 2.0,
 * and the EPL 1.0 (http://h2database.com/html/license.html).
 * Initial Developer: H2 Group
 */
package org.h2.test.store;

import static org.h2.engine.Constants.SUFFIX_MV_FILE;
import org.h2.test.TestBase;
import org.h2.test.TestDb;
import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

/**
 * Test off-line compaction procedure used by SHUTDOWN DEFRAG command
 *
 * @author <a href='mailto:andrei.tokar@gmail.com'>Andrei Tokar</a>
 */
public class TestDefrag extends TestDb
{
    /**
     * Run just this test.
     *
     * @param a ignored
     */
    public static void main(String... a) throws Exception {
        TestBase.createCaller().init().test();
    }

    @Override
    public boolean isEnabled() {
        return config.mvStore && !config.memory && config.big && !config.travis;
    }

    @Override
    public void test() throws Exception {
        String dbName = getTestName();
        deleteDb(dbName);
        File dbFile = new File(getBaseDir(), dbName + SUFFIX_MV_FILE);
        try (Connection c = getConnection(dbName)) {
            try (Statement st = c.createStatement()) {
                st.execute("CREATE TABLE IF NOT EXISTS test (id INT PRIMARY KEY, txt varchar)" +
                        " AS SELECT x, x || SPACE(200) FROM SYSTEM_RANGE(1,10000000)");
            }
            long origSize = dbFile.length();
            assertTrue(origSize > 4_000_000_000L);
            try (Statement st = c.createStatement()) {
                st.execute("shutdown defrag");
            }
            long compactedSize = dbFile.length();
            assertTrue(compactedSize < 400_000_000);
        }
        try (Connection c = getConnection(dbName + ";LAZY_QUERY_EXECUTION=1")) {
            try (Statement st = c.createStatement()) {
                ResultSet rs = st.executeQuery("SELECT * FROM test");
                int count = 0;
                while (rs.next()) {
                    ++count;
                    assertEquals(count, rs.getInt(1));
                    assertTrue(rs.getString(2).startsWith(count + " "));
                }
                assertEquals(10_000_000, count);
            }
        }
        deleteDb(dbName);
    }
}