提交 c5d33cf2 authored 作者: Thomas Mueller's avatar Thomas Mueller

Formatting / javadocs

上级 a6c34373
......@@ -168,9 +168,9 @@ public class MVStore {
private CacheLongKeyLIRS<Page> cache;
/**
* The page chunk references cache. The default size is 4 MB, and the average size is 2 KB.
* It is split in 16 segments. The stack move distance is 2% of the expected
* number of entries.
* The page chunk references cache. The default size is 4 MB, and the
* average size is 2 KB. It is split in 16 segments. The stack move distance
* is 2% of the expected number of entries.
*/
private CacheLongKeyLIRS<PageChildren> cacheChunkRef;
......@@ -1240,7 +1240,7 @@ public class MVStore {
return count;
}
PageChildren readPageChunkReferences(int mapId, long pos, int parentChunk) {
private PageChildren readPageChunkReferences(int mapId, long pos, int parentChunk) {
if (DataUtils.getPageType(pos) == DataUtils.PAGE_TYPE_LEAF) {
return null;
}
......
......@@ -86,6 +86,7 @@ public class MVStoreTool {
*
* @param fileName the name of the file
* @param writer the print writer
* @param details print the page details
*/
public static void dump(String fileName, Writer writer, boolean details) {
PrintWriter pw = new PrintWriter(writer, true);
......
......@@ -999,14 +999,14 @@ public class Page {
}
/**
* Read the page from the buffer.
* Read an inner node page from the buffer, but ignore the keys and
* values.
*
* @param pos the position
* @param buff the buffer
* @param chunkId the chunk id
* @param fileStore the file store
* @param filePos the position in the file
* @param mapId the map id
* @param offset the offset within the chunk
* @param maxLength the maximum length
* @param pos the position
* @return the page children object
*/
static PageChildren read(FileStore fileStore, long filePos, int mapId, long pos) {
ByteBuffer buff;
......@@ -1067,10 +1067,19 @@ public class Page {
return new PageChildren(pos, children);
}
/**
* Only keep one reference to the same chunk. Only leaf references are
* removed (references to inner nodes are not removed, as they could
* indirectly point to other chunks).
*/
void removeDuplicateChunkReferences() {
HashSet<Integer> chunks = New.hashSet();
// we don't need references to leaves in the same chunk
chunks.add(DataUtils.getPageChunkId(pos));
// possible space optimization:
// we could remove more children, for example
// we could remove all leaf references to the same chunk
// if there is also an inner node reference to that chunk
for (int i = 0; i < children.length; i++) {
long p = children[i];
if (DataUtils.getPageType(p) == DataUtils.PAGE_TYPE_NODE) {
......
......@@ -52,7 +52,8 @@ public class MVTable extends TableBase {
private volatile Session lockExclusiveSession;
// using a ConcurrentHashMap as a set
private final ConcurrentHashMap<Session, Session> lockSharedSessions = new ConcurrentHashMap<Session, Session>();
private final ConcurrentHashMap<Session, Session> lockSharedSessions =
new ConcurrentHashMap<Session, Session>();
/**
* The queue of sessions waiting to lock the table. It is a FIFO queue to
......
......@@ -766,6 +766,5 @@ linearly patching perfect hole sip enwiki flooding uniformly recursions happenin
permanently nucleus forbidden student trusted poodle agentlib
jech ladislav cognitect sergey thompson evdokimov arykov mfulton
dimitrijs fedotovs kingdom manley xso latvia ontwikkeling reeve
extendable republic uniquely
datasources accidentally recursing respecting
\ No newline at end of file
extendable republic uniquely datasources accidentally recursing respecting
young sweep
\ No newline at end of file
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论