Commit 5841c4df authored by Noel Grandin, committed by GitHub

Merge pull request #575 from andreitokar/full_text_mt

Support for full text search in multithreaded mode
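Not part of the diff below, just a minimal sketch of the usage this pull request is meant to support, assuming the native implementation (org.h2.fulltext.FullText): one connection inserts rows while another calls FT_SEARCH against the same MULTI_THREADED database. The in-memory URL, the FT_INIT alias, the table, the data, and the use of Java 8 lambdas are illustrative choices, not taken from the commit.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class FullTextMultiThreadedSketch {
    public static void main(String[] args) throws Exception {
        // MULTI_THREADED=1 lets several sessions execute statements concurrently;
        // the URL, table and data below are illustrative only.
        final String url = "jdbc:h2:mem:ft;DB_CLOSE_DELAY=-1;MULTI_THREADED=1";
        try (Connection conn = DriverManager.getConnection(url);
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR)");
            stat.execute("CREATE ALIAS IF NOT EXISTS FT_INIT FOR \"org.h2.fulltext.FullText.init\"");
            stat.execute("CALL FT_INIT()");
            stat.execute("CALL FT_CREATE_INDEX('PUBLIC', 'TEST', NULL)");
            stat.execute("INSERT INTO TEST VALUES(1, 'Hello World')");
        }
        // One thread searches while another inserts, each on its own connection.
        Thread searcher = new Thread(() -> {
            try (Connection conn = DriverManager.getConnection(url);
                    Statement stat = conn.createStatement();
                    ResultSet rs = stat.executeQuery(
                            "SELECT * FROM FT_SEARCH('Hello', 0, 0)")) {
                while (rs.next()) {
                    System.out.println(rs.getString("QUERY"));
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
        Thread updater = new Thread(() -> {
            try (Connection conn = DriverManager.getConnection(url);
                    Statement stat = conn.createStatement()) {
                stat.execute("INSERT INTO TEST VALUES(2, 'Hello again')");
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
        searcher.start();
        updater.start();
        searcher.join();
        updater.join();
    }
}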
......@@ -1320,13 +1320,6 @@ SELECT * FROM FTL_SEARCH_DATA('John', 0, 0);
SELECT * FROM FTL_SEARCH_DATA('LAST_NAME:John', 0, 0);
CALL FTL_DROP_ALL();
</pre>
<p>
The Lucene fulltext search implementation is not synchronized internally.
If you update the database and query the fulltext search concurrently
(directly using the Java API of H2 or Lucene itself), you need to ensure
operations are properly synchronized. If this is not the case, you may get
exceptions such as <code>org.apache.lucene.store.AlreadyClosedException: this IndexReader is closed</code>.
</p>
<h2 id="user_defined_variables">User-Defined Variables</h2>
<p>
......
......@@ -18,11 +18,12 @@ import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.UUID;
import org.h2.api.Trigger;
import org.h2.command.Parser;
import org.h2.engine.Session;
......@@ -151,13 +152,12 @@ public class FullText {
}
}
rs = stat.executeQuery("SELECT * FROM " + SCHEMA + ".WORDS");
HashMap<String, Integer> map = setting.getWordList();
while (rs.next()) {
String word = rs.getString("NAME");
int id = rs.getInt("ID");
word = setting.convertWord(word);
if (word != null) {
map.put(word, id);
setting.addWord(word, id);
}
}
setting.setInitialized(true);
......@@ -195,7 +195,7 @@ public class FullText {
init(conn);
removeAllTriggers(conn, TRIGGER_PREFIX);
FullTextSettings setting = FullTextSettings.getInstance(conn);
setting.getWordList().clear();
setting.clearWordList();
Statement stat = conn.createStatement();
stat.execute("TRUNCATE TABLE " + SCHEMA + ".WORDS");
stat.execute("TRUNCATE TABLE " + SCHEMA + ".ROWS");
......@@ -243,9 +243,9 @@ public class FullText {
break;
}
}
prep = conn.prepareStatement("DELETE FROM " + SCHEMA + ".MAP M " +
prep = conn.prepareStatement("DELETE FROM " + SCHEMA + ".MAP " +
"WHERE NOT EXISTS (SELECT * FROM " + SCHEMA +
".ROWS R WHERE R.ID=M.ROWID) AND ROWID<10000");
".ROWS R WHERE R.ID=ROWID) AND ROWID<10000");
while (true) {
int deleted = prep.executeUpdate();
if (deleted == 0) {
......@@ -266,8 +266,8 @@ public class FullText {
removeAllTriggers(conn, TRIGGER_PREFIX);
FullTextSettings setting = FullTextSettings.getInstance(conn);
setting.removeAllIndexes();
setting.getIgnoreList().clear();
setting.getWordList().clear();
setting.clearInored();
setting.clearWordList();
}
/**
......@@ -605,17 +605,16 @@ public class FullText {
if (!setting.isInitialized()) {
init(conn);
}
HashSet<String> words = New.hashSet();
Set<String> words = New.hashSet();
addWords(setting, words, text);
HashSet<Integer> rIds = null, lastRowIds = null;
HashMap<String, Integer> allWords = setting.getWordList();
Set<Integer> rIds = null, lastRowIds;
PreparedStatement prepSelectMapByWordId = setting.prepare(conn,
SELECT_MAP_BY_WORD_ID);
for (String word : words) {
lastRowIds = rIds;
rIds = New.hashSet();
Integer wId = allWords.get(word);
Integer wId = setting.getWordId(word);
if (wId == null) {
continue;
}
......@@ -698,7 +697,7 @@ public class FullText {
* @param reader the reader
*/
protected static void addWords(FullTextSettings setting,
HashSet<String> set, Reader reader) {
Set<String> set, Reader reader) {
StreamTokenizer tokenizer = new StreamTokenizer(reader);
tokenizer.resetSyntax();
tokenizer.wordChars(' ' + 1, 255);
......@@ -732,7 +731,7 @@ public class FullText {
* @param text the text
*/
protected static void addWords(FullTextSettings setting,
HashSet<String> set, String text) {
Set<String> set, String text) {
String whitespaceChars = setting.getWhitespaceChars();
StringTokenizer tokenizer = new StringTokenizer(text, whitespaceChars);
while (tokenizer.hasMoreTokens()) {
......@@ -751,23 +750,30 @@ public class FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void createTrigger(Connection conn, String schema,
private static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
createOrDropTrigger(conn, schema, table, true);
}
private static void createOrDropTrigger(Connection conn,
String schema, String table, boolean create) throws SQLException {
Statement stat = conn.createStatement();
try (Statement stat = conn.createStatement()) {
String trigger = StringUtils.quoteIdentifier(schema) + "."
+ StringUtils.quoteIdentifier(TRIGGER_PREFIX + table);
stat.execute("DROP TRIGGER IF EXISTS " + trigger);
if (create) {
StringBuilder buff = new StringBuilder("CREATE TRIGGER IF NOT EXISTS ");
// needs to be called on rollback as well, because we use the init
// connection to do changes in the index (not the user connection)
boolean multiThread = FullTextTrigger.isMultiThread(conn);
StringBuilder buff = new StringBuilder(
"CREATE TRIGGER IF NOT EXISTS ");
// unless multithread, trigger needs to be called on rollback as well,
// because we use the init connection to do changes in the index
// (not the user connection)
buff.append(trigger).
append(" AFTER INSERT, UPDATE, DELETE, ROLLBACK ON ").
append(" AFTER INSERT, UPDATE, DELETE");
if(!multiThread) {
buff.append(", ROLLBACK");
}
buff.append(" ON ").
append(StringUtils.quoteIdentifier(schema)).
append('.').
append(StringUtils.quoteIdentifier(table)).
......@@ -777,6 +783,7 @@ public class FullText {
stat.execute(buff.toString());
}
}
}
/**
* Add the existing data to the index.
......@@ -785,7 +792,7 @@ public class FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void indexExistingRows(Connection conn, String schema,
private static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
FullText.FullTextTrigger existing = new FullText.FullTextTrigger();
existing.init(conn, schema, null, table, false, Trigger.INSERT);
......@@ -823,13 +830,7 @@ public class FullText {
private static void setIgnoreList(FullTextSettings setting,
String commaSeparatedList) {
String[] list = StringUtils.arraySplit(commaSeparatedList, ',', true);
HashSet<String> set = setting.getIgnoreList();
for (String word : list) {
String converted = setting.convertWord(word);
if (converted != null) {
set.add(converted);
}
}
setting.addIgnored(Arrays.asList(list));
}
/**
......@@ -860,14 +861,28 @@ public class FullText {
/**
* Trigger that updates the index when a row is inserted, updated, or deleted.
*/
public static class FullTextTrigger implements Trigger {
public static final class FullTextTrigger implements Trigger {
private FullTextSettings setting;
private IndexInfo index;
private int[] columnTypes;
private final PreparedStatement[] prepStatements = new PreparedStatement[SQL.length];
private boolean useOwnConnection;
private static final int INSERT_WORD = 0;
private static final int INSERT_ROW = 1;
private static final int INSERT_MAP = 2;
private static final int DELETE_ROW = 3;
private static final int DELETE_MAP = 4;
private static final int SELECT_ROW = 5;
protected FullTextSettings setting;
protected IndexInfo index;
protected int[] columnTypes;
protected PreparedStatement prepInsertWord, prepInsertRow, prepInsertMap;
protected PreparedStatement prepDeleteRow, prepDeleteMap;
protected PreparedStatement prepSelectRow;
private static final String SQL[] = {
"MERGE INTO " + SCHEMA + ".WORDS(NAME) KEY(NAME) VALUES(?)",
"INSERT INTO " + SCHEMA + ".ROWS(HASH, INDEXID, KEY) VALUES(?, ?, ?)",
"INSERT INTO " + SCHEMA + ".MAP(ROWID, WORDID) VALUES(?, ?)",
"DELETE FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?",
"DELETE FROM " + SCHEMA + ".MAP WHERE ROWID=? AND WORDID=?",
"SELECT ID FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?"
};
/**
* INTERNAL
......@@ -915,7 +930,8 @@ public class FullText {
}
ArrayList<String> indexList = New.arrayList();
PreparedStatement prep = conn.prepareStatement(
"SELECT ID, COLUMNS FROM " + SCHEMA + ".INDEXES WHERE SCHEMA=? AND TABLE=?");
"SELECT ID, COLUMNS FROM " + SCHEMA + ".INDEXES" +
" WHERE SCHEMA=? AND TABLE=?");
prep.setString(1, schemaName);
prep.setString(2, tableName);
rs = prep.executeQuery();
......@@ -923,9 +939,7 @@ public class FullText {
index.id = rs.getInt(1);
String columns = rs.getString(2);
if (columns != null) {
for (String s : StringUtils.arraySplit(columns, ',', true)) {
indexList.add(s);
}
Collections.addAll(indexList, StringUtils.arraySplit(columns, ',', true));
}
}
if (indexList.size() == 0) {
......@@ -936,18 +950,23 @@ public class FullText {
index.indexColumns = new int[indexList.size()];
setColumns(index.indexColumns, indexList, columnList);
setting.addIndexInfo(index);
prepInsertWord = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".WORDS(NAME) VALUES(?)");
prepInsertRow = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".ROWS(HASH, INDEXID, KEY) VALUES(?, ?, ?)");
prepInsertMap = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".MAP(ROWID, WORDID) VALUES(?, ?)");
prepDeleteRow = conn.prepareStatement(
"DELETE FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?");
prepDeleteMap = conn.prepareStatement(
"DELETE FROM " + SCHEMA + ".MAP WHERE ROWID=? AND WORDID=?");
prepSelectRow = conn.prepareStatement(
"SELECT ID FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?");
useOwnConnection = isMultiThread(conn);
if(!useOwnConnection) {
for (int i = 0; i < SQL.length; i++) {
prepStatements[i] = conn.prepareStatement(SQL[i]);
}
}
}
private static boolean isMultiThread(Connection conn)
throws SQLException {
try (Statement stat = conn.createStatement()) {
ResultSet rs = stat.executeQuery(
"SELECT value FROM information_schema.settings" +
" WHERE name = 'MULTI_THREADED'");
return rs.next() && !"0".equals(rs.getString(1));
}
}
/**
......@@ -960,16 +979,16 @@ public class FullText {
if (newRow != null) {
// update
if (hasChanged(oldRow, newRow, index.indexColumns)) {
delete(oldRow);
insert(newRow);
delete(conn, oldRow);
insert(conn, newRow);
}
} else {
// delete
delete(oldRow);
delete(conn, oldRow);
}
} else if (newRow != null) {
// insert
insert(newRow);
insert(conn, newRow);
}
}
......@@ -992,11 +1011,16 @@ public class FullText {
/**
* Add a row to the index.
*
* @param conn the connection to use
* @param row the row
*/
protected void insert(Object[] row) throws SQLException {
protected void insert(Connection conn, Object[] row) throws SQLException {
PreparedStatement prepInsertRow = null;
PreparedStatement prepInsertMap = null;
try {
String key = getKey(row);
int hash = key.hashCode();
prepInsertRow = getStatement(conn, INSERT_ROW);
prepInsertRow.setInt(1, hash);
prepInsertRow.setInt(2, index.id);
prepInsertRow.setString(3, key);
......@@ -1004,30 +1028,46 @@ public class FullText {
ResultSet rs = prepInsertRow.getGeneratedKeys();
rs.next();
int rowId = rs.getInt(1);
prepInsertMap = getStatement(conn, INSERT_MAP);
prepInsertMap.setInt(1, rowId);
int[] wordIds = getWordIds(row);
int[] wordIds = getWordIds(conn, row);
for (int id : wordIds) {
prepInsertMap.setInt(2, id);
prepInsertMap.execute();
}
} finally {
if (useOwnConnection) {
IOUtils.closeSilently(prepInsertRow);
IOUtils.closeSilently(prepInsertMap);
}
}
}
/**
* Delete a row from the index.
*
* @param conn the connection to use
* @param row the row
*/
protected void delete(Object[] row) throws SQLException {
protected void delete(Connection conn, Object[] row) throws SQLException {
PreparedStatement prepSelectRow = null;
PreparedStatement prepDeleteMap = null;
PreparedStatement prepDeleteRow = null;
try {
String key = getKey(row);
int hash = key.hashCode();
prepSelectRow = getStatement(conn, SELECT_ROW);
prepSelectRow.setInt(1, hash);
prepSelectRow.setInt(2, index.id);
prepSelectRow.setString(3, key);
ResultSet rs = prepSelectRow.executeQuery();
prepDeleteMap = getStatement(conn, DELETE_MAP);
prepDeleteRow = getStatement(conn, DELETE_ROW);
if (rs.next()) {
int rowId = rs.getInt(1);
prepDeleteMap.setInt(1, rowId);
int[] wordIds = getWordIds(row);
int[] wordIds = getWordIds(conn, row);
for (int id : wordIds) {
prepDeleteMap.setInt(2, id);
prepDeleteMap.executeUpdate();
......@@ -1037,9 +1077,16 @@ public class FullText {
prepDeleteRow.setString(3, key);
prepDeleteRow.executeUpdate();
}
} finally {
if (useOwnConnection) {
IOUtils.closeSilently(prepSelectRow);
IOUtils.closeSilently(prepDeleteMap);
IOUtils.closeSilently(prepDeleteRow);
}
}
}
private int[] getWordIds(Object[] row) throws SQLException {
private int[] getWordIds(Connection conn, Object[] row) throws SQLException {
HashSet<String> words = New.hashSet();
for (int idx : index.indexColumns) {
int type = columnTypes[idx];
......@@ -1057,27 +1104,36 @@ public class FullText {
addWords(setting, words, string);
}
}
HashMap<String, Integer> allWords = setting.getWordList();
PreparedStatement prepInsertWord = null;
try {
prepInsertWord = getStatement(conn, INSERT_WORD);
int[] wordIds = new int[words.size()];
Iterator<String> it = words.iterator();
for (int i = 0; it.hasNext(); i++) {
String word = it.next();
Integer wId = allWords.get(word);
int i = 0;
for (String word : words) {
int wordId;
if (wId == null) {
Integer wId;
while((wId = setting.getWordId(word)) == null) {
prepInsertWord.setString(1, word);
prepInsertWord.execute();
ResultSet rs = prepInsertWord.getGeneratedKeys();
rs.next();
if (rs.next()) {
wordId = rs.getInt(1);
allWords.put(word, wordId);
} else {
wordId = wId.intValue();
if (wordId != 0) {
setting.addWord(word, wordId);
wId = wordId;
break;
}
}
wordIds[i] = wordId;
}
wordIds[i++] = wId;
}
Arrays.sort(wordIds);
return wordIds;
} finally {
if (useOwnConnection) {
IOUtils.closeSilently(prepInsertWord);
}
}
}
private String getKey(Object[] row) throws SQLException {
......@@ -1095,6 +1151,10 @@ public class FullText {
return buff.toString();
}
private PreparedStatement getStatement(Connection conn, int indx) throws SQLException {
return useOwnConnection ? conn.prepareStatement(SQL[indx]) : prepStatements[indx];
}
}
/**
......@@ -1116,5 +1176,4 @@ public class FullText {
throws SQLException {
throw new SQLException(message, "FULLTEXT");
}
}
......@@ -13,6 +13,7 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
......@@ -37,6 +38,8 @@ import org.h2.util.StatementBuilder;
import org.h2.util.StringUtils;
import org.h2.util.Utils;
import java.io.File;
import java.util.Map;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;
......@@ -94,7 +97,7 @@ public class FullTextLucene extends FullText {
* @param conn the connection
*/
public static void init(Connection conn) throws SQLException {
Statement stat = conn.createStatement();
try (Statement stat = conn.createStatement()) {
stat.execute("CREATE SCHEMA IF NOT EXISTS " + SCHEMA);
stat.execute("CREATE TABLE IF NOT EXISTS " + SCHEMA +
".INDEXES(SCHEMA VARCHAR, TABLE VARCHAR, " +
......@@ -111,10 +114,6 @@ public class FullTextLucene extends FullText {
FullTextLucene.class.getName() + ".reindex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_ALL FOR \"" +
FullTextLucene.class.getName() + ".dropAll\"");
try {
getIndexAccess(conn);
} catch (SQLException e) {
throw convertException(e);
}
}
......@@ -157,12 +156,10 @@ public class FullTextLucene extends FullText {
prep.setString(1, schema);
prep.setString(2, table);
int rowCount = prep.executeUpdate();
if (rowCount == 0) {
return;
}
if (rowCount != 0) {
reindex(conn);
}
}
/**
* Re-creates the full text index for this database. Calling this method is
......@@ -248,10 +245,7 @@ public class FullTextLucene extends FullText {
* @return the converted SQL exception
*/
protected static SQLException convertException(Exception e) {
SQLException e2 = new SQLException(
"Error while indexing document", "FULLTEXT");
e2.initCause(e);
return e2;
return new SQLException("Error while indexing document", "FULLTEXT", e);
}
/**
......@@ -261,7 +255,7 @@ public class FullTextLucene extends FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void createTrigger(Connection conn, String schema,
private static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
createOrDropTrigger(conn, schema, table, true);
}
......@@ -309,11 +303,7 @@ public class FullTextLucene extends FullText {
conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
IndexWriter writer = new IndexWriter(indexDir, conf);
//see http://wiki.apache.org/lucene-java/NearRealtimeSearch
IndexReader reader = IndexReader.open(writer, true);
access = new IndexAccess();
access.writer = writer;
access.reader = reader;
access.searcher = new IndexSearcher(reader);
access = new IndexAccess(writer);
} catch (IOException e) {
throw convertException(e);
}
......@@ -353,7 +343,7 @@ public class FullTextLucene extends FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void indexExistingRows(Connection conn, String schema,
private static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
FullTextLucene.FullTextTrigger existing = new FullTextLucene.FullTextTrigger();
existing.init(conn, schema, null, table, false, Trigger.INSERT);
......@@ -373,10 +363,7 @@ public class FullTextLucene extends FullText {
private static void removeIndexFiles(Connection conn) throws SQLException {
String path = getIndexPath(conn);
IndexAccess access = INDEX_ACCESS.get(path);
if (access != null) {
removeIndexAccess(access, path);
}
removeIndexAccess(path);
if (!path.startsWith(IN_MEMORY_PREFIX)) {
FileUtils.deleteRecursive(path, false);
}
......@@ -386,17 +373,16 @@ public class FullTextLucene extends FullText {
* Close the index writer and searcher and remove them from the index access
* set.
*
* @param access the index writer/searcher wrapper
* @param indexPath the index path
*/
protected static void removeIndexAccess(IndexAccess access, String indexPath)
protected static void removeIndexAccess(String indexPath)
throws SQLException {
synchronized (INDEX_ACCESS) {
try {
INDEX_ACCESS.remove(indexPath);
access.searcher.close();
access.reader.close();
access.writer.close();
IndexAccess access = INDEX_ACCESS.remove(indexPath);
if(access != null) {
access.close();
}
} catch (Exception e) {
throw convertException(e);
}
......@@ -426,7 +412,8 @@ public class FullTextLucene extends FullText {
try {
IndexAccess access = getIndexAccess(conn);
// take a reference as the searcher may change
IndexSearcher searcher = access.searcher;
IndexSearcher searcher = access.getSearcher();
try {
// reuse the same analyzer; it's thread-safe;
// also allows subclasses to control the analyzer used.
Analyzer analyzer = access.writer.getAnalyzer();
......@@ -470,6 +457,9 @@ public class FullTextLucene extends FullText {
result.addRow(q, score);
}
}
} finally {
access.returnSearcher(searcher);
}
} catch (Exception e) {
throw convertException(e);
}
......@@ -479,16 +469,16 @@ public class FullTextLucene extends FullText {
/**
* Trigger that updates the index when a row is inserted, updated, or deleted.
*/
public static class FullTextTrigger implements Trigger {
public static final class FullTextTrigger implements Trigger {
protected String schema;
protected String table;
protected int[] keys;
protected int[] indexColumns;
protected String[] columns;
protected int[] columnTypes;
protected String indexPath;
protected IndexAccess indexAccess;
private String schema;
private String table;
private int[] keys;
private int[] indexColumns;
private String[] columns;
private int[] columnTypes;
private String indexPath;
private IndexAccess indexAccess;
/**
* INTERNAL
......@@ -541,9 +531,8 @@ public class FullTextLucene extends FullText {
if (rs.next()) {
String cols = rs.getString(1);
if (cols != null) {
for (String s : StringUtils.arraySplit(cols, ',', true)) {
indexList.add(s);
}
Collections.addAll(indexList,
StringUtils.arraySplit(cols, ',', true));
}
}
if (indexList.size() == 0) {
......@@ -583,10 +572,7 @@ public class FullTextLucene extends FullText {
*/
@Override
public void close() throws SQLException {
if (indexAccess != null) {
removeIndexAccess(indexAccess, indexPath);
indexAccess = null;
}
removeIndexAccess(indexPath);
}
/**
......@@ -600,14 +586,9 @@ public class FullTextLucene extends FullText {
/**
* Commit all changes to the Lucene index.
*/
void commitIndex() throws SQLException {
private void commitIndex() throws SQLException {
try {
indexAccess.writer.commit();
// recreate Searcher with the IndexWriter's reader.
indexAccess.searcher.close();
indexAccess.reader.close();
indexAccess.reader = IndexReader.open(indexAccess.writer, true);
indexAccess.searcher = new IndexSearcher(indexAccess.reader);
indexAccess.commit();
} catch (IOException e) {
throw convertException(e);
}
......@@ -699,22 +680,80 @@ public class FullTextLucene extends FullText {
/**
* A wrapper for the Lucene writer and searcher.
*/
static class IndexAccess {
static final class IndexAccess {
/**
* The index writer.
*/
IndexWriter writer;
final IndexWriter writer;
/**
* The index reader.
* Map of usage counters for outstanding searchers.
*/
IndexReader reader;
private final Map<IndexSearcher,Integer> counters = New.hashMap();
/**
* Usage counter for current searcher.
*/
private int counter;
/**
* The index searcher.
*/
IndexSearcher searcher;
private IndexSearcher searcher;
private IndexAccess(IndexWriter writer) throws IOException {
this.writer = writer;
IndexReader reader = IndexReader.open(writer, true);
searcher = new IndexSearcher(reader);
}
private synchronized IndexSearcher getSearcher() {
++counter;
return searcher;
}
private synchronized void returnSearcher(IndexSearcher searcher) {
if (this.searcher == searcher) {
--counter;
assert counter >= 0;
} else {
Integer cnt = counters.remove(searcher);
assert cnt != null;
if(--cnt == 0) {
closeSearcher(searcher);
} else {
counters.put(searcher, cnt);
}
}
}
public synchronized void commit() throws IOException {
writer.commit();
if (counter != 0) {
counters.put(searcher, counter);
counter = 0;
} else {
closeSearcher(searcher);
}
// recreate Searcher with the IndexWriter's reader.
searcher = new IndexSearcher(IndexReader.open(writer, true));
}
public synchronized void close() throws IOException {
for (IndexSearcher searcher : counters.keySet()) {
closeSearcher(searcher);
}
counters.clear();
closeSearcher(searcher);
searcher = null;
writer.close();
}
private static void closeSearcher(IndexSearcher searcher) {
IndexReader indexReader = searcher.getIndexReader();
try { searcher.close(); } catch(IOException ignore) {/**/}
try { indexReader.close(); } catch(IOException ignore) {/**/}
}
}
}
......@@ -10,20 +10,22 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import org.h2.util.New;
import org.h2.util.SoftHashMap;
/**
* The global settings of a full text search.
*/
class FullTextSettings {
final class FullTextSettings {
/**
* The settings of open indexes.
*/
private static final HashMap<String, FullTextSettings> SETTINGS = New.hashMap();
private static final Map<String, FullTextSettings> SETTINGS = New.hashMap();
/**
* Whether this instance has been initialized.
......@@ -33,17 +35,17 @@ class FullTextSettings {
/**
* The set of words not to index (stop words).
*/
private final HashSet<String> ignoreList = New.hashSet();
private final Set<String> ignoreList = New.hashSet();
/**
* The set of words / terms.
*/
private final HashMap<String, Integer> words = New.hashMap();
private final Map<String, Integer> words = New.hashMap();
/**
* The set of indexes in this database.
*/
private final HashMap<Integer, IndexInfo> indexes = New.hashMap();
private final Map<Integer, IndexInfo> indexes = Collections.synchronizedMap(New.<Integer, IndexInfo>hashMap());
/**
* The prepared statement cache.
......@@ -60,26 +62,63 @@ class FullTextSettings {
/**
* Create a new instance.
*/
protected FullTextSettings() {
private FullTextSettings() {
// don't allow construction
}
/**
* Get the ignore list.
*
* @return the ignore list
* Clear set of ignored words
*/
protected HashSet<String> getIgnoreList() {
return ignoreList;
public void clearInored() {
synchronized (ignoreList) {
ignoreList.clear();
}
}
/**
* Get the word list.
*
* @return the word list
* Amend set of ignored words
* @param words to add
*/
public void addIgnored(Iterable<String> words) {
synchronized (ignoreList) {
for (String word : words) {
word = normalizeWord(word);
ignoreList.add(word);
}
}
}
/**
* Clear set of searchable words
*/
public void clearWordList() {
synchronized (words) {
words.clear();
}
}
/**
* Get id for a searchable word
* @param word to find id for
* @return Integer id or null if word is not found
*/
protected HashMap<String, Integer> getWordList() {
return words;
public Integer getWordId(String word) {
synchronized (words) {
return words.get(word);
}
}
/**
* Register searchable word
* @param word to register
* @param id to register with
*/
public void addWord(String word, Integer id) {
synchronized (words) {
if(!words.containsKey(word)) {
words.put(word, id);
}
}
}
/**
......@@ -109,11 +148,12 @@ class FullTextSettings {
* @return the uppercase version of the word or null
*/
protected String convertWord(String word) {
// TODO this is locale specific, document
word = word.toUpperCase();
word = normalizeWord(word);
synchronized (ignoreList) {
if (ignoreList.contains(word)) {
return null;
}
}
return word;
}
......@@ -126,11 +166,14 @@ class FullTextSettings {
protected static FullTextSettings getInstance(Connection conn)
throws SQLException {
String path = getIndexPath(conn);
FullTextSettings setting = SETTINGS.get(path);
FullTextSettings setting;
synchronized (SETTINGS) {
setting = SETTINGS.get(path);
if (setting == null) {
setting = new FullTextSettings();
SETTINGS.put(path, setting);
}
}
return setting;
}
......@@ -140,7 +183,7 @@ class FullTextSettings {
* @param conn the connection
* @return the file system path
*/
protected static String getIndexPath(Connection conn) throws SQLException {
private static String getIndexPath(Connection conn) throws SQLException {
Statement stat = conn.createStatement();
ResultSet rs = stat.executeQuery(
"CALL IFNULL(DATABASE_PATH(), 'MEM:' || DATABASE())");
......@@ -218,8 +261,10 @@ class FullTextSettings {
* Close all fulltext settings, freeing up memory.
*/
protected static void closeAll() {
synchronized (SETTINGS) {
SETTINGS.clear();
}
}
protected void setWhitespaceChars(String whitespaceChars) {
this.whitespaceChars = whitespaceChars;
......@@ -229,4 +274,8 @@ class FullTextSettings {
return whitespaceChars;
}
private String normalizeWord(String word) {
// TODO this is locale specific, document
return word.toUpperCase();
}
}
......@@ -22,6 +22,7 @@ import java.util.concurrent.TimeUnit;
import org.h2.fulltext.FullText;
import org.h2.store.fs.FileUtils;
import org.h2.test.TestAll;
import org.h2.test.TestBase;
import org.h2.util.IOUtils;
import org.h2.util.Task;
......@@ -50,10 +51,6 @@ public class TestFullText extends TestBase {
@Override
public void test() throws Exception {
if (config.multiThreaded) {
// It is even mentioned in the docs that this is not supported
return;
}
testUuidPrimaryKey(false);
testAutoAnalyze();
testNativeFeatures();
......@@ -71,7 +68,9 @@ public class TestFullText extends TestBase {
testCreateDropLucene();
testUuidPrimaryKey(true);
testMultiThreaded(true);
if(config.mvStore || !config.multiThreaded) {
testMultiThreaded(false);
}
testTransaction(true);
test(true, "VARCHAR");
test(true, "CLOB");
......@@ -256,7 +255,7 @@ public class TestFullText extends TestBase {
int len = 2;
Task[] task = new Task[len];
for (int i = 0; i < len; i++) {
final Connection conn = getConnection("fullText", connList);
final Connection conn = getConnection("fullText;LOCK_TIMEOUT=60000", connList);
Statement stat = conn.createStatement();
initFullText(stat, lucene);
initFullText(stat, lucene);
......