Commit 5841c4df, authored by Noel Grandin, committed via GitHub

Merge pull request #575 from andreitokar/full_text_mt

Support for full text search in multithreaded mode
......@@ -1320,13 +1320,6 @@ SELECT * FROM FTL_SEARCH_DATA('John', 0, 0);
SELECT * FROM FTL_SEARCH_DATA('LAST_NAME:John', 0, 0);
CALL FTL_DROP_ALL();
</pre>
<p>
The Lucene fulltext search implementation is not synchronized internally.
If you update the database and query the fulltext search concurrently
(directly using the Java API of H2 or Lucene itself), you need to ensure
operations are properly synchronized. If this is not the case, you may get
exceptions such as <code>org.apache.lucene.store.AlreadyClosedException: this IndexReader is closed</code>.
</p>
<h2 id="user_defined_variables">User-Defined Variables</h2>
<p>
......
......@@ -18,11 +18,12 @@ import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.UUID;
import org.h2.api.Trigger;
import org.h2.command.Parser;
import org.h2.engine.Session;
......@@ -151,13 +152,12 @@ public class FullText {
}
}
rs = stat.executeQuery("SELECT * FROM " + SCHEMA + ".WORDS");
HashMap<String, Integer> map = setting.getWordList();
while (rs.next()) {
String word = rs.getString("NAME");
int id = rs.getInt("ID");
word = setting.convertWord(word);
if (word != null) {
map.put(word, id);
setting.addWord(word, id);
}
}
setting.setInitialized(true);
......@@ -195,7 +195,7 @@ public class FullText {
init(conn);
removeAllTriggers(conn, TRIGGER_PREFIX);
FullTextSettings setting = FullTextSettings.getInstance(conn);
setting.getWordList().clear();
setting.clearWordList();
Statement stat = conn.createStatement();
stat.execute("TRUNCATE TABLE " + SCHEMA + ".WORDS");
stat.execute("TRUNCATE TABLE " + SCHEMA + ".ROWS");
......@@ -243,9 +243,9 @@ public class FullText {
break;
}
}
prep = conn.prepareStatement("DELETE FROM " + SCHEMA + ".MAP M " +
prep = conn.prepareStatement("DELETE FROM " + SCHEMA + ".MAP " +
"WHERE NOT EXISTS (SELECT * FROM " + SCHEMA +
".ROWS R WHERE R.ID=M.ROWID) AND ROWID<10000");
".ROWS R WHERE R.ID=ROWID) AND ROWID<10000");
while (true) {
int deleted = prep.executeUpdate();
if (deleted == 0) {
......@@ -266,8 +266,8 @@ public class FullText {
removeAllTriggers(conn, TRIGGER_PREFIX);
FullTextSettings setting = FullTextSettings.getInstance(conn);
setting.removeAllIndexes();
setting.getIgnoreList().clear();
setting.getWordList().clear();
setting.clearInored();
setting.clearWordList();
}
/**
......@@ -605,17 +605,16 @@ public class FullText {
if (!setting.isInitialized()) {
init(conn);
}
HashSet<String> words = New.hashSet();
Set<String> words = New.hashSet();
addWords(setting, words, text);
HashSet<Integer> rIds = null, lastRowIds = null;
HashMap<String, Integer> allWords = setting.getWordList();
Set<Integer> rIds = null, lastRowIds;
PreparedStatement prepSelectMapByWordId = setting.prepare(conn,
SELECT_MAP_BY_WORD_ID);
for (String word : words) {
lastRowIds = rIds;
rIds = New.hashSet();
Integer wId = allWords.get(word);
Integer wId = setting.getWordId(word);
if (wId == null) {
continue;
}
......@@ -698,7 +697,7 @@ public class FullText {
* @param reader the reader
*/
protected static void addWords(FullTextSettings setting,
HashSet<String> set, Reader reader) {
Set<String> set, Reader reader) {
StreamTokenizer tokenizer = new StreamTokenizer(reader);
tokenizer.resetSyntax();
tokenizer.wordChars(' ' + 1, 255);
......@@ -732,7 +731,7 @@ public class FullText {
* @param text the text
*/
protected static void addWords(FullTextSettings setting,
HashSet<String> set, String text) {
Set<String> set, String text) {
String whitespaceChars = setting.getWhitespaceChars();
StringTokenizer tokenizer = new StringTokenizer(text, whitespaceChars);
while (tokenizer.hasMoreTokens()) {
......@@ -751,30 +750,38 @@ public class FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
private static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
createOrDropTrigger(conn, schema, table, true);
}
private static void createOrDropTrigger(Connection conn,
String schema, String table, boolean create) throws SQLException {
Statement stat = conn.createStatement();
String trigger = StringUtils.quoteIdentifier(schema) + "."
+ StringUtils.quoteIdentifier(TRIGGER_PREFIX + table);
stat.execute("DROP TRIGGER IF EXISTS " + trigger);
if (create) {
StringBuilder buff = new StringBuilder("CREATE TRIGGER IF NOT EXISTS ");
// needs to be called on rollback as well, because we use the init
// connection to do changes in the index (not the user connection)
buff.append(trigger).
append(" AFTER INSERT, UPDATE, DELETE, ROLLBACK ON ").
append(StringUtils.quoteIdentifier(schema)).
append('.').
append(StringUtils.quoteIdentifier(table)).
append(" FOR EACH ROW CALL \"").
append(FullText.FullTextTrigger.class.getName()).
append('\"');
stat.execute(buff.toString());
try (Statement stat = conn.createStatement()) {
String trigger = StringUtils.quoteIdentifier(schema) + "."
+ StringUtils.quoteIdentifier(TRIGGER_PREFIX + table);
stat.execute("DROP TRIGGER IF EXISTS " + trigger);
if (create) {
boolean multiThread = FullTextTrigger.isMultiThread(conn);
StringBuilder buff = new StringBuilder(
"CREATE TRIGGER IF NOT EXISTS ");
// unless multithread, trigger needs to be called on rollback as well,
// because we use the init connection to do changes in the index
// (not the user connection)
buff.append(trigger).
append(" AFTER INSERT, UPDATE, DELETE");
if(!multiThread) {
buff.append(", ROLLBACK");
}
buff.append(" ON ").
append(StringUtils.quoteIdentifier(schema)).
append('.').
append(StringUtils.quoteIdentifier(table)).
append(" FOR EACH ROW CALL \"").
append(FullText.FullTextTrigger.class.getName()).
append('\"');
stat.execute(buff.toString());
}
}
}
......@@ -785,8 +792,8 @@ public class FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
private static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
FullText.FullTextTrigger existing = new FullText.FullTextTrigger();
existing.init(conn, schema, null, table, false, Trigger.INSERT);
String sql = "SELECT * FROM " + StringUtils.quoteIdentifier(schema) +
......@@ -823,13 +830,7 @@ public class FullText {
private static void setIgnoreList(FullTextSettings setting,
String commaSeparatedList) {
String[] list = StringUtils.arraySplit(commaSeparatedList, ',', true);
HashSet<String> set = setting.getIgnoreList();
for (String word : list) {
String converted = setting.convertWord(word);
if (converted != null) {
set.add(converted);
}
}
setting.addIgnored(Arrays.asList(list));
}
/**
......@@ -860,14 +861,28 @@ public class FullText {
/**
* Trigger that updates the index when inserting, updating, or deleting a row.
*/
public static class FullTextTrigger implements Trigger {
public static final class FullTextTrigger implements Trigger {
private FullTextSettings setting;
private IndexInfo index;
private int[] columnTypes;
private final PreparedStatement[] prepStatements = new PreparedStatement[SQL.length];
private boolean useOwnConnection;
private static final int INSERT_WORD = 0;
private static final int INSERT_ROW = 1;
private static final int INSERT_MAP = 2;
private static final int DELETE_ROW = 3;
private static final int DELETE_MAP = 4;
private static final int SELECT_ROW = 5;
protected FullTextSettings setting;
protected IndexInfo index;
protected int[] columnTypes;
protected PreparedStatement prepInsertWord, prepInsertRow, prepInsertMap;
protected PreparedStatement prepDeleteRow, prepDeleteMap;
protected PreparedStatement prepSelectRow;
private static final String SQL[] = {
"MERGE INTO " + SCHEMA + ".WORDS(NAME) KEY(NAME) VALUES(?)",
"INSERT INTO " + SCHEMA + ".ROWS(HASH, INDEXID, KEY) VALUES(?, ?, ?)",
"INSERT INTO " + SCHEMA + ".MAP(ROWID, WORDID) VALUES(?, ?)",
"DELETE FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?",
"DELETE FROM " + SCHEMA + ".MAP WHERE ROWID=? AND WORDID=?",
"SELECT ID FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?"
};
/**
* INTERNAL
......@@ -915,7 +930,8 @@ public class FullText {
}
ArrayList<String> indexList = New.arrayList();
PreparedStatement prep = conn.prepareStatement(
"SELECT ID, COLUMNS FROM " + SCHEMA + ".INDEXES WHERE SCHEMA=? AND TABLE=?");
"SELECT ID, COLUMNS FROM " + SCHEMA + ".INDEXES" +
" WHERE SCHEMA=? AND TABLE=?");
prep.setString(1, schemaName);
prep.setString(2, tableName);
rs = prep.executeQuery();
......@@ -923,9 +939,7 @@ public class FullText {
index.id = rs.getInt(1);
String columns = rs.getString(2);
if (columns != null) {
for (String s : StringUtils.arraySplit(columns, ',', true)) {
indexList.add(s);
}
Collections.addAll(indexList, StringUtils.arraySplit(columns, ',', true));
}
}
if (indexList.size() == 0) {
......@@ -936,18 +950,23 @@ public class FullText {
index.indexColumns = new int[indexList.size()];
setColumns(index.indexColumns, indexList, columnList);
setting.addIndexInfo(index);
prepInsertWord = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".WORDS(NAME) VALUES(?)");
prepInsertRow = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".ROWS(HASH, INDEXID, KEY) VALUES(?, ?, ?)");
prepInsertMap = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".MAP(ROWID, WORDID) VALUES(?, ?)");
prepDeleteRow = conn.prepareStatement(
"DELETE FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?");
prepDeleteMap = conn.prepareStatement(
"DELETE FROM " + SCHEMA + ".MAP WHERE ROWID=? AND WORDID=?");
prepSelectRow = conn.prepareStatement(
"SELECT ID FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?");
useOwnConnection = isMultiThread(conn);
if(!useOwnConnection) {
for (int i = 0; i < SQL.length; i++) {
prepStatements[i] = conn.prepareStatement(SQL[i]);
}
}
}
/**
 * Check whether the database behind the given connection runs in
 * multi-threaded mode, by reading the MULTI_THREADED entry from
 * INFORMATION_SCHEMA.SETTINGS.
 *
 * @param conn the connection to the database to inspect
 * @return true if the MULTI_THREADED setting exists and is not "0"
 */
private static boolean isMultiThread(Connection conn)
        throws SQLException {
    try (Statement stat = conn.createStatement()) {
        ResultSet rs = stat.executeQuery(
                "SELECT value FROM information_schema.settings" +
                " WHERE name = 'MULTI_THREADED'");
        // No row means the setting is absent: treat as single-threaded.
        if (!rs.next()) {
            return false;
        }
        String value = rs.getString(1);
        return !"0".equals(value);
    }
}
/**
......@@ -960,16 +979,16 @@ public class FullText {
if (newRow != null) {
// update
if (hasChanged(oldRow, newRow, index.indexColumns)) {
delete(oldRow);
insert(newRow);
delete(conn, oldRow);
insert(conn, newRow);
}
} else {
// delete
delete(oldRow);
delete(conn, oldRow);
}
} else if (newRow != null) {
// insert
insert(newRow);
insert(conn, newRow);
}
}
......@@ -992,54 +1011,82 @@ public class FullText {
/**
* Add a row to the index.
*
* @param conn to use
* @param row the row
*/
protected void insert(Object[] row) throws SQLException {
String key = getKey(row);
int hash = key.hashCode();
prepInsertRow.setInt(1, hash);
prepInsertRow.setInt(2, index.id);
prepInsertRow.setString(3, key);
prepInsertRow.execute();
ResultSet rs = prepInsertRow.getGeneratedKeys();
rs.next();
int rowId = rs.getInt(1);
prepInsertMap.setInt(1, rowId);
int[] wordIds = getWordIds(row);
for (int id : wordIds) {
prepInsertMap.setInt(2, id);
prepInsertMap.execute();
/**
 * Add a row to the full-text index: insert a ROWS entry keyed by the
 * row's primary-key condition, then one MAP entry per indexed word.
 *
 * @param conn the connection to prepare statements on (used in
 *             multithreaded mode; otherwise cached statements are used)
 * @param row the row data (column values)
 */
protected void insert(Connection conn, Object[] row) throws SQLException {
    PreparedStatement prepInsertRow = null;
    PreparedStatement prepInsertMap = null;
    try {
        // The key encodes the row's primary key; its hash is stored
        // alongside it for fast lookup on delete.
        String key = getKey(row);
        int hash = key.hashCode();
        prepInsertRow = getStatement(conn, INSERT_ROW);
        prepInsertRow.setInt(1, hash);
        prepInsertRow.setInt(2, index.id);
        prepInsertRow.setString(3, key);
        prepInsertRow.execute();
        // Retrieve the generated id of the ROWS entry just inserted.
        ResultSet rs = prepInsertRow.getGeneratedKeys();
        rs.next();
        int rowId = rs.getInt(1);
        prepInsertMap = getStatement(conn, INSERT_MAP);
        prepInsertMap.setInt(1, rowId);
        // Link every indexed word of this row to the new row id.
        int[] wordIds = getWordIds(conn, row);
        for (int id : wordIds) {
            prepInsertMap.setInt(2, id);
            prepInsertMap.execute();
        }
    } finally {
        // In multithreaded mode (useOwnConnection) statements are prepared
        // per call on the caller's connection and must be closed here; in
        // single-threaded mode they come from the prepStatements cache and
        // must remain open.
        if (useOwnConnection) {
            IOUtils.closeSilently(prepInsertRow);
            IOUtils.closeSilently(prepInsertMap);
        }
    }
}
/**
* Delete a row from the index.
*
* @param conn to use
* @param row the row
*/
protected void delete(Object[] row) throws SQLException {
String key = getKey(row);
int hash = key.hashCode();
prepSelectRow.setInt(1, hash);
prepSelectRow.setInt(2, index.id);
prepSelectRow.setString(3, key);
ResultSet rs = prepSelectRow.executeQuery();
if (rs.next()) {
int rowId = rs.getInt(1);
prepDeleteMap.setInt(1, rowId);
int[] wordIds = getWordIds(row);
for (int id : wordIds) {
prepDeleteMap.setInt(2, id);
prepDeleteMap.executeUpdate();
protected void delete(Connection conn, Object[] row) throws SQLException {
PreparedStatement prepSelectRow = null;
PreparedStatement prepDeleteMap = null;
PreparedStatement prepDeleteRow = null;
try {
String key = getKey(row);
int hash = key.hashCode();
prepSelectRow = getStatement(conn, SELECT_ROW);
prepSelectRow.setInt(1, hash);
prepSelectRow.setInt(2, index.id);
prepSelectRow.setString(3, key);
ResultSet rs = prepSelectRow.executeQuery();
prepDeleteMap = getStatement(conn, DELETE_MAP);
prepDeleteRow = getStatement(conn, DELETE_ROW);
if (rs.next()) {
int rowId = rs.getInt(1);
prepDeleteMap.setInt(1, rowId);
int[] wordIds = getWordIds(conn, row);
for (int id : wordIds) {
prepDeleteMap.setInt(2, id);
prepDeleteMap.executeUpdate();
}
prepDeleteRow.setInt(1, hash);
prepDeleteRow.setInt(2, index.id);
prepDeleteRow.setString(3, key);
prepDeleteRow.executeUpdate();
}
} finally {
if (useOwnConnection) {
IOUtils.closeSilently(prepSelectRow);
IOUtils.closeSilently(prepDeleteMap);
IOUtils.closeSilently(prepDeleteRow);
}
prepDeleteRow.setInt(1, hash);
prepDeleteRow.setInt(2, index.id);
prepDeleteRow.setString(3, key);
prepDeleteRow.executeUpdate();
}
}
private int[] getWordIds(Object[] row) throws SQLException {
private int[] getWordIds(Connection conn, Object[] row) throws SQLException {
HashSet<String> words = New.hashSet();
for (int idx : index.indexColumns) {
int type = columnTypes[idx];
......@@ -1057,27 +1104,36 @@ public class FullText {
addWords(setting, words, string);
}
}
HashMap<String, Integer> allWords = setting.getWordList();
int[] wordIds = new int[words.size()];
Iterator<String> it = words.iterator();
for (int i = 0; it.hasNext(); i++) {
String word = it.next();
Integer wId = allWords.get(word);
int wordId;
if (wId == null) {
prepInsertWord.setString(1, word);
prepInsertWord.execute();
ResultSet rs = prepInsertWord.getGeneratedKeys();
rs.next();
wordId = rs.getInt(1);
allWords.put(word, wordId);
} else {
wordId = wId.intValue();
PreparedStatement prepInsertWord = null;
try {
prepInsertWord = getStatement(conn, INSERT_WORD);
int[] wordIds = new int[words.size()];
int i = 0;
for (String word : words) {
int wordId;
Integer wId;
while((wId = setting.getWordId(word)) == null) {
prepInsertWord.setString(1, word);
prepInsertWord.execute();
ResultSet rs = prepInsertWord.getGeneratedKeys();
if (rs.next()) {
wordId = rs.getInt(1);
if (wordId != 0) {
setting.addWord(word, wordId);
wId = wordId;
break;
}
}
}
wordIds[i++] = wId;
}
Arrays.sort(wordIds);
return wordIds;
} finally {
if (useOwnConnection) {
IOUtils.closeSilently(prepInsertWord);
}
wordIds[i] = wordId;
}
Arrays.sort(wordIds);
return wordIds;
}
private String getKey(Object[] row) throws SQLException {
......@@ -1095,6 +1151,10 @@ public class FullText {
return buff.toString();
}
/**
 * Obtain the prepared statement for the given entry of the SQL array.
 * In multithreaded mode a fresh statement is prepared on the caller's
 * connection (and must be closed by the caller); otherwise the
 * statement cached at init() time is returned.
 *
 * @param conn the connection to prepare on when using own connections
 * @param indx the index into the SQL statement array
 * @return the prepared statement
 */
private PreparedStatement getStatement(Connection conn, int indx) throws SQLException {
    if (useOwnConnection) {
        return conn.prepareStatement(SQL[indx]);
    }
    return prepStatements[indx];
}
}
/**
......@@ -1116,5 +1176,4 @@ public class FullText {
throws SQLException {
throw new SQLException(message, "FULLTEXT");
}
}
......@@ -13,6 +13,7 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
......@@ -37,6 +38,8 @@ import org.h2.util.StatementBuilder;
import org.h2.util.StringUtils;
import org.h2.util.Utils;
import java.io.File;
import java.util.Map;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;
......@@ -94,27 +97,23 @@ public class FullTextLucene extends FullText {
* @param conn the connection
*/
public static void init(Connection conn) throws SQLException {
Statement stat = conn.createStatement();
stat.execute("CREATE SCHEMA IF NOT EXISTS " + SCHEMA);
stat.execute("CREATE TABLE IF NOT EXISTS " + SCHEMA +
".INDEXES(SCHEMA VARCHAR, TABLE VARCHAR, " +
"COLUMNS VARCHAR, PRIMARY KEY(SCHEMA, TABLE))");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_CREATE_INDEX FOR \"" +
FullTextLucene.class.getName() + ".createIndex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_INDEX FOR \"" +
FullTextLucene.class.getName() + ".dropIndex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_SEARCH FOR \"" +
FullTextLucene.class.getName() + ".search\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_SEARCH_DATA FOR \"" +
FullTextLucene.class.getName() + ".searchData\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_REINDEX FOR \"" +
FullTextLucene.class.getName() + ".reindex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_ALL FOR \"" +
FullTextLucene.class.getName() + ".dropAll\"");
try {
getIndexAccess(conn);
} catch (SQLException e) {
throw convertException(e);
try (Statement stat = conn.createStatement()) {
stat.execute("CREATE SCHEMA IF NOT EXISTS " + SCHEMA);
stat.execute("CREATE TABLE IF NOT EXISTS " + SCHEMA +
".INDEXES(SCHEMA VARCHAR, TABLE VARCHAR, " +
"COLUMNS VARCHAR, PRIMARY KEY(SCHEMA, TABLE))");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_CREATE_INDEX FOR \"" +
FullTextLucene.class.getName() + ".createIndex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_INDEX FOR \"" +
FullTextLucene.class.getName() + ".dropIndex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_SEARCH FOR \"" +
FullTextLucene.class.getName() + ".search\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_SEARCH_DATA FOR \"" +
FullTextLucene.class.getName() + ".searchData\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_REINDEX FOR \"" +
FullTextLucene.class.getName() + ".reindex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_ALL FOR \"" +
FullTextLucene.class.getName() + ".dropAll\"");
}
}
......@@ -157,11 +156,9 @@ public class FullTextLucene extends FullText {
prep.setString(1, schema);
prep.setString(2, table);
int rowCount = prep.executeUpdate();
if (rowCount == 0) {
return;
if (rowCount != 0) {
reindex(conn);
}
reindex(conn);
}
/**
......@@ -248,10 +245,7 @@ public class FullTextLucene extends FullText {
* @return the converted SQL exception
*/
protected static SQLException convertException(Exception e) {
SQLException e2 = new SQLException(
"Error while indexing document", "FULLTEXT");
e2.initCause(e);
return e2;
return new SQLException("Error while indexing document", "FULLTEXT", e);
}
/**
......@@ -261,8 +255,8 @@ public class FullTextLucene extends FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
private static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
createOrDropTrigger(conn, schema, table, true);
}
......@@ -309,11 +303,7 @@ public class FullTextLucene extends FullText {
conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
IndexWriter writer = new IndexWriter(indexDir, conf);
//see http://wiki.apache.org/lucene-java/NearRealtimeSearch
IndexReader reader = IndexReader.open(writer, true);
access = new IndexAccess();
access.writer = writer;
access.reader = reader;
access.searcher = new IndexSearcher(reader);
access = new IndexAccess(writer);
} catch (IOException e) {
throw convertException(e);
}
......@@ -353,8 +343,8 @@ public class FullTextLucene extends FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
private static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
FullTextLucene.FullTextTrigger existing = new FullTextLucene.FullTextTrigger();
existing.init(conn, schema, null, table, false, Trigger.INSERT);
String sql = "SELECT * FROM " + StringUtils.quoteIdentifier(schema) +
......@@ -373,10 +363,7 @@ public class FullTextLucene extends FullText {
private static void removeIndexFiles(Connection conn) throws SQLException {
String path = getIndexPath(conn);
IndexAccess access = INDEX_ACCESS.get(path);
if (access != null) {
removeIndexAccess(access, path);
}
removeIndexAccess(path);
if (!path.startsWith(IN_MEMORY_PREFIX)) {
FileUtils.deleteRecursive(path, false);
}
......@@ -386,17 +373,16 @@ public class FullTextLucene extends FullText {
* Close the index writer and searcher and remove them from the index access
* set.
*
* @param access the index writer/searcher wrapper
* @param indexPath the index path
*/
protected static void removeIndexAccess(IndexAccess access, String indexPath)
protected static void removeIndexAccess(String indexPath)
throws SQLException {
synchronized (INDEX_ACCESS) {
try {
INDEX_ACCESS.remove(indexPath);
access.searcher.close();
access.reader.close();
access.writer.close();
IndexAccess access = INDEX_ACCESS.remove(indexPath);
if(access != null) {
access.close();
}
} catch (Exception e) {
throw convertException(e);
}
......@@ -426,49 +412,53 @@ public class FullTextLucene extends FullText {
try {
IndexAccess access = getIndexAccess(conn);
// take a reference as the searcher may change
IndexSearcher searcher = access.searcher;
// reuse the same analyzer; it's thread-safe;
// also allows subclasses to control the analyzer used.
Analyzer analyzer = access.writer.getAnalyzer();
QueryParser parser = new QueryParser(Version.LUCENE_30,
LUCENE_FIELD_DATA, analyzer);
Query query = parser.parse(text);
// Lucene 3 insists on a hard limit and will not provide
// a total hits value. Take at least 100 which is
// an optimal limit for Lucene as any more
// will trigger writing results to disk.
int maxResults = (limit == 0 ? 100 : limit) + offset;
TopDocs docs = searcher.search(query, maxResults);
if (limit == 0) {
limit = docs.totalHits;
}
for (int i = 0, len = docs.scoreDocs.length;
i < limit && i + offset < docs.totalHits
&& i + offset < len; i++) {
ScoreDoc sd = docs.scoreDocs[i + offset];
Document doc = searcher.doc(sd.doc);
float score = sd.score;
String q = doc.get(LUCENE_FIELD_QUERY);
if (data) {
int idx = q.indexOf(" WHERE ");
JdbcConnection c = (JdbcConnection) conn;
Session session = (Session) c.getSession();
Parser p = new Parser(session);
String tab = q.substring(0, idx);
ExpressionColumn expr = (ExpressionColumn) p.parseExpression(tab);
String schemaName = expr.getOriginalTableAliasName();
String tableName = expr.getColumnName();
q = q.substring(idx + " WHERE ".length());
Object[][] columnData = parseKey(conn, q);
result.addRow(
schemaName,
tableName,
columnData[0],
columnData[1],
score);
} else {
result.addRow(q, score);
IndexSearcher searcher = access.getSearcher();
try {
// reuse the same analyzer; it's thread-safe;
// also allows subclasses to control the analyzer used.
Analyzer analyzer = access.writer.getAnalyzer();
QueryParser parser = new QueryParser(Version.LUCENE_30,
LUCENE_FIELD_DATA, analyzer);
Query query = parser.parse(text);
// Lucene 3 insists on a hard limit and will not provide
// a total hits value. Take at least 100 which is
// an optimal limit for Lucene as any more
// will trigger writing results to disk.
int maxResults = (limit == 0 ? 100 : limit) + offset;
TopDocs docs = searcher.search(query, maxResults);
if (limit == 0) {
limit = docs.totalHits;
}
for (int i = 0, len = docs.scoreDocs.length;
i < limit && i + offset < docs.totalHits
&& i + offset < len; i++) {
ScoreDoc sd = docs.scoreDocs[i + offset];
Document doc = searcher.doc(sd.doc);
float score = sd.score;
String q = doc.get(LUCENE_FIELD_QUERY);
if (data) {
int idx = q.indexOf(" WHERE ");
JdbcConnection c = (JdbcConnection) conn;
Session session = (Session) c.getSession();
Parser p = new Parser(session);
String tab = q.substring(0, idx);
ExpressionColumn expr = (ExpressionColumn) p.parseExpression(tab);
String schemaName = expr.getOriginalTableAliasName();
String tableName = expr.getColumnName();
q = q.substring(idx + " WHERE ".length());
Object[][] columnData = parseKey(conn, q);
result.addRow(
schemaName,
tableName,
columnData[0],
columnData[1],
score);
} else {
result.addRow(q, score);
}
}
} finally {
access.returnSearcher(searcher);
}
} catch (Exception e) {
throw convertException(e);
......@@ -479,16 +469,16 @@ public class FullTextLucene extends FullText {
/**
* Trigger that updates the index when inserting, updating, or deleting a row.
*/
public static class FullTextTrigger implements Trigger {
public static final class FullTextTrigger implements Trigger {
protected String schema;
protected String table;
protected int[] keys;
protected int[] indexColumns;
protected String[] columns;
protected int[] columnTypes;
protected String indexPath;
protected IndexAccess indexAccess;
private String schema;
private String table;
private int[] keys;
private int[] indexColumns;
private String[] columns;
private int[] columnTypes;
private String indexPath;
private IndexAccess indexAccess;
/**
* INTERNAL
......@@ -541,9 +531,8 @@ public class FullTextLucene extends FullText {
if (rs.next()) {
String cols = rs.getString(1);
if (cols != null) {
for (String s : StringUtils.arraySplit(cols, ',', true)) {
indexList.add(s);
}
Collections.addAll(indexList,
StringUtils.arraySplit(cols, ',', true));
}
}
if (indexList.size() == 0) {
......@@ -583,10 +572,7 @@ public class FullTextLucene extends FullText {
*/
@Override
public void close() throws SQLException {
if (indexAccess != null) {
removeIndexAccess(indexAccess, indexPath);
indexAccess = null;
}
removeIndexAccess(indexPath);
}
/**
......@@ -600,14 +586,9 @@ public class FullTextLucene extends FullText {
/**
* Commit all changes to the Lucene index.
*/
void commitIndex() throws SQLException {
private void commitIndex() throws SQLException {
try {
indexAccess.writer.commit();
// recreate Searcher with the IndexWriter's reader.
indexAccess.searcher.close();
indexAccess.reader.close();
indexAccess.reader = IndexReader.open(indexAccess.writer, true);
indexAccess.searcher = new IndexSearcher(indexAccess.reader);
indexAccess.commit();
} catch (IOException e) {
throw convertException(e);
}
......@@ -699,22 +680,80 @@ public class FullTextLucene extends FullText {
/**
* A wrapper for the Lucene writer and searcher.
*/
static class IndexAccess {
static final class IndexAccess {
/**
* The index writer.
*/
IndexWriter writer;
final IndexWriter writer;
/**
* The index reader.
* Map of usage counters for outstanding searchers.
*/
IndexReader reader;
private final Map<IndexSearcher,Integer> counters = New.hashMap();
/**
* Usage counter for current searcher.
*/
private int counter;
/**
* The index searcher.
*/
IndexSearcher searcher;
}
private IndexSearcher searcher;
private IndexAccess(IndexWriter writer) throws IOException {
this.writer = writer;
IndexReader reader = IndexReader.open(writer, true);
searcher = new IndexSearcher(reader);
}
/**
 * Borrow the current searcher, incrementing its usage count.
 * Every call must be balanced by a call to returnSearcher().
 *
 * @return the current index searcher
 */
private synchronized IndexSearcher getSearcher() {
    // Record one more outstanding user of the current searcher.
    counter++;
    return searcher;
}
/**
 * Return a searcher previously borrowed via getSearcher(), decrementing
 * its usage count. A searcher that has been superseded by commit() is
 * closed once its last outstanding user returns it.
 *
 * @param searcher the searcher to return
 */
private synchronized void returnSearcher(IndexSearcher searcher) {
    if (this.searcher == searcher) {
        // Still the current searcher: just decrement the live counter.
        --counter;
        assert counter >= 0;
    } else {
        // A stale searcher replaced by commit(): its remaining usage
        // count lives in the counters map.
        Integer cnt = counters.remove(searcher);
        assert cnt != null;
        if(--cnt == 0) {
            // Last user done with the stale searcher: release it.
            closeSearcher(searcher);
        } else {
            counters.put(searcher, cnt);
        }
    }
}
/**
 * Commit pending writer changes and replace the current searcher with a
 * fresh one that sees those changes. The old searcher is closed
 * immediately if unused, otherwise parked in the counters map so
 * returnSearcher() can close it when its last user finishes.
 */
public synchronized void commit() throws IOException {
    writer.commit();
    if (counter != 0) {
        // Old searcher still borrowed: move its usage count into the
        // map and reset the live counter for the new searcher.
        counters.put(searcher, counter);
        counter = 0;
    } else {
        closeSearcher(searcher);
    }
    // recreate Searcher with the IndexWriter's reader.
    searcher = new IndexSearcher(IndexReader.open(writer, true));
}
/**
 * Close the index access: release every superseded searcher still
 * tracked in the counters map, then the current searcher, and finally
 * the writer itself.
 */
public synchronized void close() throws IOException {
    // Renamed loop variable avoids shadowing the 'searcher' field.
    for (IndexSearcher stale : counters.keySet()) {
        closeSearcher(stale);
    }
    counters.clear();
    closeSearcher(searcher);
    searcher = null;
    writer.close();
}
/**
 * Best-effort close of a searcher and its underlying reader; any
 * IOException from either close is deliberately swallowed.
 *
 * @param searcher the searcher to close
 */
private static void closeSearcher(IndexSearcher searcher) {
    // Grab the reader first: it must be closed after the searcher.
    IndexReader reader = searcher.getIndexReader();
    try {
        searcher.close();
    } catch (IOException ignored) {
        // best effort only
    }
    try {
        reader.close();
    } catch (IOException ignored) {
        // best effort only
    }
}
}
}
......@@ -10,20 +10,22 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import org.h2.util.New;
import org.h2.util.SoftHashMap;
/**
* The global settings of a full text search.
*/
class FullTextSettings {
final class FullTextSettings {
/**
* The settings of open indexes.
*/
private static final HashMap<String, FullTextSettings> SETTINGS = New.hashMap();
private static final Map<String, FullTextSettings> SETTINGS = New.hashMap();
/**
* Whether this instance has been initialized.
......@@ -33,17 +35,17 @@ class FullTextSettings {
/**
* The set of words not to index (stop words).
*/
private final HashSet<String> ignoreList = New.hashSet();
private final Set<String> ignoreList = New.hashSet();
/**
* The set of words / terms.
*/
private final HashMap<String, Integer> words = New.hashMap();
private final Map<String, Integer> words = New.hashMap();
/**
* The set of indexes in this database.
*/
private final HashMap<Integer, IndexInfo> indexes = New.hashMap();
private final Map<Integer, IndexInfo> indexes = Collections.synchronizedMap(New.<Integer, IndexInfo>hashMap());
/**
* The prepared statement cache.
......@@ -60,26 +62,63 @@ class FullTextSettings {
/**
* Create a new instance.
*/
protected FullTextSettings() {
private FullTextSettings() {
// don't allow construction
}
/**
* Get the ignore list.
*
* @return the ignore list
* Clear set of ignored words
*/
protected HashSet<String> getIgnoreList() {
return ignoreList;
/**
 * Clear the set of ignored (stop) words.
 * NOTE(review): the method name is misspelled ("Inored" instead of
 * "Ignored"); it is kept as-is because renaming would break existing
 * callers (e.g. FullText.dropAll()).
 */
public void clearInored() {
    synchronized (ignoreList) {
        ignoreList.clear();
    }
}
/**
* Get the word list.
*
* @return the word list
* Amend set of ignored words
* @param words to add
*/
/**
 * Add words to the set of ignored (stop) words. Each word is
 * normalized (upper-cased via normalizeWord) before being stored.
 * Note: the parameter shadows the 'words' field (the searchable word
 * map); only the ignoreList is modified here.
 *
 * @param words the words to ignore
 */
public void addIgnored(Iterable<String> words) {
    synchronized (ignoreList) {
        for (String word : words) {
            word = normalizeWord(word);
            ignoreList.add(word);
        }
    }
}
/**
* Clear set of searchable words
*/
/**
 * Clear the map of searchable words (word -> id).
 */
public void clearWordList() {
    synchronized (words) {
        words.clear();
    }
}
/**
* Get id for a searchable word
* @param word to find id for
* @return Integer id or null if word is not found
*/
/**
 * Look up the id of a searchable word.
 *
 * @param word the word to look up (expected already normalized)
 * @return the word id, or null if the word is not registered
 */
public Integer getWordId(String word) {
    synchronized (words) {
        return words.get(word);
    }
}
/**
* Register searchable word
* @param word to register
* @param id to register with
*/
protected HashMap<String, Integer> getWordList() {
return words;
/**
 * Register a searchable word with its id. If the word is already
 * registered, the existing id is kept (first registration wins).
 *
 * @param word the word to register
 * @param id the id to register it with
 */
public void addWord(String word, Integer id) {
    synchronized (words) {
        // Keep the first id seen for a word; concurrent inserters may
        // race to register the same word.
        if(!words.containsKey(word)) {
            words.put(word, id);
        }
    }
}
/**
......@@ -109,10 +148,11 @@ class FullTextSettings {
* @return the uppercase version of the word or null
*/
protected String convertWord(String word) {
// TODO this is locale specific, document
word = word.toUpperCase();
if (ignoreList.contains(word)) {
return null;
word = normalizeWord(word);
synchronized (ignoreList) {
if (ignoreList.contains(word)) {
return null;
}
}
return word;
}
......@@ -126,10 +166,13 @@ class FullTextSettings {
protected static FullTextSettings getInstance(Connection conn)
throws SQLException {
String path = getIndexPath(conn);
FullTextSettings setting = SETTINGS.get(path);
if (setting == null) {
setting = new FullTextSettings();
SETTINGS.put(path, setting);
FullTextSettings setting;
synchronized (SETTINGS) {
setting = SETTINGS.get(path);
if (setting == null) {
setting = new FullTextSettings();
SETTINGS.put(path, setting);
}
}
return setting;
}
......@@ -140,7 +183,7 @@ class FullTextSettings {
* @param conn the connection
* @return the file system path
*/
protected static String getIndexPath(Connection conn) throws SQLException {
private static String getIndexPath(Connection conn) throws SQLException {
Statement stat = conn.createStatement();
ResultSet rs = stat.executeQuery(
"CALL IFNULL(DATABASE_PATH(), 'MEM:' || DATABASE())");
......@@ -218,7 +261,9 @@ class FullTextSettings {
* Close all fulltext settings, freeing up memory.
*/
protected static void closeAll() {
SETTINGS.clear();
synchronized (SETTINGS) {
SETTINGS.clear();
}
}
protected void setWhitespaceChars(String whitespaceChars) {
......@@ -229,4 +274,8 @@ class FullTextSettings {
return whitespaceChars;
}
/**
 * Normalize a word for use as a key in the word and ignore sets.
 *
 * @param word the raw word
 * @return the upper-cased word
 */
private String normalizeWord(String word) {
    // TODO this is locale specific, document
    // (no-arg String.toUpperCase() uses the JVM default locale, so
    // results can differ across locales, e.g. Turkish dotted/dotless i)
    return word.toUpperCase();
}
}
......@@ -22,6 +22,7 @@ import java.util.concurrent.TimeUnit;
import org.h2.fulltext.FullText;
import org.h2.store.fs.FileUtils;
import org.h2.test.TestAll;
import org.h2.test.TestBase;
import org.h2.util.IOUtils;
import org.h2.util.Task;
......@@ -50,10 +51,6 @@ public class TestFullText extends TestBase {
@Override
public void test() throws Exception {
if (config.multiThreaded) {
// It is even mentioned in the docs that this is not supported
return;
}
testUuidPrimaryKey(false);
testAutoAnalyze();
testNativeFeatures();
......@@ -71,7 +68,9 @@ public class TestFullText extends TestBase {
testCreateDropLucene();
testUuidPrimaryKey(true);
testMultiThreaded(true);
testMultiThreaded(false);
if(config.mvStore || !config.multiThreaded) {
testMultiThreaded(false);
}
testTransaction(true);
test(true, "VARCHAR");
test(true, "CLOB");
......@@ -256,7 +255,7 @@ public class TestFullText extends TestBase {
int len = 2;
Task[] task = new Task[len];
for (int i = 0; i < len; i++) {
final Connection conn = getConnection("fullText", connList);
final Connection conn = getConnection("fullText;LOCK_TIMEOUT=60000", connList);
Statement stat = conn.createStatement();
initFullText(stat, lucene);
initFullText(stat, lucene);
......
Markdown formatting supported
0%
You added 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to comment