Commit d4344eec authored by Noel Grandin

Merge branch 'master' of https://github.com/h2database/h2database

......@@ -21,6 +21,8 @@ Change Log
<h2>Next Version (unreleased)</h2>
<ul>
<li>Issue #569: ClassCastException when filtering on ENUM value in WHERE clause
</li>
<li>Issue #539: Allow override of builtin functions/aliases
</li>
<li>Issue #535: Allow explicit paths on Windows without drive letter
......
......@@ -69,8 +69,7 @@ Here is the list of known and confirmed issues:
will differ. This is not a problem within regions that use the same rules (such as within the
USA, or within Europe), even if the timezone itself is different. As a workaround, export the
database to a SQL script using the old timezone, and create a new database in the new
timezone. This problem does not occur when using the system property "h2.storeLocalTime"
(however such database files are not compatible with older versions of H2).
timezone.
</li><li>Apache Harmony: there seems to be a bug in Harmony that affects H2.
See <a href="http://issues.apache.org/jira/browse/HARMONY-6505">HARMONY-6505</a>.
</li><li>Tomcat and Glassfish 3 set most static fields (final or non-final) to <code>null</code> when
......
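A minimal JDBC sketch of the export/re-create workaround described in the timezone entry above. The database URLs and the script file name are placeholders; SCRIPT TO and RUNSCRIPT FROM are the relevant H2 statements.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class TimezoneMigration {
    public static void main(String[] args) throws Exception {
        // Step 1: while the JVM still runs in the old default timezone,
        // export the existing database to a SQL script.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:~/olddb", "sa", "");
             Statement stat = conn.createStatement()) {
            stat.execute("SCRIPT TO 'backup.sql'");
        }
        // Step 2: after switching to the new timezone, create a fresh
        // database and load the script into it.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:~/newdb", "sa", "");
             Statement stat = conn.createStatement()) {
            stat.execute("RUNSCRIPT FROM 'backup.sql'");
        }
    }
}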
......@@ -1320,13 +1320,6 @@ SELECT * FROM FTL_SEARCH_DATA('John', 0, 0);
SELECT * FROM FTL_SEARCH_DATA('LAST_NAME:John', 0, 0);
CALL FTL_DROP_ALL();
</pre>
<p>
The Lucene fulltext search implementation is not synchronized internally.
If you update the database and query the fulltext search concurrently
(directly using the Java API of H2 or Lucene itself), you need to ensure
operations are properly synchronized. If this is not the case, you may get
exceptions such as <code>org.apache.lucene.store.AlreadyClosedException: this IndexReader is closed</code>.
</p>
<h2 id="user_defined_variables">User-Defined Variables</h2>
<p>
......
......@@ -165,34 +165,13 @@ public class ErrorCode {
* but the value is not one of the values enumerated by the
* type.
*
* This error is best thrown in a context where the column name
* and its enumerated values are known.
*
* Example:
* <pre>
* CREATE TABLE TEST(CASE ENUM('sensitive','insensitive'));
* INSERT INTO TEST VALUES('snake');
* </pre>
*/
public static final int ENUM_VALUE_NOT_PERMITTED_1 = 22030;
/**
* The error with code <code>22031</code> is typically thrown
* when a math operation is attempted on an ENUM-typed cell,
* but the value resulting from the operation is not one of
* values enumerated by the type.
*
* This error is best thrown in a context where the column name
* is not known, but the enumerated values of the type are known.
*
* Example:
* <pre>
* CREATE TABLE TEST(CASE ENUM('sensitive','insensitive'));
* INSERT INTO TEST VALUES('sensitive');
* UPDATE TEST SET CASE = CASE + 100;
* </pre>
*/
public static final int ENUM_VALUE_NOT_PERMITTED_2 = 22031;
public static final int ENUM_VALUE_NOT_PERMITTED = 22030;
/**
* The error with code <code>22032</code> is thrown when an
......
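For reference, a small JDBC sketch, mirroring the removed Javadoc examples (the table and column names are illustrative), that raises the merged 22030 error:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class EnumValueNotPermittedExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:enumdemo", "sa", "");
             Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(SENSITIVITY ENUM('sensitive', 'insensitive'))");
            try {
                // 'snake' is not one of the enumerated values
                stat.execute("INSERT INTO TEST VALUES('snake')");
            } catch (SQLException e) {
                System.out.println(e.getErrorCode()); // 22030 (ENUM_VALUE_NOT_PERMITTED)
            }
        }
    }
}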
......@@ -7,13 +7,10 @@ package org.h2.command.ddl;
import org.h2.api.ErrorCode;
import org.h2.command.CommandInterface;
import org.h2.engine.Right;
import org.h2.engine.Session;
import org.h2.message.DbException;
import org.h2.schema.Schema;
import org.h2.table.Table;
import org.h2.table.TableSynonym;
import org.h2.table.TableType;
/**
* This class represents the statement
......
......@@ -203,7 +203,8 @@ public class Comparison extends Condition {
// once.
if (constType != resType) {
right = ValueExpression.get(r.convertTo(resType,
MathUtils.convertLongToInt(left.getPrecision()), session.getDatabase().getMode()));
MathUtils.convertLongToInt(left.getPrecision()),
session.getDatabase().getMode(), ((ExpressionColumn) left).getColumn()));
}
} else if (right instanceof Parameter) {
((Parameter) right).setColumn(
......
......@@ -189,7 +189,7 @@ public class ExpressionColumn extends Expression {
throw DbException.get(ErrorCode.MUST_GROUP_BY_COLUMN_1, getSQL());
}
if (column.getEnumerators() != null) {
return ValueEnum.get(column.getEnumerators(), value);
return ValueEnum.get(column.getEnumerators(), value.getInt());
}
return value;
}
......
......@@ -18,11 +18,12 @@ import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.UUID;
import org.h2.api.Trigger;
import org.h2.command.Parser;
import org.h2.engine.Session;
......@@ -151,13 +152,12 @@ public class FullText {
}
}
rs = stat.executeQuery("SELECT * FROM " + SCHEMA + ".WORDS");
HashMap<String, Integer> map = setting.getWordList();
while (rs.next()) {
String word = rs.getString("NAME");
int id = rs.getInt("ID");
word = setting.convertWord(word);
if (word != null) {
map.put(word, id);
setting.addWord(word, id);
}
}
setting.setInitialized(true);
......@@ -195,7 +195,7 @@ public class FullText {
init(conn);
removeAllTriggers(conn, TRIGGER_PREFIX);
FullTextSettings setting = FullTextSettings.getInstance(conn);
setting.getWordList().clear();
setting.clearWordList();
Statement stat = conn.createStatement();
stat.execute("TRUNCATE TABLE " + SCHEMA + ".WORDS");
stat.execute("TRUNCATE TABLE " + SCHEMA + ".ROWS");
......@@ -243,9 +243,9 @@ public class FullText {
break;
}
}
prep = conn.prepareStatement("DELETE FROM " + SCHEMA + ".MAP M " +
prep = conn.prepareStatement("DELETE FROM " + SCHEMA + ".MAP " +
"WHERE NOT EXISTS (SELECT * FROM " + SCHEMA +
".ROWS R WHERE R.ID=M.ROWID) AND ROWID<10000");
".ROWS R WHERE R.ID=ROWID) AND ROWID<10000");
while (true) {
int deleted = prep.executeUpdate();
if (deleted == 0) {
......@@ -266,8 +266,8 @@ public class FullText {
removeAllTriggers(conn, TRIGGER_PREFIX);
FullTextSettings setting = FullTextSettings.getInstance(conn);
setting.removeAllIndexes();
setting.getIgnoreList().clear();
setting.getWordList().clear();
setting.clearIgnored();
setting.clearWordList();
}
/**
......@@ -605,17 +605,16 @@ public class FullText {
if (!setting.isInitialized()) {
init(conn);
}
HashSet<String> words = New.hashSet();
Set<String> words = New.hashSet();
addWords(setting, words, text);
HashSet<Integer> rIds = null, lastRowIds = null;
HashMap<String, Integer> allWords = setting.getWordList();
Set<Integer> rIds = null, lastRowIds;
PreparedStatement prepSelectMapByWordId = setting.prepare(conn,
SELECT_MAP_BY_WORD_ID);
for (String word : words) {
lastRowIds = rIds;
rIds = New.hashSet();
Integer wId = allWords.get(word);
Integer wId = setting.getWordId(word);
if (wId == null) {
continue;
}
......@@ -698,7 +697,7 @@ public class FullText {
* @param reader the reader
*/
protected static void addWords(FullTextSettings setting,
HashSet<String> set, Reader reader) {
Set<String> set, Reader reader) {
StreamTokenizer tokenizer = new StreamTokenizer(reader);
tokenizer.resetSyntax();
tokenizer.wordChars(' ' + 1, 255);
......@@ -732,7 +731,7 @@ public class FullText {
* @param text the text
*/
protected static void addWords(FullTextSettings setting,
HashSet<String> set, String text) {
Set<String> set, String text) {
String whitespaceChars = setting.getWhitespaceChars();
StringTokenizer tokenizer = new StringTokenizer(text, whitespaceChars);
while (tokenizer.hasMoreTokens()) {
......@@ -751,30 +750,38 @@ public class FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
private static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
createOrDropTrigger(conn, schema, table, true);
}
private static void createOrDropTrigger(Connection conn,
String schema, String table, boolean create) throws SQLException {
Statement stat = conn.createStatement();
String trigger = StringUtils.quoteIdentifier(schema) + "."
+ StringUtils.quoteIdentifier(TRIGGER_PREFIX + table);
stat.execute("DROP TRIGGER IF EXISTS " + trigger);
if (create) {
StringBuilder buff = new StringBuilder("CREATE TRIGGER IF NOT EXISTS ");
// needs to be called on rollback as well, because we use the init
// connection to do changes in the index (not the user connection)
buff.append(trigger).
append(" AFTER INSERT, UPDATE, DELETE, ROLLBACK ON ").
append(StringUtils.quoteIdentifier(schema)).
append('.').
append(StringUtils.quoteIdentifier(table)).
append(" FOR EACH ROW CALL \"").
append(FullText.FullTextTrigger.class.getName()).
append('\"');
stat.execute(buff.toString());
try (Statement stat = conn.createStatement()) {
String trigger = StringUtils.quoteIdentifier(schema) + "."
+ StringUtils.quoteIdentifier(TRIGGER_PREFIX + table);
stat.execute("DROP TRIGGER IF EXISTS " + trigger);
if (create) {
boolean multiThread = FullTextTrigger.isMultiThread(conn);
StringBuilder buff = new StringBuilder(
"CREATE TRIGGER IF NOT EXISTS ");
// unless multithread, trigger needs to be called on rollback as well,
// because we use the init connection to do changes in the index
// (not the user connection)
buff.append(trigger).
append(" AFTER INSERT, UPDATE, DELETE");
if(!multiThread) {
buff.append(", ROLLBACK");
}
buff.append(" ON ").
append(StringUtils.quoteIdentifier(schema)).
append('.').
append(StringUtils.quoteIdentifier(table)).
append(" FOR EACH ROW CALL \"").
append(FullText.FullTextTrigger.class.getName()).
append('\"');
stat.execute(buff.toString());
}
}
}
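The trigger above is normally installed through the public fulltext API rather than directly. A minimal sketch, assuming a hypothetical table TEST(ID INT PRIMARY KEY, NAME VARCHAR) in an in-memory database:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.h2.fulltext.FullText;

public class NativeFullTextExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:ft", "sa", "")) {
            try (Statement stat = conn.createStatement()) {
                stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR)");
                stat.execute("INSERT INTO TEST VALUES(1, 'Hello World')");
            }
            FullText.init(conn);                                // registers the FT_* aliases
            FullText.createIndex(conn, "PUBLIC", "TEST", null); // installs the trigger built above
            try (Statement stat = conn.createStatement();
                 ResultSet rs = stat.executeQuery("SELECT * FROM FT_SEARCH('Hello', 0, 0)")) {
                while (rs.next()) {
                    System.out.println(rs.getString("QUERY"));
                }
            }
        }
    }
}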
......@@ -785,8 +792,8 @@ public class FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
private static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
FullText.FullTextTrigger existing = new FullText.FullTextTrigger();
existing.init(conn, schema, null, table, false, Trigger.INSERT);
String sql = "SELECT * FROM " + StringUtils.quoteIdentifier(schema) +
......@@ -823,13 +830,7 @@ public class FullText {
private static void setIgnoreList(FullTextSettings setting,
String commaSeparatedList) {
String[] list = StringUtils.arraySplit(commaSeparatedList, ',', true);
HashSet<String> set = setting.getIgnoreList();
for (String word : list) {
String converted = setting.convertWord(word);
if (converted != null) {
set.add(converted);
}
}
setting.addIgnored(Arrays.asList(list));
}
/**
......@@ -860,14 +861,28 @@ public class FullText {
/**
* Trigger that updates the index when inserting, updating, or deleting a row.
*/
public static class FullTextTrigger implements Trigger {
public static final class FullTextTrigger implements Trigger {
private FullTextSettings setting;
private IndexInfo index;
private int[] columnTypes;
private final PreparedStatement[] prepStatements = new PreparedStatement[SQL.length];
private boolean useOwnConnection;
private static final int INSERT_WORD = 0;
private static final int INSERT_ROW = 1;
private static final int INSERT_MAP = 2;
private static final int DELETE_ROW = 3;
private static final int DELETE_MAP = 4;
private static final int SELECT_ROW = 5;
protected FullTextSettings setting;
protected IndexInfo index;
protected int[] columnTypes;
protected PreparedStatement prepInsertWord, prepInsertRow, prepInsertMap;
protected PreparedStatement prepDeleteRow, prepDeleteMap;
protected PreparedStatement prepSelectRow;
private static final String SQL[] = {
"MERGE INTO " + SCHEMA + ".WORDS(NAME) KEY(NAME) VALUES(?)",
"INSERT INTO " + SCHEMA + ".ROWS(HASH, INDEXID, KEY) VALUES(?, ?, ?)",
"INSERT INTO " + SCHEMA + ".MAP(ROWID, WORDID) VALUES(?, ?)",
"DELETE FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?",
"DELETE FROM " + SCHEMA + ".MAP WHERE ROWID=? AND WORDID=?",
"SELECT ID FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?"
};
/**
* INTERNAL
......@@ -915,7 +930,8 @@ public class FullText {
}
ArrayList<String> indexList = New.arrayList();
PreparedStatement prep = conn.prepareStatement(
"SELECT ID, COLUMNS FROM " + SCHEMA + ".INDEXES WHERE SCHEMA=? AND TABLE=?");
"SELECT ID, COLUMNS FROM " + SCHEMA + ".INDEXES" +
" WHERE SCHEMA=? AND TABLE=?");
prep.setString(1, schemaName);
prep.setString(2, tableName);
rs = prep.executeQuery();
......@@ -923,9 +939,7 @@ public class FullText {
index.id = rs.getInt(1);
String columns = rs.getString(2);
if (columns != null) {
for (String s : StringUtils.arraySplit(columns, ',', true)) {
indexList.add(s);
}
Collections.addAll(indexList, StringUtils.arraySplit(columns, ',', true));
}
}
if (indexList.size() == 0) {
......@@ -936,18 +950,23 @@ public class FullText {
index.indexColumns = new int[indexList.size()];
setColumns(index.indexColumns, indexList, columnList);
setting.addIndexInfo(index);
prepInsertWord = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".WORDS(NAME) VALUES(?)");
prepInsertRow = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".ROWS(HASH, INDEXID, KEY) VALUES(?, ?, ?)");
prepInsertMap = conn.prepareStatement(
"INSERT INTO " + SCHEMA + ".MAP(ROWID, WORDID) VALUES(?, ?)");
prepDeleteRow = conn.prepareStatement(
"DELETE FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?");
prepDeleteMap = conn.prepareStatement(
"DELETE FROM " + SCHEMA + ".MAP WHERE ROWID=? AND WORDID=?");
prepSelectRow = conn.prepareStatement(
"SELECT ID FROM " + SCHEMA + ".ROWS WHERE HASH=? AND INDEXID=? AND KEY=?");
useOwnConnection = isMultiThread(conn);
if(!useOwnConnection) {
for (int i = 0; i < SQL.length; i++) {
prepStatements[i] = conn.prepareStatement(SQL[i]);
}
}
}
private static boolean isMultiThread(Connection conn)
throws SQLException {
try (Statement stat = conn.createStatement()) {
ResultSet rs = stat.executeQuery(
"SELECT value FROM information_schema.settings" +
" WHERE name = 'MULTI_THREADED'");
return rs.next() && !"0".equals(rs.getString(1));
}
}
/**
......@@ -960,16 +979,16 @@ public class FullText {
if (newRow != null) {
// update
if (hasChanged(oldRow, newRow, index.indexColumns)) {
delete(oldRow);
insert(newRow);
delete(conn, oldRow);
insert(conn, newRow);
}
} else {
// delete
delete(oldRow);
delete(conn, oldRow);
}
} else if (newRow != null) {
// insert
insert(newRow);
insert(conn, newRow);
}
}
......@@ -992,54 +1011,82 @@ public class FullText {
/**
* Add a row to the index.
*
* @param conn to use
* @param row the row
*/
protected void insert(Object[] row) throws SQLException {
String key = getKey(row);
int hash = key.hashCode();
prepInsertRow.setInt(1, hash);
prepInsertRow.setInt(2, index.id);
prepInsertRow.setString(3, key);
prepInsertRow.execute();
ResultSet rs = prepInsertRow.getGeneratedKeys();
rs.next();
int rowId = rs.getInt(1);
prepInsertMap.setInt(1, rowId);
int[] wordIds = getWordIds(row);
for (int id : wordIds) {
prepInsertMap.setInt(2, id);
prepInsertMap.execute();
protected void insert(Connection conn, Object[] row) throws SQLException {
PreparedStatement prepInsertRow = null;
PreparedStatement prepInsertMap = null;
try {
String key = getKey(row);
int hash = key.hashCode();
prepInsertRow = getStatement(conn, INSERT_ROW);
prepInsertRow.setInt(1, hash);
prepInsertRow.setInt(2, index.id);
prepInsertRow.setString(3, key);
prepInsertRow.execute();
ResultSet rs = prepInsertRow.getGeneratedKeys();
rs.next();
int rowId = rs.getInt(1);
prepInsertMap = getStatement(conn, INSERT_MAP);
prepInsertMap.setInt(1, rowId);
int[] wordIds = getWordIds(conn, row);
for (int id : wordIds) {
prepInsertMap.setInt(2, id);
prepInsertMap.execute();
}
} finally {
if (useOwnConnection) {
IOUtils.closeSilently(prepInsertRow);
IOUtils.closeSilently(prepInsertMap);
}
}
}
/**
* Delete a row from the index.
*
* @param conn to use
* @param row the row
*/
protected void delete(Object[] row) throws SQLException {
String key = getKey(row);
int hash = key.hashCode();
prepSelectRow.setInt(1, hash);
prepSelectRow.setInt(2, index.id);
prepSelectRow.setString(3, key);
ResultSet rs = prepSelectRow.executeQuery();
if (rs.next()) {
int rowId = rs.getInt(1);
prepDeleteMap.setInt(1, rowId);
int[] wordIds = getWordIds(row);
for (int id : wordIds) {
prepDeleteMap.setInt(2, id);
prepDeleteMap.executeUpdate();
protected void delete(Connection conn, Object[] row) throws SQLException {
PreparedStatement prepSelectRow = null;
PreparedStatement prepDeleteMap = null;
PreparedStatement prepDeleteRow = null;
try {
String key = getKey(row);
int hash = key.hashCode();
prepSelectRow = getStatement(conn, SELECT_ROW);
prepSelectRow.setInt(1, hash);
prepSelectRow.setInt(2, index.id);
prepSelectRow.setString(3, key);
ResultSet rs = prepSelectRow.executeQuery();
prepDeleteMap = getStatement(conn, DELETE_MAP);
prepDeleteRow = getStatement(conn, DELETE_ROW);
if (rs.next()) {
int rowId = rs.getInt(1);
prepDeleteMap.setInt(1, rowId);
int[] wordIds = getWordIds(conn, row);
for (int id : wordIds) {
prepDeleteMap.setInt(2, id);
prepDeleteMap.executeUpdate();
}
prepDeleteRow.setInt(1, hash);
prepDeleteRow.setInt(2, index.id);
prepDeleteRow.setString(3, key);
prepDeleteRow.executeUpdate();
}
} finally {
if (useOwnConnection) {
IOUtils.closeSilently(prepSelectRow);
IOUtils.closeSilently(prepDeleteMap);
IOUtils.closeSilently(prepDeleteRow);
}
prepDeleteRow.setInt(1, hash);
prepDeleteRow.setInt(2, index.id);
prepDeleteRow.setString(3, key);
prepDeleteRow.executeUpdate();
}
}
private int[] getWordIds(Object[] row) throws SQLException {
private int[] getWordIds(Connection conn, Object[] row) throws SQLException {
HashSet<String> words = New.hashSet();
for (int idx : index.indexColumns) {
int type = columnTypes[idx];
......@@ -1057,27 +1104,36 @@ public class FullText {
addWords(setting, words, string);
}
}
HashMap<String, Integer> allWords = setting.getWordList();
int[] wordIds = new int[words.size()];
Iterator<String> it = words.iterator();
for (int i = 0; it.hasNext(); i++) {
String word = it.next();
Integer wId = allWords.get(word);
int wordId;
if (wId == null) {
prepInsertWord.setString(1, word);
prepInsertWord.execute();
ResultSet rs = prepInsertWord.getGeneratedKeys();
rs.next();
wordId = rs.getInt(1);
allWords.put(word, wordId);
} else {
wordId = wId.intValue();
PreparedStatement prepInsertWord = null;
try {
prepInsertWord = getStatement(conn, INSERT_WORD);
int[] wordIds = new int[words.size()];
int i = 0;
for (String word : words) {
int wordId;
Integer wId;
while((wId = setting.getWordId(word)) == null) {
prepInsertWord.setString(1, word);
prepInsertWord.execute();
ResultSet rs = prepInsertWord.getGeneratedKeys();
if (rs.next()) {
wordId = rs.getInt(1);
if (wordId != 0) {
setting.addWord(word, wordId);
wId = wordId;
break;
}
}
}
wordIds[i++] = wId;
}
Arrays.sort(wordIds);
return wordIds;
} finally {
if (useOwnConnection) {
IOUtils.closeSilently(prepInsertWord);
}
wordIds[i] = wordId;
}
Arrays.sort(wordIds);
return wordIds;
}
private String getKey(Object[] row) throws SQLException {
......@@ -1095,6 +1151,10 @@ public class FullText {
return buff.toString();
}
private PreparedStatement getStatement(Connection conn, int indx) throws SQLException {
return useOwnConnection ? conn.prepareStatement(SQL[indx]) : prepStatements[indx];
}
}
/**
......@@ -1116,5 +1176,4 @@ public class FullText {
throws SQLException {
throw new SQLException(message, "FULLTEXT");
}
}
......@@ -13,6 +13,7 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
......@@ -37,6 +38,8 @@ import org.h2.util.StatementBuilder;
import org.h2.util.StringUtils;
import org.h2.util.Utils;
import java.io.File;
import java.util.Map;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;
......@@ -94,27 +97,23 @@ public class FullTextLucene extends FullText {
* @param conn the connection
*/
public static void init(Connection conn) throws SQLException {
Statement stat = conn.createStatement();
stat.execute("CREATE SCHEMA IF NOT EXISTS " + SCHEMA);
stat.execute("CREATE TABLE IF NOT EXISTS " + SCHEMA +
".INDEXES(SCHEMA VARCHAR, TABLE VARCHAR, " +
"COLUMNS VARCHAR, PRIMARY KEY(SCHEMA, TABLE))");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_CREATE_INDEX FOR \"" +
FullTextLucene.class.getName() + ".createIndex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_INDEX FOR \"" +
FullTextLucene.class.getName() + ".dropIndex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_SEARCH FOR \"" +
FullTextLucene.class.getName() + ".search\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_SEARCH_DATA FOR \"" +
FullTextLucene.class.getName() + ".searchData\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_REINDEX FOR \"" +
FullTextLucene.class.getName() + ".reindex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_ALL FOR \"" +
FullTextLucene.class.getName() + ".dropAll\"");
try {
getIndexAccess(conn);
} catch (SQLException e) {
throw convertException(e);
try (Statement stat = conn.createStatement()) {
stat.execute("CREATE SCHEMA IF NOT EXISTS " + SCHEMA);
stat.execute("CREATE TABLE IF NOT EXISTS " + SCHEMA +
".INDEXES(SCHEMA VARCHAR, TABLE VARCHAR, " +
"COLUMNS VARCHAR, PRIMARY KEY(SCHEMA, TABLE))");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_CREATE_INDEX FOR \"" +
FullTextLucene.class.getName() + ".createIndex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_INDEX FOR \"" +
FullTextLucene.class.getName() + ".dropIndex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_SEARCH FOR \"" +
FullTextLucene.class.getName() + ".search\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_SEARCH_DATA FOR \"" +
FullTextLucene.class.getName() + ".searchData\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_REINDEX FOR \"" +
FullTextLucene.class.getName() + ".reindex\"");
stat.execute("CREATE ALIAS IF NOT EXISTS FTL_DROP_ALL FOR \"" +
FullTextLucene.class.getName() + ".dropAll\"");
}
}
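Note that the reworked init() above only registers the FTL_* aliases; the Lucene IndexAccess is now opened lazily when it is first needed. A minimal bootstrap sketch over JDBC (the table and data are hypothetical, and Lucene must be on the classpath):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class LuceneFullTextBootstrap {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:ftl", "sa", "");
             Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR)");
            stat.execute("INSERT INTO TEST VALUES(1, 'Hello World')");
            // register and run the init method shown above
            stat.execute("CREATE ALIAS IF NOT EXISTS FTL_INIT FOR "
                    + "\"org.h2.fulltext.FullTextLucene.init\"");
            stat.execute("CALL FTL_INIT()");
            stat.execute("CALL FTL_CREATE_INDEX('PUBLIC', 'TEST', NULL)");
            try (ResultSet rs = stat.executeQuery(
                    "SELECT * FROM FTL_SEARCH('Hello', 0, 0)")) {
                while (rs.next()) {
                    System.out.println(rs.getString("QUERY"));
                }
            }
        }
    }
}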
......@@ -157,11 +156,9 @@ public class FullTextLucene extends FullText {
prep.setString(1, schema);
prep.setString(2, table);
int rowCount = prep.executeUpdate();
if (rowCount == 0) {
return;
if (rowCount != 0) {
reindex(conn);
}
reindex(conn);
}
/**
......@@ -248,10 +245,7 @@ public class FullTextLucene extends FullText {
* @return the converted SQL exception
*/
protected static SQLException convertException(Exception e) {
SQLException e2 = new SQLException(
"Error while indexing document", "FULLTEXT");
e2.initCause(e);
return e2;
return new SQLException("Error while indexing document", "FULLTEXT", e);
}
/**
......@@ -261,8 +255,8 @@ public class FullTextLucene extends FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
private static void createTrigger(Connection conn, String schema,
String table) throws SQLException {
createOrDropTrigger(conn, schema, table, true);
}
......@@ -309,11 +303,7 @@ public class FullTextLucene extends FullText {
conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
IndexWriter writer = new IndexWriter(indexDir, conf);
//see http://wiki.apache.org/lucene-java/NearRealtimeSearch
IndexReader reader = IndexReader.open(writer, true);
access = new IndexAccess();
access.writer = writer;
access.reader = reader;
access.searcher = new IndexSearcher(reader);
access = new IndexAccess(writer);
} catch (IOException e) {
throw convertException(e);
}
......@@ -353,8 +343,8 @@ public class FullTextLucene extends FullText {
* @param schema the schema name
* @param table the table name
*/
protected static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
private static void indexExistingRows(Connection conn, String schema,
String table) throws SQLException {
FullTextLucene.FullTextTrigger existing = new FullTextLucene.FullTextTrigger();
existing.init(conn, schema, null, table, false, Trigger.INSERT);
String sql = "SELECT * FROM " + StringUtils.quoteIdentifier(schema) +
......@@ -373,10 +363,7 @@ public class FullTextLucene extends FullText {
private static void removeIndexFiles(Connection conn) throws SQLException {
String path = getIndexPath(conn);
IndexAccess access = INDEX_ACCESS.get(path);
if (access != null) {
removeIndexAccess(access, path);
}
removeIndexAccess(path);
if (!path.startsWith(IN_MEMORY_PREFIX)) {
FileUtils.deleteRecursive(path, false);
}
......@@ -386,17 +373,16 @@ public class FullTextLucene extends FullText {
* Close the index writer and searcher and remove them from the index access
* set.
*
* @param access the index writer/searcher wrapper
* @param indexPath the index path
*/
protected static void removeIndexAccess(IndexAccess access, String indexPath)
protected static void removeIndexAccess(String indexPath)
throws SQLException {
synchronized (INDEX_ACCESS) {
try {
INDEX_ACCESS.remove(indexPath);
access.searcher.close();
access.reader.close();
access.writer.close();
IndexAccess access = INDEX_ACCESS.remove(indexPath);
if(access != null) {
access.close();
}
} catch (Exception e) {
throw convertException(e);
}
......@@ -426,49 +412,53 @@ public class FullTextLucene extends FullText {
try {
IndexAccess access = getIndexAccess(conn);
// take a reference as the searcher may change
IndexSearcher searcher = access.searcher;
// reuse the same analyzer; it's thread-safe;
// also allows subclasses to control the analyzer used.
Analyzer analyzer = access.writer.getAnalyzer();
QueryParser parser = new QueryParser(Version.LUCENE_30,
LUCENE_FIELD_DATA, analyzer);
Query query = parser.parse(text);
// Lucene 3 insists on a hard limit and will not provide
// a total hits value. Take at least 100 which is
// an optimal limit for Lucene as any more
// will trigger writing results to disk.
int maxResults = (limit == 0 ? 100 : limit) + offset;
TopDocs docs = searcher.search(query, maxResults);
if (limit == 0) {
limit = docs.totalHits;
}
for (int i = 0, len = docs.scoreDocs.length;
i < limit && i + offset < docs.totalHits
&& i + offset < len; i++) {
ScoreDoc sd = docs.scoreDocs[i + offset];
Document doc = searcher.doc(sd.doc);
float score = sd.score;
String q = doc.get(LUCENE_FIELD_QUERY);
if (data) {
int idx = q.indexOf(" WHERE ");
JdbcConnection c = (JdbcConnection) conn;
Session session = (Session) c.getSession();
Parser p = new Parser(session);
String tab = q.substring(0, idx);
ExpressionColumn expr = (ExpressionColumn) p.parseExpression(tab);
String schemaName = expr.getOriginalTableAliasName();
String tableName = expr.getColumnName();
q = q.substring(idx + " WHERE ".length());
Object[][] columnData = parseKey(conn, q);
result.addRow(
schemaName,
tableName,
columnData[0],
columnData[1],
score);
} else {
result.addRow(q, score);
IndexSearcher searcher = access.getSearcher();
try {
// reuse the same analyzer; it's thread-safe;
// also allows subclasses to control the analyzer used.
Analyzer analyzer = access.writer.getAnalyzer();
QueryParser parser = new QueryParser(Version.LUCENE_30,
LUCENE_FIELD_DATA, analyzer);
Query query = parser.parse(text);
// Lucene 3 insists on a hard limit and will not provide
// a total hits value. Take at least 100 which is
// an optimal limit for Lucene as any more
// will trigger writing results to disk.
int maxResults = (limit == 0 ? 100 : limit) + offset;
TopDocs docs = searcher.search(query, maxResults);
if (limit == 0) {
limit = docs.totalHits;
}
for (int i = 0, len = docs.scoreDocs.length;
i < limit && i + offset < docs.totalHits
&& i + offset < len; i++) {
ScoreDoc sd = docs.scoreDocs[i + offset];
Document doc = searcher.doc(sd.doc);
float score = sd.score;
String q = doc.get(LUCENE_FIELD_QUERY);
if (data) {
int idx = q.indexOf(" WHERE ");
JdbcConnection c = (JdbcConnection) conn;
Session session = (Session) c.getSession();
Parser p = new Parser(session);
String tab = q.substring(0, idx);
ExpressionColumn expr = (ExpressionColumn) p.parseExpression(tab);
String schemaName = expr.getOriginalTableAliasName();
String tableName = expr.getColumnName();
q = q.substring(idx + " WHERE ".length());
Object[][] columnData = parseKey(conn, q);
result.addRow(
schemaName,
tableName,
columnData[0],
columnData[1],
score);
} else {
result.addRow(q, score);
}
}
} finally {
access.returnSearcher(searcher);
}
} catch (Exception e) {
throw convertException(e);
......@@ -479,16 +469,16 @@ public class FullTextLucene extends FullText {
/**
* Trigger that updates the index when inserting, updating, or deleting a row.
*/
public static class FullTextTrigger implements Trigger {
public static final class FullTextTrigger implements Trigger {
protected String schema;
protected String table;
protected int[] keys;
protected int[] indexColumns;
protected String[] columns;
protected int[] columnTypes;
protected String indexPath;
protected IndexAccess indexAccess;
private String schema;
private String table;
private int[] keys;
private int[] indexColumns;
private String[] columns;
private int[] columnTypes;
private String indexPath;
private IndexAccess indexAccess;
/**
* INTERNAL
......@@ -541,9 +531,8 @@ public class FullTextLucene extends FullText {
if (rs.next()) {
String cols = rs.getString(1);
if (cols != null) {
for (String s : StringUtils.arraySplit(cols, ',', true)) {
indexList.add(s);
}
Collections.addAll(indexList,
StringUtils.arraySplit(cols, ',', true));
}
}
if (indexList.size() == 0) {
......@@ -583,10 +572,7 @@ public class FullTextLucene extends FullText {
*/
@Override
public void close() throws SQLException {
if (indexAccess != null) {
removeIndexAccess(indexAccess, indexPath);
indexAccess = null;
}
removeIndexAccess(indexPath);
}
/**
......@@ -600,14 +586,9 @@ public class FullTextLucene extends FullText {
/**
* Commit all changes to the Lucene index.
*/
void commitIndex() throws SQLException {
private void commitIndex() throws SQLException {
try {
indexAccess.writer.commit();
// recreate Searcher with the IndexWriter's reader.
indexAccess.searcher.close();
indexAccess.reader.close();
indexAccess.reader = IndexReader.open(indexAccess.writer, true);
indexAccess.searcher = new IndexSearcher(indexAccess.reader);
indexAccess.commit();
} catch (IOException e) {
throw convertException(e);
}
......@@ -699,22 +680,80 @@ public class FullTextLucene extends FullText {
/**
* A wrapper for the Lucene writer and searcher.
*/
static class IndexAccess {
static final class IndexAccess {
/**
* The index writer.
*/
IndexWriter writer;
final IndexWriter writer;
/**
* The index reader.
* Map of usage counters for outstanding searchers.
*/
IndexReader reader;
private final Map<IndexSearcher,Integer> counters = New.hashMap();
/**
* Usage counter for current searcher.
*/
private int counter;
/**
* The index searcher.
*/
IndexSearcher searcher;
}
private IndexSearcher searcher;
private IndexAccess(IndexWriter writer) throws IOException {
this.writer = writer;
IndexReader reader = IndexReader.open(writer, true);
searcher = new IndexSearcher(reader);
}
private synchronized IndexSearcher getSearcher() {
++counter;
return searcher;
}
private synchronized void returnSearcher(IndexSearcher searcher) {
if (this.searcher == searcher) {
--counter;
assert counter >= 0;
} else {
Integer cnt = counters.remove(searcher);
assert cnt != null;
if(--cnt == 0) {
closeSearcher(searcher);
} else {
counters.put(searcher, cnt);
}
}
}
public synchronized void commit() throws IOException {
writer.commit();
if (counter != 0) {
counters.put(searcher, counter);
counter = 0;
} else {
closeSearcher(searcher);
}
// recreate Searcher with the IndexWriter's reader.
searcher = new IndexSearcher(IndexReader.open(writer, true));
}
public synchronized void close() throws IOException {
for (IndexSearcher searcher : counters.keySet()) {
closeSearcher(searcher);
}
counters.clear();
closeSearcher(searcher);
searcher = null;
writer.close();
}
private static void closeSearcher(IndexSearcher searcher) {
IndexReader indexReader = searcher.getIndexReader();
try { searcher.close(); } catch(IOException ignore) {/**/}
try { indexReader.close(); } catch(IOException ignore) {/**/}
}
}
}
......@@ -10,20 +10,22 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import org.h2.util.New;
import org.h2.util.SoftHashMap;
/**
* The global settings of a full text search.
*/
class FullTextSettings {
final class FullTextSettings {
/**
* The settings of open indexes.
*/
private static final HashMap<String, FullTextSettings> SETTINGS = New.hashMap();
private static final Map<String, FullTextSettings> SETTINGS = New.hashMap();
/**
* Whether this instance has been initialized.
......@@ -33,17 +35,17 @@ class FullTextSettings {
/**
* The set of words not to index (stop words).
*/
private final HashSet<String> ignoreList = New.hashSet();
private final Set<String> ignoreList = New.hashSet();
/**
* The set of words / terms.
*/
private final HashMap<String, Integer> words = New.hashMap();
private final Map<String, Integer> words = New.hashMap();
/**
* The set of indexes in this database.
*/
private final HashMap<Integer, IndexInfo> indexes = New.hashMap();
private final Map<Integer, IndexInfo> indexes = Collections.synchronizedMap(New.<Integer, IndexInfo>hashMap());
/**
* The prepared statement cache.
......@@ -60,26 +62,63 @@ class FullTextSettings {
/**
* Create a new instance.
*/
protected FullTextSettings() {
private FullTextSettings() {
// don't allow construction
}
/**
* Get the ignore list.
*
* @return the ignore list
* Clear set of ignored words
*/
protected HashSet<String> getIgnoreList() {
return ignoreList;
public void clearIgnored() {
synchronized (ignoreList) {
ignoreList.clear();
}
}
/**
* Get the word list.
*
* @return the word list
* Amend set of ignored words
* @param words to add
*/
public void addIgnored(Iterable<String> words) {
synchronized (ignoreList) {
for (String word : words) {
word = normalizeWord(word);
ignoreList.add(word);
}
}
}
/**
* Clear set of searchable words
*/
public void clearWordList() {
synchronized (words) {
words.clear();
}
}
/**
* Get id for a searchable word
* @param word to find id for
* @return Integer id or null if word is not found
*/
public Integer getWordId(String word) {
synchronized (words) {
return words.get(word);
}
}
/**
* Register searchable word
* @param word to register
* @param id to register with
*/
protected HashMap<String, Integer> getWordList() {
return words;
public void addWord(String word, Integer id) {
synchronized (words) {
if(!words.containsKey(word)) {
words.put(word, id);
}
}
}
/**
......@@ -109,10 +148,11 @@ class FullTextSettings {
* @return the uppercase version of the word or null
*/
protected String convertWord(String word) {
// TODO this is locale specific, document
word = word.toUpperCase();
if (ignoreList.contains(word)) {
return null;
word = normalizeWord(word);
synchronized (ignoreList) {
if (ignoreList.contains(word)) {
return null;
}
}
return word;
}
......@@ -126,10 +166,13 @@ class FullTextSettings {
protected static FullTextSettings getInstance(Connection conn)
throws SQLException {
String path = getIndexPath(conn);
FullTextSettings setting = SETTINGS.get(path);
if (setting == null) {
setting = new FullTextSettings();
SETTINGS.put(path, setting);
FullTextSettings setting;
synchronized (SETTINGS) {
setting = SETTINGS.get(path);
if (setting == null) {
setting = new FullTextSettings();
SETTINGS.put(path, setting);
}
}
return setting;
}
......@@ -140,7 +183,7 @@ class FullTextSettings {
* @param conn the connection
* @return the file system path
*/
protected static String getIndexPath(Connection conn) throws SQLException {
private static String getIndexPath(Connection conn) throws SQLException {
Statement stat = conn.createStatement();
ResultSet rs = stat.executeQuery(
"CALL IFNULL(DATABASE_PATH(), 'MEM:' || DATABASE())");
......@@ -218,7 +261,9 @@ class FullTextSettings {
* Close all fulltext settings, freeing up memory.
*/
protected static void closeAll() {
SETTINGS.clear();
synchronized (SETTINGS) {
SETTINGS.clear();
}
}
protected void setWhitespaceChars(String whitespaceChars) {
......@@ -229,4 +274,8 @@ class FullTextSettings {
return whitespaceChars;
}
private String normalizeWord(String word) {
// TODO this is locale specific, document
return word.toUpperCase();
}
}
......@@ -14,10 +14,8 @@ import java.nio.charset.Charset;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Map;
import org.h2.engine.Constants;
import org.h2.util.New;
......@@ -741,17 +739,6 @@ public class DataUtils {
return new UnsupportedOperationException(formatMessage(0, message));
}
/**
* Create a new ConcurrentModificationException.
*
* @param message the message
* @return the exception
*/
public static ConcurrentModificationException
newConcurrentModificationException(String message) {
return new ConcurrentModificationException(formatMessage(0, message));
}
/**
* Create a new IllegalStateException.
*
......
......@@ -14,7 +14,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import org.h2.mvstore.type.DataType;
import org.h2.mvstore.type.ObjectDataType;
import org.h2.util.New;
......@@ -128,23 +127,6 @@ public class MVMap<K, V> extends AbstractMap<K, V>
return (V) result;
}
/**
* Add or replace a key-value pair in a branch.
*
* @param root the root page
* @param key the key (may not be null)
* @param value the value (may not be null)
* @return the new root page
*/
synchronized Page putBranch(Page root, K key, V value) {
DataUtils.checkArgument(value != null, "The value may not be null");
long v = writeVersion;
Page p = root.copy(v);
p = splitRootIfNeeded(p, v);
put(p, v, key, value);
return p;
}
/**
* Split the root page if necessary.
*
......@@ -484,30 +466,6 @@ public class MVMap<K, V> extends AbstractMap<K, V>
return get(key) != null;
}
/**
* Get the value for the given key, or null if not found.
*
* @param p the parent page
* @param key the key
* @return the page or null
*/
protected Page binarySearchPage(Page p, Object key) {
int x = p.binarySearch(key);
if (!p.isLeaf()) {
if (x < 0) {
x = -x - 1;
} else {
x++;
}
p = p.getChildPage(x);
return binarySearchPage(p, key);
}
if (x >= 0) {
return p;
}
return null;
}
/**
* Remove all entries.
*/
......
......@@ -916,17 +916,6 @@ public final class MVStore {
}
}
/**
* Whether the chunk at the given position is live.
*
* @param chunkId the chunk id
* @return true if it is live
*/
boolean isChunkLive(int chunkId) {
String s = meta.get(Chunk.getMetaKey(chunkId));
return s != null;
}
/**
* Get the chunk for the given position.
*
......
......@@ -7,8 +7,6 @@ package org.h2.mvstore;
import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.Set;
import org.h2.compress.Compressor;
import org.h2.mvstore.type.DataType;
import org.h2.util.New;
......@@ -1121,18 +1119,6 @@ public class Page {
}
}
/**
* Collect the set of chunks referenced directly by this page.
*
* @param target the target set
*/
void collectReferencedChunks(Set<Integer> target) {
target.add(DataUtils.getPageChunkId(pos));
for (long p : children) {
target.add(DataUtils.getPageChunkId(p));
}
}
private void removeChild(int index) {
if (index == 0 && children.length == 1) {
children = EMPTY_ARRAY;
......
......@@ -48,11 +48,6 @@ public class MVPrimaryIndex extends BaseIndex {
*/
static final ValueLong MAX = ValueLong.get(Long.MAX_VALUE);
/**
* The zero long value.
*/
static final ValueLong ZERO = ValueLong.get(0);
private final MVTable mvTable;
private final String mapName;
private TransactionMap<Value, Value> dataMap;
......
......@@ -1239,10 +1239,6 @@ public class TransactionStore {
}
}
Object getUndoLog() {
return transaction.store.undoLog;
}
/**
* Get the versioned value for the given key.
*
......
......@@ -410,10 +410,12 @@ public class LocalResult implements ResultInterface, ResultTarget {
if (rows.size() > limit) {
rows = New.arrayList(rows.subList(0, limit));
rowCount = limit;
distinctRows = null;
}
} else {
if (limit < rowCount) {
rowCount = limit;
distinctRows = null;
}
}
}
......@@ -513,6 +515,7 @@ public class LocalResult implements ResultInterface, ResultTarget {
rowCount -= offset;
}
}
distinctRows = null;
}
@Override
......
......@@ -177,13 +177,13 @@ public class Column {
*/
public Value convert(Value v, Mode mode) {
try {
return v.convertTo(type, MathUtils.convertLongToInt(precision), mode);
return v.convertTo(type, MathUtils.convertLongToInt(precision), mode, this);
} catch (DbException e) {
if (e.getErrorCode() == ErrorCode.DATA_CONVERSION_ERROR_1) {
String target = (table == null ? "" : table.getName() + ": ") +
getCreateSQL();
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1,
ErrorCode.DATA_CONVERSION_ERROR_1, e,
v.getSQL() + " (" + target + ")");
}
throw e;
......@@ -389,11 +389,11 @@ public class Column {
if (s.length() > 127) {
s = s.substring(0, 128) + "...";
}
throw DbException.get(ErrorCode.ENUM_VALUE_NOT_PERMITTED_1,
throw DbException.get(ErrorCode.ENUM_VALUE_NOT_PERMITTED,
getCreateSQL(), s);
}
value = ValueEnum.get(enumerators, value);
value = ValueEnum.get(enumerators, value.getInt());
}
updateSequenceIfRequired(session, value);
return value;
......
......@@ -24,6 +24,7 @@ import org.h2.engine.Mode;
import org.h2.engine.SysProperties;
import org.h2.message.DbException;
import org.h2.store.DataHandler;
import org.h2.table.Column;
import org.h2.tools.SimpleResultSet;
import org.h2.util.DateTimeUtils;
import org.h2.util.JdbcUtils;
......@@ -551,12 +552,26 @@ public abstract class Value {
* Convert a value to the specified type.
*
* @param targetType the type of the returned value
* @param the precision of the column to convert this value to.
* @param precision the precision of the column to convert this value to.
* The special constant <code>-1</code> is used to indicate that
* the precision plays no role when converting the value
* @return the converted value
*/
public Value convertTo(int targetType, int precision, Mode mode) {
public final Value convertTo(int targetType, int precision, Mode mode) {
return convertTo(targetType, precision, mode, null);
}
/**
* Convert a value to the specified type.
*
* @param targetType the type of the returned value
* @param precision the precision of the column to convert this value to.
* The special constant <code>-1</code> is used to indicate that
* the precision plays no role when converting the value
* @param column the column that contains the ENUM datatype enumerators, for dealing with ENUM conversions
* @return the converted value
*/
public Value convertTo(int targetType, int precision, Mode mode, Column column) {
// converting NULL is done in ValueNull
// converting BLOB to CLOB and vice versa is done in ValueLob
if (getType() == targetType) {
......@@ -582,6 +597,7 @@ public abstract class Value {
case BYTES:
case JAVA_OBJECT:
case UUID:
case ENUM:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
}
......@@ -593,6 +609,7 @@ public abstract class Value {
return ValueByte.get(getBoolean().booleanValue() ? (byte) 1 : (byte) 0);
case SHORT:
return ValueByte.get(convertToByte(getShort()));
case ENUM:
case INT:
return ValueByte.get(convertToByte(getInt()));
case LONG:
......@@ -617,6 +634,7 @@ public abstract class Value {
return ValueShort.get(getBoolean().booleanValue() ? (short) 1 : (short) 0);
case BYTE:
return ValueShort.get(getByte());
case ENUM:
case INT:
return ValueShort.get(convertToShort(getInt()));
case LONG:
......@@ -669,6 +687,7 @@ public abstract class Value {
return ValueLong.get(getByte());
case SHORT:
return ValueLong.get(getShort());
case ENUM:
case INT:
return ValueLong.get(getInt());
case DECIMAL:
......@@ -700,6 +719,7 @@ public abstract class Value {
return ValueDecimal.get(BigDecimal.valueOf(getByte()));
case SHORT:
return ValueDecimal.get(BigDecimal.valueOf(getShort()));
case ENUM:
case INT:
return ValueDecimal.get(BigDecimal.valueOf(getInt()));
case LONG:
......@@ -743,6 +763,7 @@ public abstract class Value {
return ValueDouble.get(getBigDecimal().doubleValue());
case FLOAT:
return ValueDouble.get(getFloat());
case ENUM:
case TIMESTAMP_TZ:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
......@@ -765,6 +786,7 @@ public abstract class Value {
return ValueFloat.get(getBigDecimal().floatValue());
case DOUBLE:
return ValueFloat.get((float) getDouble());
case ENUM:
case TIMESTAMP_TZ:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
......@@ -784,6 +806,9 @@ public abstract class Value {
case TIMESTAMP_TZ:
return ValueDate.fromDateValue(
((ValueTimestampTimeZone) this).getDateValue());
case ENUM:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
}
break;
}
......@@ -799,6 +824,9 @@ public abstract class Value {
case TIMESTAMP_TZ:
return ValueTime.fromNanos(
((ValueTimestampTimeZone) this).getTimeNanos());
case ENUM:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
}
break;
}
......@@ -814,6 +842,9 @@ public abstract class Value {
return ValueTimestamp.fromDateValueAndNanos(
((ValueTimestampTimeZone) this).getDateValue(),
((ValueTimestampTimeZone) this).getTimeNanos());
case ENUM:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
}
break;
}
......@@ -856,6 +887,7 @@ public abstract class Value {
(byte) x
});
}
case ENUM:
case TIMESTAMP_TZ:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
......@@ -868,6 +900,7 @@ public abstract class Value {
case BLOB:
return ValueJavaObject.getNoCopy(
null, getBytesNoCopy(), getDataHandler());
case ENUM:
case TIMESTAMP_TZ:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
......@@ -876,9 +909,19 @@ public abstract class Value {
}
case ENUM: {
switch (getType()) {
case BYTE:
case SHORT:
case INT:
case LONG:
case DECIMAL:
return ValueEnum.get(column.getEnumerators(), getInt());
case STRING:
return this;
case STRING_IGNORECASE:
case STRING_FIXED:
return ValueEnum.get(column.getEnumerators(), getString());
default:
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1, getString());
}
}
case BLOB: {
......@@ -972,7 +1015,6 @@ public abstract class Value {
case JAVA_OBJECT:
return ValueJavaObject.getNoCopy(null,
StringUtils.convertHexToBytes(s.trim()), getDataHandler());
case ENUM:
case STRING:
return ValueString.get(s);
case STRING_IGNORECASE:
......
......@@ -49,19 +49,15 @@ public class ValueEnum extends ValueEnumBase {
private static final void check(final String[] enumerators, final Value value) {
check(enumerators);
switch (validate(enumerators, value)) {
case VALID:
return;
default:
throw DbException.get(ErrorCode.ENUM_VALUE_NOT_PERMITTED_2,
toString(enumerators), value.toString());
if (validate(enumerators, value) != Validation.VALID) {
throw DbException.get(ErrorCode.ENUM_VALUE_NOT_PERMITTED,
toString(enumerators), value.toString());
}
}
@Override
protected int compareSecure(final Value v, final CompareMode mode) {
final ValueEnum ev = ValueEnum.get(enumerators, v);
return MathUtils.compareInt(getInt(), ev.getInt());
return MathUtils.compareInt(getInt(), v.getInt());
}
/**
......@@ -72,21 +68,22 @@ public class ValueEnum extends ValueEnumBase {
* @param value a value
* @return the ENUM value
*/
public static ValueEnum get(final String[] enumerators, final Value value) {
check(enumerators, value);
public static ValueEnum get(final String[] enumerators, int value) {
check(enumerators, ValueInt.get(value));
return new ValueEnum(enumerators, value);
}
if (DataType.isStringType(value.getType())) {
final String cleanLabel = sanitize(value.getString());
public static ValueEnum get(final String[] enumerators, String value) {
check(enumerators, ValueString.get(value));
for (int i = 0; i < enumerators.length; i++) {
if (cleanLabel.equals(sanitize(enumerators[i])))
return new ValueEnum(enumerators, i);
}
final String cleanLabel = sanitize(value);
throw DbException.get(ErrorCode.GENERAL_ERROR_1, "Unexpected error");
} else {
return new ValueEnum(enumerators, value.getInt());
for (int i = 0; i < enumerators.length; i++) {
if (cleanLabel.equals(sanitize(enumerators[i])))
return new ValueEnum(enumerators, i);
}
throw DbException.get(ErrorCode.GENERAL_ERROR_1, "Unexpected error");
}
public String[] getEnumerators() {
......
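A small sketch of the two ValueEnum.get overloads introduced above; the enumerator array is illustrative:

import org.h2.value.ValueEnum;

public class ValueEnumSketch {
    public static void main(String[] args) {
        String[] suits = {"hearts", "clubs", "spades", "diamonds"};
        ValueEnum byOrdinal = ValueEnum.get(suits, 1);      // look up by ordinal
        ValueEnum byLabel = ValueEnum.get(suits, "clubs");  // look up by label
        // both refer to 'clubs'; getInt() is the ordinal that comparisons use
        System.out.println(byOrdinal.getInt() == byLabel.getInt()); // true
        // an unknown label or an out-of-range ordinal fails the internal check
        // and raises ENUM_VALUE_NOT_PERMITTED (error code 22030)
    }
}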
......@@ -8,17 +8,16 @@ package org.h2.value;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Arrays;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.CoordinateSequenceFilter;
import com.vividsolutions.jts.geom.PrecisionModel;
import org.h2.engine.Mode;
import org.h2.message.DbException;
import org.h2.table.Column;
import org.h2.util.StringUtils;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.CoordinateSequenceFilter;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.PrecisionModel;
import com.vividsolutions.jts.io.ParseException;
import com.vividsolutions.jts.io.WKBReader;
import com.vividsolutions.jts.io.WKBWriter;
......@@ -274,11 +273,11 @@ public class ValueGeometry extends Value {
}
@Override
public Value convertTo(int targetType, int precision, Mode mode) {
public Value convertTo(int targetType, int precision, Mode mode, Column column) {
if (targetType == Value.JAVA_OBJECT) {
return this;
}
return super.convertTo(targetType, precision, mode);
return super.convertTo(targetType, precision, mode, column);
}
/**
......
......@@ -23,6 +23,7 @@ import org.h2.store.FileStore;
import org.h2.store.FileStoreInputStream;
import org.h2.store.FileStoreOutputStream;
import org.h2.store.fs.FileUtils;
import org.h2.table.Column;
import org.h2.util.IOUtils;
import org.h2.util.MathUtils;
import org.h2.util.SmallLRUCache;
......@@ -444,7 +445,7 @@ public class ValueLob extends Value {
* @return the converted value
*/
@Override
public Value convertTo(int t, int precision, Mode mode) {
public Value convertTo(int t, int precision, Mode mode, Column column) {
if (t == type) {
return this;
} else if (t == Value.CLOB) {
......@@ -454,7 +455,7 @@ public class ValueLob extends Value {
ValueLob copy = ValueLob.createBlob(getInputStream(), -1, handler);
return copy;
}
return super.convertTo(t, precision, mode);
return super.convertTo(t, precision, mode, column);
}
@Override
......
......@@ -25,6 +25,7 @@ import org.h2.store.FileStoreOutputStream;
import org.h2.store.LobStorageFrontend;
import org.h2.store.LobStorageInterface;
import org.h2.store.fs.FileUtils;
import org.h2.table.Column;
import org.h2.util.IOUtils;
import org.h2.util.MathUtils;
import org.h2.util.StringUtils;
......@@ -185,7 +186,7 @@ public class ValueLobDb extends Value implements Value.ValueClob,
* @return the converted value
*/
@Override
public Value convertTo(int t, int precision, Mode mode) {
public Value convertTo(int t, int precision, Mode mode, Column column) {
if (t == type) {
return this;
} else if (t == Value.CLOB) {
......@@ -205,7 +206,7 @@ public class ValueLobDb extends Value implements Value.ValueClob,
return ValueLobDb.createSmallLob(t, small);
}
}
return super.convertTo(t, precision, mode);
return super.convertTo(t, precision, mode, column);
}
@Override
......
......@@ -13,9 +13,9 @@ import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import org.h2.engine.Mode;
import org.h2.message.DbException;
import org.h2.table.Column;
/**
* Implementation of NULL. NULL is not a regular data type.
......@@ -133,7 +133,7 @@ public class ValueNull extends Value {
}
@Override
public Value convertTo(int type, int precision, Mode mode) {
public Value convertTo(int type, int precision, Mode mode, Column column) {
return this;
}
......
......@@ -470,8 +470,6 @@ java org.h2.test.TestAll timer
// System.setProperty("h2.modifyOnWrite", "true");
// System.setProperty("h2.storeLocalTime", "true");
// speedup
// System.setProperty("h2.syncMethod", "");
......
......@@ -18,7 +18,6 @@ import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.SimpleTimeZone;
import java.util.TimeZone;
import org.h2.test.TestBase;
import org.h2.test.unit.TestDate;
import org.h2.util.DateTimeUtils;
......@@ -35,7 +34,6 @@ public class TestDateStorage extends TestBase {
* @param a ignored
*/
public static void main(String... a) throws Exception {
System.setProperty("h2.storeLocalTime", "true");
TestBase.createCaller().init().test();
}
......
......@@ -22,6 +22,7 @@ import java.util.concurrent.TimeUnit;
import org.h2.fulltext.FullText;
import org.h2.store.fs.FileUtils;
import org.h2.test.TestAll;
import org.h2.test.TestBase;
import org.h2.util.IOUtils;
import org.h2.util.Task;
......@@ -50,10 +51,6 @@ public class TestFullText extends TestBase {
@Override
public void test() throws Exception {
if (config.multiThreaded) {
// It is even mentioned in the docs that this is not supported
return;
}
testUuidPrimaryKey(false);
testAutoAnalyze();
testNativeFeatures();
......@@ -71,7 +68,9 @@ public class TestFullText extends TestBase {
testCreateDropLucene();
testUuidPrimaryKey(true);
testMultiThreaded(true);
testMultiThreaded(false);
if(config.mvStore || !config.multiThreaded) {
testMultiThreaded(false);
}
testTransaction(true);
test(true, "VARCHAR");
test(true, "CLOB");
......@@ -256,7 +255,7 @@ public class TestFullText extends TestBase {
int len = 2;
Task[] task = new Task[len];
for (int i = 0; i < len; i++) {
final Connection conn = getConnection("fullText", connList);
final Connection conn = getConnection("fullText;LOCK_TIMEOUT=60000", connList);
Statement stat = conn.createStatement();
initFullText(stat, lucene);
initFullText(stat, lucene);
......
......@@ -30,7 +30,6 @@ public class TestLinkedTable extends TestBase {
* @param a ignored
*/
public static void main(String... a) throws Exception {
// System.setProperty("h2.storeLocalTime", "true");
TestBase.createCaller().init().test();
}
......
......@@ -454,7 +454,7 @@ public class TestPreparedStatement extends TestBase {
PreparedStatement prep = conn.prepareStatement(
"INSERT INTO test_enum VALUES(?)");
prep.setObject(1, badSizes[i]);
assertThrows(ErrorCode.ENUM_VALUE_NOT_PERMITTED_1, prep).execute();
assertThrows(ErrorCode.ENUM_VALUE_NOT_PERMITTED, prep).execute();
}
String[] goodSizes = new String[]{"small", "medium", "large"};
......
......@@ -11,7 +11,6 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import org.h2.api.ErrorCode;
import org.h2.jdbc.JdbcSQLException;
import org.h2.test.TestBase;
import org.h2.util.IOUtils;
......
......@@ -10308,6 +10308,9 @@ create table test(id int, name varchar);
insert into test values(5, 'b'), (5, 'b'), (20, 'a');
> update count: 3
drop table test;
> ok
select 0 from ((
select 0 as f from dual u1 where null in (?, ?, ?, ?, ?)
) union all (
......@@ -10683,4 +10686,78 @@ drop table card;
> ok
drop type CARD_SUIT;
> ok
\ No newline at end of file
> ok
--- ENUM in primary key with another column
create type CARD_SUIT as enum('hearts', 'clubs', 'spades', 'diamonds');
> ok
create table card (rank int, suit CARD_SUIT, primary key(rank, suit));
> ok
insert into card (rank, suit) values (0, 'clubs'), (3, 'hearts'), (1, 'clubs');
> update count: 3
insert into card (rank, suit) values (0, 'clubs');
> exception
select rank from card where suit = 'clubs';
> RANK
> ----
> 0
> 1
drop table card;
> ok
drop type CARD_SUIT;
> ok
--- ENUM with index
create type CARD_SUIT as enum('hearts', 'clubs', 'spades', 'diamonds');
> ok
create table card (rank int, suit CARD_SUIT, primary key(rank, suit));
> ok
insert into card (rank, suit) values (0, 'clubs'), (3, 'hearts'), (1, 'clubs');
> update count: 3
create index idx_card_suite on card(`suit`);
select rank from card where suit = 'clubs';
> RANK
> ----
> 0
> 1
select rank from card where suit in ('clubs');
> RANK
> ----
> 0
> 1
drop table card;
> ok
drop type CARD_SUIT;
> ok
----- Issue#493 -----
create table test (year int, action varchar(10));
> ok
insert into test values (2015, 'order'), (2016, 'order'), (2014, 'order');
> update count: 3
insert into test values (2014, 'execution'), (2015, 'execution'), (2016, 'execution');
> update count: 3
select * from test where year in (select distinct year from test order by year desc limit 1 offset 0);
> YEAR ACTION
> ---- ---------
> 2016 order
> 2016 execution
drop table test;
> ok
......@@ -13,7 +13,6 @@ import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import org.h2.api.ErrorCode;
import org.h2.test.TestBase;
import org.h2.test.utils.AssertThrows;
......@@ -40,7 +39,6 @@ public class TestDate extends TestBase {
* @param a ignored
*/
public static void main(String... a) throws Exception {
// System.setProperty("h2.storeLocalTime", "true");
TestBase.createCaller().init().test();
}
......
......@@ -19,7 +19,6 @@ public class TestDateTimeUtils extends TestBase {
* @param a ignored
*/
public static void main(String... a) throws Exception {
// System.setProperty("h2.storeLocalTime", "true");
TestBase.createCaller().init().test();
}
......
......@@ -10,7 +10,6 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Locale;
import org.h2.test.TestBase;
/**
......@@ -24,7 +23,6 @@ public class TestLocale extends TestBase {
* @param a ignored
*/
public static void main(String... a) throws Exception {
// System.setProperty("h2.storeLocalTime", "true");
TestBase.createCaller().init().test();
}
......
......@@ -274,7 +274,6 @@ class FileReorderWrites extends FileBase {
@Override
public int write(ByteBuffer src, long position) throws IOException {
if (FilePathReorderWrites.isPartialWrites() && src.remaining() > 2) {
final int tmp = src.remaining();
ByteBuffer buf1 = src.slice();
ByteBuffer buf2 = src.slice();
int len1 = src.remaining() / 2;
......