Commit 207f063c authored by Thomas Mueller

--no commit message

Parent 36d52017
@@ -78,8 +78,8 @@ or by using Statement.setMaxRows(max).
 <h3>Large Result Sets and External Sorting</h3>
 <p>
-For result set larger than 1000 rows, the result is buffered to disk. If ORDER BY is used,
-the sorting is done using an external sort algorithm. In this case, each block of rows is sorted using
+For large result sets, the result is buffered to disk. The threshold can be defined using the statement SET MAX_MEMORY_ROWS.
+If ORDER BY is used, the sorting is done using an external sort algorithm. In this case, each block of rows is sorted using
 quick sort, then written to disk; when reading the data, the blocks are merged together.
 </p>
......
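The updated paragraph above replaces the fixed 1000-row limit with a configurable threshold, SET MAX_MEMORY_ROWS. A minimal JDBC sketch of using the setting; the URL, table, and threshold value are illustrative and not taken from this commit:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class MaxMemoryRowsExample {
    public static void main(String[] args) throws Exception {
        // In-memory database URL used only for illustration.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stat = conn.createStatement()) {
            // Keep at most 10000 rows of a result set in memory; larger
            // results are buffered to disk, and with ORDER BY an external
            // sort (quick sort per block, then merge) is used.
            stat.execute("SET MAX_MEMORY_ROWS 10000");
            stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR)");
            stat.execute("INSERT INTO TEST SELECT X, 'row ' || X FROM SYSTEM_RANGE(1, 50000)");
            try (ResultSet rs = stat.executeQuery("SELECT * FROM TEST ORDER BY NAME")) {
                int count = 0;
                while (rs.next()) {
                    count++;
                }
                System.out.println(count + " rows read back from the buffered result");
            }
        }
    }
}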
@@ -16,7 +16,9 @@ Change Log
 <h1>Change Log</h1>
 <h2>Next Version (unreleased)</h2>
-<ul><li>H2 Console: The progress display when opening a database has been improved.
+<ul><li>ORDER BY on tableName.columnName didn't work correctly if the column
+name was also used as an alias.
+</li><li>H2 Console: The progress display when opening a database has been improved.
 </li><li>The error message when the server doesn't start has been improved.
 </li><li>Key values can now be changed in updatable result sets.
 </li><li>Changes in updatable result sets are now visible even when resetting the result set.
......
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
setlocal
cd ../..
rem call java16
rem call build all
rem makensis /v2 src/installer/h2.nsi
rem soffice.exe -invisible macro:///Standard.Module1.H2Pdf
@@ -1279,7 +1279,7 @@ public class Parser {
         Query command = parseSelectSub();
         return parseSelectUnionExtension(command, start, false);
     }

     private Query parseSelectUnionExtension(Query command, int start, boolean unionOnly) throws SQLException {
         while (true) {
             if (readIf("UNION")) {
@@ -1306,7 +1306,15 @@ public class Parser {
                 break;
             }
         }
-        if (!unionOnly && readIf("ORDER")) {
+        if (!unionOnly) {
+            parseEndOfQuery(command);
+        }
+        setSQL(command, null, start);
+        return command;
+    }
+
+    private void parseEndOfQuery(Query command) throws SQLException {
+        if (readIf("ORDER")) {
             read("BY");
             Select oldSelect = currentSelect;
             if (command instanceof Select) {
@@ -1346,7 +1354,34 @@ public class Parser {
             command.setOrder(orderList);
             currentSelect = oldSelect;
         }
-        if (!unionOnly && readIf("LIMIT")) {
+        int test;
+        if (database.getMode().supportOffsetFetch) {
+            if (readIf("OFFSET")) {
+                Select temp = currentSelect;
+                // make sure aggregate functions will not work here
+                currentSelect = null;
+                command.setOffset(readExpression().optimize(session));
+                if (!readIf("ROW")) {
+                    read("ROWS");
+                }
+                currentSelect = temp;
+            }
+            if (readIf("FETCH")) {
+                Select temp = currentSelect;
+                // make sure aggregate functions will not work here
+                currentSelect = null;
+                if (readIf("FIRST")) {
+                    Expression limit = readExpression().optimize(session);
+                    command.setLimit(limit);
+                    if (!readIf("ROW")) {
+                        read("ROWS");
+                    }
+                    readIf("ONLY");
+                }
+                currentSelect = temp;
+            }
+        }
+        if (readIf("LIMIT")) {
             Select temp = currentSelect;
             // make sure aggregate functions will not work here
             currentSelect = null;
@@ -1367,7 +1402,7 @@ public class Parser {
             }
             currentSelect = temp;
         }
-        if (!unionOnly && readIf("FOR")) {
+        if (readIf("FOR")) {
             if (readIf("UPDATE")) {
                 if (readIf("OF")) {
                     do {
@@ -1387,8 +1422,6 @@
                 }
             }
         }
-        setSQL(command, null, start);
-        return command;
     }

     private Query parseSelectSub() throws SQLException {
@@ -2943,7 +2976,7 @@ public class Parser {
         if (len == 0) {
             throw getSyntaxError();
         }
-        return getSaveTokenType(s);
+        return getSaveTokenType(s, database.getMode().supportOffsetFetch);
     }

     /**
@@ -2952,14 +2985,14 @@ public class Parser {
      * @param s the token to check
      * @return true if it is a keyword
      */
-    public static boolean isKeyword(String s) {
+    public static boolean isKeyword(String s, boolean supportOffsetFetch) {
         if (s == null || s.length() == 0) {
             return false;
         }
-        return getSaveTokenType(s) != IDENTIFIER;
+        return getSaveTokenType(s, supportOffsetFetch) != IDENTIFIER;
     }

-    private static int getSaveTokenType(String s) {
+    private static int getSaveTokenType(String s, boolean supportOffsetFetch) {
         switch (s.charAt(0)) {
         case 'C':
             if (s.equals("CURRENT_TIMESTAMP")) {
@@ -2984,6 +3017,8 @@ public class Parser {
                 return KEYWORD;
             } else if ("FULL".equals(s)) {
                 return KEYWORD;
+            } else if (supportOffsetFetch && "FETCH".equals(s)) {
+                return KEYWORD;
             }
             return getKeywordOrIdentifier(s, "FALSE", FALSE);
         case 'G':
@@ -3016,6 +3051,8 @@ public class Parser {
         case 'O':
             if ("ON".equals(s)) {
                 return KEYWORD;
+            } else if (supportOffsetFetch && "OFFSET".equals(s)) {
+                return KEYWORD;
             }
             return getKeywordOrIdentifier(s, "ORDER", KEYWORD);
         case 'P':
@@ -3476,7 +3513,7 @@ public class Parser {
         boolean ifNotExists = readIfNoExists();
         String constantName = readIdentifierWithSchema();
         Schema schema = getSchema();
-        if (isKeyword(constantName)) {
+        if (isKeyword(constantName, false)) {
             throw Message.getSQLException(ErrorCode.CONSTANT_ALREADY_EXISTS_1, constantName);
         }
         read("VALUE");
@@ -3493,7 +3530,7 @@ public class Parser {
         CreateAggregate command = new CreateAggregate(session);
         command.setForce(force);
         String name = readUniqueIdentifier();
-        if (isKeyword(name) || Function.getFunction(database, name) != null || Aggregate.getAggregateType(name) >= 0) {
+        if (isKeyword(name, false) || Function.getFunction(database, name) != null || Aggregate.getAggregateType(name) >= 0) {
             throw Message.getSQLException(ErrorCode.FUNCTION_ALIAS_ALREADY_EXISTS_1, name);
         }
         command.setName(name);
@@ -3597,7 +3634,7 @@ public class Parser {
         CreateFunctionAlias command = new CreateFunctionAlias(session);
         command.setForce(force);
         String name = readUniqueIdentifier();
-        if (isKeyword(name) || Function.getFunction(database, name) != null || Aggregate.getAggregateType(name) >= 0) {
+        if (isKeyword(name, false) || Function.getFunction(database, name) != null || Aggregate.getAggregateType(name) >= 0) {
             throw Message.getSQLException(ErrorCode.FUNCTION_ALIAS_ALREADY_EXISTS_1, name);
         }
         command.setAliasName(name);
@@ -4550,7 +4587,7 @@
                 return StringUtils.quoteIdentifier(s);
             }
         }
-        if (Parser.isKeyword(s)) {
+        if (Parser.isKeyword(s, true)) {
            return StringUtils.quoteIdentifier(s);
         }
         return s;
......
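The new parseEndOfQuery branch accepts OFFSET .. ROW[S] followed by FETCH FIRST .. ROW[S] ONLY as an alternative to LIMIT .. OFFSET, but only when database.getMode().supportOffsetFetch is set. A minimal sketch of the two equivalent queries over JDBC; the MODE URL parameter is an assumption, since this commit does not show which compatibility mode enables the flag:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class OffsetFetchExample {
    public static void main(String[] args) throws Exception {
        // The MODE setting is an assumption: the commit only adds the
        // supportOffsetFetch flag and does not show which mode sets it.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo;MODE=MSSQLServer");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY)");
            stat.execute("INSERT INTO TEST SELECT X FROM SYSTEM_RANGE(1, 100)");

            // Existing syntax, parsed unconditionally:
            print(stat, "SELECT ID FROM TEST ORDER BY ID LIMIT 5 OFFSET 10");

            // Alternative syntax handled by the new OFFSET / FETCH branch,
            // only accepted when supportOffsetFetch is enabled:
            print(stat, "SELECT ID FROM TEST ORDER BY ID OFFSET 10 ROWS FETCH FIRST 5 ROWS ONLY");
        }
    }

    private static void print(Statement stat, String sql) throws Exception {
        try (ResultSet rs = stat.executeQuery(sql)) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
        }
    }
}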
@@ -267,7 +267,7 @@ public abstract class Query extends Prepared {
         int idx = expressions.size();
         if (e instanceof ExpressionColumn) {
             ExpressionColumn exprCol = (ExpressionColumn) e;
-            String alias = exprCol.getOriginalAliasName();
+            String tableAlias = exprCol.getOriginalTableAliasName();
             String col = exprCol.getOriginalColumnName();
             for (int j = 0; j < visible; j++) {
                 boolean found = false;
@@ -275,15 +275,15 @@ public abstract class Query extends Prepared {
                 if (ec instanceof ExpressionColumn) {
                     ExpressionColumn c = (ExpressionColumn) ec;
                     found = col.equals(c.getColumnName());
-                    if (alias != null && found) {
-                        String ca = c.getOriginalAliasName();
+                    if (tableAlias != null && found) {
+                        String ca = c.getOriginalTableAliasName();
                         if (ca != null) {
-                            found = alias.equals(ca);
+                            found = tableAlias.equals(ca);
                         }
                     }
                 } else if (!(ec instanceof Alias)) {
                     continue;
-                } else if (col.equals(ec.getAlias())) {
+                } else if (tableAlias == null && col.equals(ec.getAlias())) {
                     found = true;
                 } else {
                     Expression ec2 = ec.getNonAliasExpression();
......
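The Query change above resolves an ORDER BY column against the select list by its original table alias, and only falls back to matching a select-list alias when no table qualifier was given. A small sketch of the two cases, using the same table and the same expected ordering as the new test cases later in this commit:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class OrderByAliasExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(A INT)");
            stat.execute("INSERT INTO TEST VALUES(1), (2)");

            // Qualified: TEST.A refers to the table column, so the rows are
            // ordered by A (1, 2) and the output is -1, -2.
            print(stat, "SELECT -TEST.A A FROM TEST ORDER BY TEST.A");

            // Unqualified: A matches the select-list alias, so the rows are
            // ordered by the computed value -A and the output is -2, -1.
            print(stat, "SELECT -TEST.A A FROM TEST ORDER BY A");
        }
    }

    private static void print(Statement stat, String sql) throws Exception {
        try (ResultSet rs = stat.executeQuery(sql)) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
        }
    }
}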
@@ -7,58 +7,62 @@
 package org.h2.engine;

 /*
- * Release checklist
+ * Release checklist:
+ *
  * - Test with Hibernate
  * - Run FindBugs
- * - build jarClient, check jar file size
- * - build jar, test with IKVM
- * - build javadocImpl
- * - Compile with JDK 1.6:
+ *     build jarClient
+ * - Check jar file size
+ *     build jar
+ *     build javadocImpl
 *     java16
 *     build compile
 *     set classpath=
 *     build javadoc
- *     build javadocImpl (to find missing javadocs)
- * - reset to JDK 1.4
- *     build compile
- *     java16
- *     compile
- * - Change version and build number in
- *     Constants.java
+ *     build javadocImpl
+ * - Check if missing javadocs
+ *     java14
+ *     build compile
+ *     java16
+ *     compile
+ * - Change version and build number in Constants.java
 * - Maybe increase TCP_DRIVER_VERSION (old clients must be compatible!)
 * - Check code coverage
 * - No " Message.get" (must be "throw Message.get")
 * - Check that is no TODO in the docs
 * - Run regression test with JDK 1.4 and 1.5
- * - Use latest versions of other dbs (Derby 10.4.1.3;
- *     PostgreSQL 8.3.1; MySQL 5.0.51)
+ * - Use latest versions of other dbs
+ *     Derby 10.4.1.3
+ *     PostgreSQL 8.3.1
+ *     MySQL 5.0.51
 * - Change version(s) in performance.html
- * - Run 'build benchmark' (with JDK 1.4 currently)
+ *     java14
+ *     build benchmark
 * - Copy the benchmark results and update the performance page and diagram
 * - Documentation: check if all Javadoc files are in the index
 * - Update the changelog (add new version)
 * - Update the newsfeed
- * - build docs
+ *     build docs
 * - Check dataWeb/index.html, versions and links in main, downloads, build
- * - PDF
+ *     soffice.exe -invisible macro:///Standard.Module1.H2Pdf
+ * - Check in the PDF file:
 *     - footer
 *     - front page
 *     - orphan control
 *     - check images
 *     - table of contents
- * - Use java 1.4
- * - build all
+ *     java14
+ *     build all
 * - Copy the pdf file to h2/docs
- * - build zip
- * - Windows installer (nsis)
+ *     build zip
+ *     makensis /v2 src/installer/h2.nsi
 * - Test Console
 * - Test all languages
 * - Scan for viruses
- * - build mavenDeployCentral
+ *     build mavenDeployCentral
 * - Upload to SourceForge
- * - java16
- * - build compile
- * - refresh
+ *     java16
+ *     build compile
 * - svn commit
 * - svn copy: /svn/trunk /svn/tags/version-1.0.x; Version 1.0.x (yyyy-mm-dd)
 * - Newsletter: prepare (always to BCC!!)
......
@@ -80,6 +80,12 @@ public class Mode {
      */
     public boolean uniqueIndexSingleNull;

+    /**
+     * If the syntax [OFFSET .. ROW] [FETCH ... ONLY] should be supported.
+     * This is an alternative syntax for LIMIT .. OFFSET.
+     */
+    public boolean supportOffsetFetch;
+
     private String name;

     static {
......
@@ -199,7 +199,7 @@ public class ExpressionColumn extends Expression {
         return columnName;
     }

-    public String getOriginalAliasName() {
+    public String getOriginalTableAliasName() {
         return tableAlias;
     }
......
@@ -314,7 +314,7 @@ implements Trigger, CloseListener
         Parser p = new Parser(session);
         String tab = q.substring(0, idx);
         ExpressionColumn expr = (ExpressionColumn) p.parseExpression(tab);
-        String schemaName = expr.getOriginalAliasName();
+        String schemaName = expr.getOriginalTableAliasName();
         String tableName = expr.getColumnName();
         q = q.substring(idx + " WHERE ".length());
         Object[][] columnData = parseKey(conn, q);
......
@@ -391,7 +391,7 @@ public class DbContextRule implements Rule {
             return null;
         }
         String alias = up.substring(0, i);
-        if (Parser.isKeyword(alias)) {
+        if (Parser.isKeyword(alias, true)) {
             return null;
         }
         return query.substring(alias.length());
@@ -414,7 +414,7 @@ public class DbContextRule implements Rule {
             return null;
         }
         String alias = up.substring(0, i);
-        if (Parser.isKeyword(alias)) {
+        if (Parser.isKeyword(alias, true)) {
             return null;
         }
         if (add) {
......
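Both auto-complete checks now pass true for the new supportOffsetFetch parameter, so OFFSET and FETCH are rejected as aliases the same way other keywords are. A minimal sketch of what the flag changes; Parser.isKeyword and its new parameter come from this commit, while the wrapper class is only for illustration:

import org.h2.command.Parser;

public class KeywordCheckExample {
    public static void main(String[] args) {
        // With the flag enabled, the two new keywords are reserved:
        System.out.println(Parser.isKeyword("OFFSET", true));  // true
        System.out.println(Parser.isKeyword("FETCH", true));   // true
        // Without it they remain plain identifiers, which is why the
        // CREATE CONSTANT / CREATE AGGREGATE / CREATE ALIAS checks pass false:
        System.out.println(Parser.isKeyword("FETCH", false));  // false
        // Tokens that were already keywords are reported either way:
        System.out.println(Parser.isKeyword("ORDER", false));  // true
    }
}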
@@ -270,6 +270,18 @@ java org.h2.test.TestAll timer

 /*

+drop table test;
+create table test(a int);
+insert into test values(1);
+insert into test values(2);
+select -test.a a from test order by test.a;
+select -test.a from test order by test.a;
+select -test.a aa from test order by a;
+select -test.a aa from test order by aa;
+select -test.a a from test order by test.a;
+select -test.a a from test order by a;
+
 drop all objects;
 create domain email as varchar;
 create table test(e email);
@@ -289,8 +301,15 @@ https://bugs.eclipse.org/bugs/show_bug.cgi?id=137701

 Support large updates (use the transaction log to undo).

+H2 Console: support single file upload and directory download (optional)
+
 document FTL_SEARCH, FTL_SEARCH_DATA

+find quote:
+You can't make a system that will not lose data, you can only make
+a system that knows the last save point of 100% integrity. There are
+too many variables and too much randomness on a cold hard power failure.
+
 JaQu

 row level locking
......
 --- special grammar and test cases ---------------------------------------------------------------------------------------------
+create table test(a int);
+> ok
+insert into test values(1), (2);
+> update count: 2
+select -test.a a from test order by test.a;
+> A
+> --
+> -1
+> -2
+> rows (ordered): 2
+select -test.a from test order by test.a;
+> - TEST.A
+> --------
+> -1
+> -2
+> rows (ordered): 2
+select -test.a aa from test order by a;
+> AA
+> --
+> -1
+> -2
+> rows (ordered): 2
+select -test.a aa from test order by aa;
+> AA
+> --
+> -2
+> -1
+> rows (ordered): 2
+select -test.a a from test order by a;
+> A
+> --
+> -2
+> -1
+> rows (ordered): 2
+drop table test;
+> ok
 CREATE TABLE table_a(a_id INT PRIMARY KEY, left_id INT, right_id INT);
 > ok
......
@@ -11,6 +11,7 @@ import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.PrintWriter;

+import org.h2.engine.Constants;
 import org.h2.util.StringUtils;

 /**
@@ -52,6 +53,8 @@ public class MergeDocs {
         writer.println("<html><head><meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" /><title>");
         writer.println("H2 Documentation");
         writer.println("</title><link rel=\"stylesheet\" type=\"text/css\" href=\"stylesheetPdf.css\" /></head><body>");
+        writer.println("<h1>H2 Database Engine</h1>");
+        writer.println("<p>Version " + Constants.getFullVersion() + "</p>");
         writer.println(finalText);
         writer.println("</body></html>");
         writer.close();
......
@@ -548,4 +548,7 @@ usable weblica jena preserved instrumentation inspect jayaprakash ashwin
 varargs automate couldn unclear eat dtp disks tablespaces great reproduces
 hhh overridden sqle propogation buildid tsv monospace microarrays pathogen
 geocoder geocoding longitude estimating microarray latitude magnolia pfgrc
-refill analyzers patches popular came growing indication
\ No newline at end of file
+refill analyzers patches popular came growing indication arabic graphic toc
+numbering goto outline makensis macro hyperlink dispatch setlocal wend
+widows msgbox designer styles families uno soffice orphans stan ucb rem
+pdfurl upate pagebreak