h2database commit 38687e98 (Unverified)
Authored Jan 10, 2018 by Prashant Bhat
Remove JPA/ORM configuration txt files as they're already integrated
Parent: 659bf5ca
Showing 7 changed files with 0 additions and 844 deletions.
h2/src/tools/com/caucho/jdbc/H2MetaData.java.txt  +0 -47
h2/src/tools/com/caucho/jdbc/JdbcMetaData.java.txt  +0 -15
h2/src/tools/net/java/ao/db/H2DatabaseProvider.java.txt  +0 -41
h2/src/tools/oracle/toplink/essentials/platform/database/H2Platform.java.txt  +0 -142
h2/src/tools/org/apache/openjpa/jdbc/sql/H2Dictionary.java.txt  +0 -202
h2/src/tools/org/h2/dev/jpox/H2Adapter.java.txt  +0 -352
h2/src/tools/org/h2/dev/mail/SendMail.java.txt  +0 -45
h2/src/tools/com/caucho/jdbc/H2MetaData.java.txt
deleted 100644 → 0
package com.caucho.jdbc;
import com.caucho.util.Log;
import javax.sql.DataSource;
import java.util.logging.Logger;
/**
* Metadata for the H2 database.
* For details, see
* http://wondering.ru/java/H2ejb3onResinSupport1.0.zip
*/
public class H2MetaData extends JdbcMetaData {
private static final Logger log = Log.open(H2MetaData.class);
protected H2MetaData(DataSource ds) {
super(ds);
}
/**
* Returns the blob type.
*/
public String getBlobType(){
return "BLOB";
}
/**
* Returns the long type.
*/
public String getLongType() {
return "BIGINT";
}
/**
* Returns true if identity is supported.
*/
public boolean supportsIdentity() {
return true;
}
/**
* Returns the identity property
*/
public String createIdentitySQL(String sqlType) {
return "IDENTITY";
}
}
h2/src/tools/com/caucho/jdbc/JdbcMetaData.java.txt
deleted 100644 → 0
Sorry, I cannot include this file because
http://ohloh.org says that it may conflict
with another license.

In the file JdbcMetaData.java, in the package com.caucho.jdbc,
in the method public static JdbcMetaData create(DataSource ds),
you need to add

    if ("H2".equalsIgnoreCase(name))
        return new H2MetaData(ds);
    else

just before

    if ("oracle".equalsIgnoreCase(name))
h2/src/tools/net/java/ao/db/H2DatabaseProvider.java.txt
deleted 100644 → 0
/*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: Michael Manske
*/
package net.java.ao.db;
import java.sql.Driver;
/**
* This is a database provider for ActiveObjects.
* See also https://activeobjects.dev.java.net .
* Usage:
* <pre>
* EntityManager manager = new EntityManager(new H2DatabaseProvider(
* dbProperties.getProperty("db.uri"),
* dbProperties.getProperty("db.username"),
* dbProperties.getProperty("db.password")));
* </pre>
*
* @author Michael Manske
* @author Thomas Mueller
*/
public class H2DatabaseProvider extends HSQLDatabaseProvider {
/**
* Create a new provider.
*
* @param uri the database uri
* @param username the user name
* @param password the password
*/
public H2DatabaseProvider(String uri, String username, String password) {
super(uri, username, password);
}
public Class< ? extends Driver> getDriverClass() throws ClassNotFoundException {
return (Class< ? extends Driver>) Class.forName("org.h2.Driver");
}
}
h2/src/tools/oracle/toplink/essentials/platform/database/H2Platform.java.txt
deleted 100644 → 0
/*
* The contents of this file are subject to the terms
* of the Common Development and Distribution License
* (the "License"). You may not use this file except
* in compliance with the License.
*
* You can obtain a copy of the license at
* glassfish/bootstrap/legal/CDDLv1.0.txt or
* https://glassfish.dev.java.net/public/CDDLv1.0.html .
* See the License for the specific language governing
* permissions and limitations under the License.
*
* When distributing Covered Code, include this CDDL
* HEADER in each file and include the License file at
* glassfish/bootstrap/legal/CDDLv1.0.txt. If applicable,
* add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your
* own identifying information: Portions Copyright [yyyy]
* [name of copyright owner]
*/
// Copyright (c) 1998, 2006, Oracle. All rights reserved.
package oracle.toplink.essentials.platform.database;
import java.io.IOException;
import java.io.Writer;
import java.util.Hashtable;
import oracle.toplink.essentials.exceptions.ValidationException;
import oracle.toplink.essentials.expressions.ExpressionOperator;
import oracle.toplink.essentials.internal.databaseaccess.FieldTypeDefinition;
import oracle.toplink.essentials.queryframework.ValueReadQuery;
/**
* This platform provides H2 specific behaviour.
* To enable this platform change the following setting in persistence.xml:
* <pre>
* <property
* name="toplink.target-database"
* value="oracle.toplink.essentials.platform.database.H2Platform"/>
* </pre>
* In old versions of Glassfish, the property name is
* <code>toplink.platform.class.name</code>.
* See also: https://glassfish.dev.java.net/issues/show_bug.cgi?id=4042
*
* @author Thomas Mueller
* @author Marcio Borges (http://www.marciowb.net/blog/2008_08_01_)
*/
public class H2Platform extends DatabasePlatform {
protected Hashtable buildFieldTypes() {
Hashtable fieldTypeMapping;
fieldTypeMapping = super.buildFieldTypes();
fieldTypeMapping.put(Boolean.class, new FieldTypeDefinition("TINYINT", false));
fieldTypeMapping.put(Integer.class, new FieldTypeDefinition("INTEGER", false));
fieldTypeMapping.put(Long.class, new FieldTypeDefinition("NUMERIC", 19));
fieldTypeMapping.put(Float.class, new FieldTypeDefinition("REAL", false));
fieldTypeMapping.put(Double.class, new FieldTypeDefinition("REAL", false));
fieldTypeMapping.put(Short.class, new FieldTypeDefinition("SMALLINT", false));
fieldTypeMapping.put(Byte.class, new FieldTypeDefinition("SMALLINT", false));
fieldTypeMapping.put(java.math.BigInteger.class, new FieldTypeDefinition("NUMERIC", 38));
fieldTypeMapping.put(java.math.BigDecimal.class, new FieldTypeDefinition("NUMERIC", 38).setLimits(38, -19, 19));
fieldTypeMapping.put(Number.class, new FieldTypeDefinition("NUMERIC", 38).setLimits(38, -19, 19));
fieldTypeMapping.put(Byte[].class, new FieldTypeDefinition("BINARY", false));
fieldTypeMapping.put(Character[].class, new FieldTypeDefinition("LONGVARCHAR", false));
fieldTypeMapping.put(byte[].class, new FieldTypeDefinition("BINARY", false));
fieldTypeMapping.put(char[].class, new FieldTypeDefinition("LONGVARCHAR", false));
fieldTypeMapping.put(java.sql.Blob.class, new FieldTypeDefinition("BINARY", false));
fieldTypeMapping.put(java.sql.Clob.class, new FieldTypeDefinition("LONGVARCHAR", false));
fieldTypeMapping.put(java.sql.Date.class, new FieldTypeDefinition("DATE", false));
fieldTypeMapping.put(java.sql.Time.class, new FieldTypeDefinition("TIME", false));
fieldTypeMapping.put(java.sql.Timestamp.class, new FieldTypeDefinition("TIMESTAMP", false));
return fieldTypeMapping;
}
public boolean isH2() {
return true;
}
public boolean supportsForeignKeyConstraints() {
return true;
}
public ValueReadQuery buildSelectQueryForNativeSequence(String seqName, Integer size) {
StringBuffer buff = new StringBuffer();
buff.append("SELECT MAX(NEXT VALUE FOR ");
buff.append(getQualifiedSequenceName(seqName));
buff.append(") FROM SYSTEM_RANGE(1, ");
buff.append(size);
buff.append(")");
String sql = buff.toString();
return new ValueReadQuery(sql);
}
public boolean supportsNativeSequenceNumbers() {
return true;
}
protected String getQualifiedSequenceName(String seqName) {
if (getTableQualifier().equals("")) {
return seqName;
}
return getTableQualifier() + "." + seqName;
}
public boolean supportsSelectForUpdateNoWait() {
return true;
}
protected ExpressionOperator todayOperator() {
return ExpressionOperator.simpleFunctionNoParentheses(ExpressionOperator.Today, "SYSDATE");
}
protected void initializePlatformOperators() {
super.initializePlatformOperators();
addOperator(ExpressionOperator.simpleMath(ExpressionOperator.Concat, "||"));
}
public boolean shouldUseJDBCOuterJoinSyntax() {
return false;
}
public boolean supportsSequenceObjects() {
return true;
}
public boolean supportsIdentity() {
return true;
}
public ValueReadQuery buildSelectQueryForIdentity() {
return new ValueReadQuery("SELECT IDENTITY()");
}
public void printFieldIdentityClause(Writer writer) throws ValidationException {
try {
writer.write(" IDENTITY");
} catch (final IOException ioException) {
throw ValidationException.fileError(ioException);
}
}
}
h2/src/tools/org/apache/openjpa/jdbc/sql/H2Dictionary.java.txt
deleted 100644 → 0
package org.apache.openjpa.jdbc.sql;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Arrays;
import java.util.Locale;
import org.apache.commons.lang.StringUtils;
import org.apache.openjpa.jdbc.kernel.exps.FilterValue;
import org.apache.openjpa.jdbc.schema.Column;
import org.apache.openjpa.jdbc.schema.PrimaryKey;
import org.apache.openjpa.jdbc.schema.Table;
import org.apache.openjpa.jdbc.schema.Unique;
import org.apache.openjpa.meta.JavaTypes;
public class H2Dictionary extends DBDictionary {
public H2Dictionary() {
platform = "H2";
validationSQL = "CALL 1";
closePoolSQL = "SHUTDOWN";
supportsAutoAssign = true;
lastGeneratedKeyQuery = "CALL IDENTITY()";
autoAssignClause = "IDENTITY";
autoAssignTypeName = "INTEGER";
nextSequenceQuery = "CALL NEXT VALUE FOR {0}";
crossJoinClause = "CROSS JOIN";
requiresConditionForCrossJoin = false;
stringLengthFunction = "LENGTH({0})";
trimLeadingFunction = "LTRIM({0})";
trimTrailingFunction = "RTRIM({0})";
trimBothFunction = "TRIM({0})";
useSchemaName = true;
supportsSelectForUpdate = true;
supportsSelectStartIndex = true;
supportsSelectEndIndex = true;
rangePosition = RANGE_POST_LOCK;
supportsDeferredConstraints = false;
blobTypeName = "BLOB";
doubleTypeName = "DOUBLE";
supportsNullTableForGetPrimaryKeys = true;
supportsNullTableForGetIndexInfo = true;
requiresCastForMathFunctions = false;
requiresCastForComparisons = false;
reservedWordSet.addAll(Arrays.asList(new String[] { "CURRENT_TIMESTAMP", "CURRENT_TIME", "CURRENT_DATE", "CROSS", "DISTINCT", "EXCEPT", "EXISTS", "FROM", "FOR", "FALSE",
"FULL", "GROUP", "HAVING", "INNER", "INTERSECT", "IS", "JOIN", "LIKE", "MINUS", "NATURAL", "NOT", "NULL", "ON", "ORDER", "PRIMARY", "ROWNUM", "SELECT", "SYSDATE",
"SYSTIME", "SYSTIMESTAMP", "TODAY", "TRUE", "UNION", "WHERE" }));
}
public int getJDBCType(int metaTypeCode, boolean lob) {
int type = super.getJDBCType(metaTypeCode, lob);
switch (type) {
case Types.BIGINT:
if (metaTypeCode == JavaTypes.BIGINTEGER)
return Types.NUMERIC;
break;
}
return type;
}
public int getPreferredType(int type) {
return super.getPreferredType(type);
}
public String[] getAddPrimaryKeySQL(PrimaryKey pk) {
return new String[0];
}
public String[] getDropPrimaryKeySQL(PrimaryKey pk) {
return new String[0];
}
public String[] getAddColumnSQL(Column column) {
return new String[] { "ALTER TABLE " + getFullName(column.getTable(), false) + " ADD COLUMN " + getDeclareColumnSQL(column, true) };
}
public String[] getCreateTableSQL(Table table) {
StringBuffer buf = new StringBuffer();
buf.append("CREATE TABLE ").append(getFullName(table, false)).append(" (");
Column[] cols = table.getColumns();
for (int i = 0; i < cols.length; i++) {
if (i > 0)
buf.append(", ");
buf.append(getDeclareColumnSQL(cols[i], false));
}
PrimaryKey pk = table.getPrimaryKey();
String pkStr;
if (pk != null) {
pkStr = getPrimaryKeyConstraintSQL(pk);
if (!StringUtils.isEmpty(pkStr))
buf.append(", ").append(pkStr);
}
Unique[] uniques = table.getUniques();
String uniqueStr;
for (int i = 0; i < uniques.length; i++) {
uniqueStr = getUniqueConstraintSQL(uniques[i]);
if (uniqueStr != null)
buf.append(", ").append(uniqueStr);
}
buf.append(")");
return new String[] { buf.toString() };
}
protected String getPrimaryKeyConstraintSQL(PrimaryKey pk) {
Column[] cols = pk.getColumns();
if (cols.length == 1 && cols[0].isAutoAssigned())
return null;
return super.getPrimaryKeyConstraintSQL(pk);
}
public boolean isSystemIndex(String name, Table table) {
return name.toUpperCase(Locale.ENGLISH).startsWith("SYSTEM_");
}
protected String getSequencesSQL(String schemaName, String sequenceName) {
StringBuffer buf = new StringBuffer();
buf.append("SELECT SEQUENCE_SCHEMA, SEQUENCE_NAME FROM ").append("INFORMATION_SCHEMA.SEQUENCES");
if (schemaName != null || sequenceName != null)
buf.append(" WHERE ");
if (schemaName != null) {
buf.append("SEQUENCE_SCHEMA = ?");
if (sequenceName != null)
buf.append(" AND ");
}
if (sequenceName != null)
buf.append("SEQUENCE_NAME = ?");
return buf.toString();
}
protected SQLBuffer toOperation(String op, SQLBuffer selects, SQLBuffer from, SQLBuffer where, SQLBuffer group, SQLBuffer having, SQLBuffer order, boolean distinct,
boolean forUpdate, long start, long end) {
return super.toOperation(op, selects, from, where, group, having, order, distinct, forUpdate, start, end);
}
public Column[] getColumns(DatabaseMetaData meta, String catalog, String schemaName, String tableName, String columnName, Connection conn) throws SQLException {
Column[] cols = super.getColumns(meta, catalog, schemaName, tableName, columnName, conn);
return cols;
}
public void setDouble(PreparedStatement stat, int idx, double val, Column col) throws SQLException {
super.setDouble(stat, idx, val, col);
}
public void setBigDecimal(PreparedStatement stat, int idx, BigDecimal val, Column col) throws SQLException {
super.setBigDecimal(stat, idx, val, col);
}
protected void appendSelectRange(SQLBuffer buf, long start, long end) {
if (end != Long.MAX_VALUE)
buf.append(" LIMIT ").appendValue(end - start);
if (start != 0)
buf.append(" OFFSET ").appendValue(start);
}
public void substring(SQLBuffer buf, FilterValue str, FilterValue start, FilterValue end) {
buf.append("SUBSTR(");
str.appendTo(buf);
buf.append(", (");
start.appendTo(buf);
buf.append(" + 1)");
if (end != null) {
buf.append(", (");
end.appendTo(buf);
buf.append(" - ");
start.appendTo(buf);
buf.append(")");
}
buf.append(")");
}
public void indexOf(SQLBuffer buf, FilterValue str, FilterValue find, FilterValue start) {
buf.append("(POSITION(");
find.appendTo(buf);
buf.append(" IN ");
if (start != null)
substring(buf, str, start, null);
else
str.appendTo(buf);
buf.append(") - 1");
if (start != null) {
buf.append(" + ");
start.appendTo(buf);
}
buf.append(")");
}
}
h2/src/tools/org/h2/dev/jpox/H2Adapter.java.txt
deleted 100644 → 0
/**********************************************************************
Copyright (c) 2006 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
2006 Thomas Mueller - updated the dialect for the H2 database engine
**********************************************************************/
package org.jpox.store.rdbms.adapter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import javax.sql.DataSource;
import org.jpox.store.DatastoreContainerObject;
import org.jpox.store.DatastoreIdentifier;
import org.jpox.store.Dictionary;
import org.jpox.store.expression.LogicSetExpression;
import org.jpox.store.expression.NumericExpression;
import org.jpox.store.expression.QueryExpression;
import org.jpox.store.expression.ScalarExpression;
import org.jpox.store.expression.TableExprAsJoins;
import org.jpox.store.rdbms.Column;
import org.jpox.store.rdbms.key.PrimaryKey;
import org.jpox.store.rdbms.table.Table;
/**
* Provides methods for adapting SQL language elements to the H2 Database Engine.
*
* @version $Revision: 1.1 $
*/
class H2Adapter extends DatabaseAdapter
{
private String schemaName;
/**
* Constructs a H2 adapter based on the given JDBC metadata.
* @param dictionary The Dictionary to use
* @param metadata the database metadata.
*/
public H2Adapter(Dictionary dictionary, DatabaseMetaData metadata)
{
super(dictionary, metadata);
// Set schema name
try
{
ResultSet rs = metadata.getSchemas();
while (rs.next())
{
if (rs.getBoolean("IS_DEFAULT"))
{
schemaName = rs.getString("TABLE_SCHEM");
}
}
}
catch (SQLException e)
{
e.printStackTrace();
// ignore
}
}
/**
* Getter for the vendor ID for this adapter.
* @return The vendor ID
*/
public String getVendorID()
{
return "h2";
}
/**
* Accessor for a Connection to the datastore.
* @param ds The data source. Possible to have more than one data source for fail over
* @param userName The username for the datastore
* @param password The password for the datastore
* @param isolationLevel The level of transaction isolation
* @return The Connection
* @throws SQLException Thrown when an error occurs in the creation.
**/
public Connection getConnection(DataSource[] ds, String userName, String password, int isolationLevel)
throws SQLException
{
return super.getConnection(ds,userName,password,Connection.TRANSACTION_SERIALIZABLE);
}
/**
* Accessor for the maximum table name length permitted on this
* datastore.
* @return Max table name length
**/
public int getMaxTableNameLength()
{
return SQLConstants.MAX_IDENTIFIER_LENGTH;
}
/**
* Accessor for the maximum constraint name length permitted on this
* datastore.
* @return Max constraint name length
**/
public int getMaxConstraintNameLength()
{
return SQLConstants.MAX_IDENTIFIER_LENGTH;
}
/**
* Accessor for the maximum index name length permitted on this datastore.
* @return Max index name length
**/
public int getMaxIndexNameLength()
{
return SQLConstants.MAX_IDENTIFIER_LENGTH;
}
/**
* Accessor for the maximum column name length permitted on this datastore.
* @return Max column name length
**/
public int getMaxColumnNameLength()
{
return SQLConstants.MAX_IDENTIFIER_LENGTH;
}
/**
* Accessor for the SQL statement to add a column to a table.
* @param table The table
* @param col The column
* @return The SQL necessary to add the column
*/
public String getAddColumnStatement(DatastoreContainerObject table, Column col)
{
return "ALTER TABLE " + table.toString() + " ADD COLUMN " + col.getSQLDefinition();
}
/**
* Method to return the SQL to append to the SELECT clause of a SELECT statement to handle
* restriction of ranges using the LIMIT keyword.
* @param offset The offset to return from
* @param count The number of items to return
* @return The SQL to append to allow for ranges using LIMIT.
*/
public String getRangeByLimitSelectClause(long offset, long count)
{
if (offset >= 0 && count > 0)
{
return " LIMIT " + offset + " " + count + " ";
}
else if (offset <= 0 && count > 0)
{
return " LIMIT 0 " + count + " ";
}
else
{
return "";
}
}
/**
* Accessor for whether the adapter supports the transaction isolation level
*
* @param isolationLevel the isolation level
* @return Whether the transaction isolation level setting is supported.
*/
public boolean supportsTransactionIsolationLevel(int isolationLevel)
{
if (isolationLevel == Connection.TRANSACTION_READ_COMMITTED || isolationLevel == Connection.TRANSACTION_SERIALIZABLE)
{
return true;
}
return false;
}
/**
* Whether the datastore supports specification of the primary key in CREATE
* TABLE statements.
* @return Whether it allows "PRIMARY KEY ..."
*/
public boolean supportsPrimaryKeyInCreateStatements()
{
return true;
}
/**
* Accessor for the Schema Name for this datastore.
*
* @param conn Connection to the datastore
* @return The schema name
**/
public String getSchemaName(Connection conn)
throws SQLException
{
return schemaName;
}
/**
* @param pk An object describing the primary key.
* @return The PK statement
*/
public String getAddPrimaryKeyStatement(PrimaryKey pk)
{
// PK is created by the CREATE TABLE statement so we just return null
return null;
}
/**
* Returns the appropriate SQL to drop the given table.
* It should return something like:
* <p>
* <blockquote><pre>
* DROP TABLE FOO
* </pre></blockquote>
*
* @param table The table to drop.
* @return The text of the SQL statement.
*/
public String getDropTableStatement(DatastoreContainerObject table)
{
return "DROP TABLE " + table.toString();
}
/**
* Whether we support deferred constraints in keys.
* @return whether we support deferred constraints in keys.
**/
public boolean supportsDeferredConstraints()
{
return false;
}
/**
* Whether we support auto incrementing fields.
* @return whether we support auto incrementing fields.
**/
public boolean supportsAutoIncrementFields()
{
return true;
}
/**
* Accessor for the auto-increment sql statement for this datastore.
* @param tableName Name of the table that the autoincrement is for
* @param columnName Name of the column that the autoincrement is for
* @return The statement for getting the latest auto-increment key
**/
public String getAutoIncrementStmt(String tableName, String columnName)
{
return "CALL IDENTITY()";
}
/**
* Accessor for the auto-increment keyword for generating DDLs (CREATE TABLE...).
* @return The keyword for a column using auto-increment
**/
public String getAutoIncrementKeyword()
{
return "IDENTITY";
}
/**
* Method to return the INSERT statement to use when inserting into a table that has no
* columns specified. This is the case when we have a single column in the table and that column
* is autoincrement/identity (and so is assigned automatically in the datastore).
* @param table The table
* @return The INSERT statement
*/
public String getInsertStatementForNoColumns(Table table)
{
return "INSERT INTO " + table.toString() + " VALUES(NULL)";
}
/**
* Whether to allow Unique statements in the section of CREATE TABLE after the
* column definitions.
* @see org.jpox.store.rdbms.adapter.DatabaseAdapter#supportsUniqueConstraintsInEndCreateStatements()
*/
public boolean supportsUniqueConstraintsInEndCreateStatements()
{
return true;
}
/**
* Whether this datastore supports the use of CHECK after the column
* definitions in CREATE TABLE statements (DDL).
* e.g.
* CREATE TABLE XYZ
* (
* COL_A int,
* COL_B char(1),
* PRIMARY KEY (COL_A),
* CHECK (COL_B IN ('Y','N'))
* )
* @return whether we can use CHECK after the column definitions in CREATE TABLE.
**/
public boolean supportsCheckConstraintsInEndCreateStatements()
{
return true;
}
/**
* Accessor for whether the specified type is allow to be part of a PK.
* @param datatype The JDBC type
* @return Whether it is permitted in the PK
*/
public boolean isValidPrimaryKeyType(int datatype)
{
return true;
}
/**
* Method to generate a modulus expression. The binary % operator is said to
* yield the remainder of its operands from an implied division; the
* left-hand operand is the dividend and the right-hand operand is the
* divisor. This returns MOD(expr1, expr2).
* @param operand1 the left expression
* @param operand2 the right expression
* @return The Expression for modulus
*/
public NumericExpression modOperator(ScalarExpression operand1, ScalarExpression operand2)
{
ArrayList args = new ArrayList();
args.add(operand1);
args.add(operand2);
return new NumericExpression("MOD", args);
}
/**
* Return a new TableExpression.
* @param qs The QueryStatement to add the expression to
* @param table The table in the expression
* @param rangeVar range variable to assign to the expression.
* @return The expression.
**/
public LogicSetExpression newTableExpression(QueryExpression qs, DatastoreContainerObject table, DatastoreIdentifier rangeVar)
{
return new TableExprAsJoins(qs, table, rangeVar);
}
}
h2/src/tools/org/h2/dev/mail/SendMail.java.txt
deleted 100644 → 0
/*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.dev.mail;
import java.util.Properties;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.Message.RecipientType;
import javax.mail.internet.MimeMessage;
/**
* Utility class to send a mail over a fixed gmail account.
*/
public class SendMail {
// http://repo2.maven.org/maven2/javax/mail/mail/1.4.1/mail-1.4.1.jar
// http://repo2.maven.org/maven2/javax/activation/activation/1.1/activation-1.1.jar
public static void main(String[] args) throws Exception {
String to = "thomas.tom.mueller" + "@" + "gmail.com";
sendMailOverGmail("", to, "Test", "Test Mail");
}
static void sendMailOverGmail(String password, String to, String subject, String body) throws Exception {
String username = "testing1212123" + "@" + "gmail.com";
String host = "smtp.gmail.com";
Properties prop = new Properties();
prop.put("mail.smtps.auth", "true");
Session session = Session.getDefaultInstance(prop);
session.setProtocolForAddress("rfc822", "smtps");
session.setDebug(true);
MimeMessage msg = new MimeMessage(session);
msg.setRecipients(RecipientType.TO, to);
msg.setSubject(subject);
msg.setText(body);
Transport t = session.getTransport("smtps");
try {
t.connect(host, username, password);
t.sendMessage(msg, msg.getAllRecipients());
} finally {
t.close();
}
}
}