Commit 1e0db2e8
Authored December 15, 2006 by Thomas Mueller

--no commit message

Parent: 2dea109a

Showing 4 changed files with 831 additions and 0 deletions
h2/src/tools/com/caucho/jdbc/H2MetaData.java.txt  +47 -0
h2/src/tools/com/caucho/jdbc/JdbcMetaData.java.txt  +464 -0
.../toplink/essentials/platform/database/H2Platform.java.txt  +116 -0
...c/tools/org/apache/openjpa/jdbc/sql/H2Dictionary.java.txt  +204 -0
h2/src/tools/com/caucho/jdbc/H2MetaData.java.txt
0 → 100644
package com.caucho.jdbc;
import com.caucho.util.Log;
import javax.sql.DataSource;
import java.util.logging.Logger;
/**
* Metadata for the H2 database.
* For details, see
* http://wondering.ru/java/H2ejb3onResinSupport1.0.zip
*/
public class H2MetaData extends JdbcMetaData {
private static final Logger log = Log.open(H2MetaData.class);
protected H2MetaData(DataSource ds) {
super(ds);
}
/**
* Returns the blob type.
*/
public String getBlobType(){
return "BLOB";
}
/**
* Returns the long type.
*/
public String getLongType() {
return "BIGINT";
}
/**
* Returns true if identity is supported.
*/
public boolean supportsIdentity() {
return true;
}
/**
* Returns the identity property
*/
public String createIdentitySQL(String sqlType) {
return "IDENTITY";
}
}
h2/src/tools/com/caucho/jdbc/JdbcMetaData.java.txt
0 → 100644
/*
* Copyright (c) 1998-2006 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
*
* Free Software Foundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Scott Ferguson
*/
package com.caucho.jdbc;
import com.caucho.util.L10N;
import com.caucho.util.Log;
import javax.sql.DataSource;
import java.sql.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Abstract way of grabbing data from the JDBC connection.
*/
public class JdbcMetaData {
private static final L10N L = new L10N(JdbcMetaData.class);
private static final Logger log = Log.open(JdbcMetaData.class);
private DataSource _ds;
/**
* Create a new JDBC backing store.
*/
protected JdbcMetaData(DataSource ds)
{
_ds = ds;
}
/**
* Create based on the connection.
*/
public static JdbcMetaData create(DataSource ds)
{
Connection conn = null;
try {
conn = ds.getConnection();
DatabaseMetaData md = conn.getMetaData();
String name = md.getDatabaseProductName();
log.fine(L.l("Database '{0}' metadata.", name));
if ("H2".equalsIgnoreCase(name))
return new H2MetaData(ds);
else if ("oracle".equalsIgnoreCase(name))
return new OracleMetaData(ds);
else if ("resin".equalsIgnoreCase(name))
return new ResinMetaData(ds);
else if ("postgres".equalsIgnoreCase(name) ||
"PostgreSQL".equalsIgnoreCase(name))
return new PostgresMetaData(ds);
else if ("mysql".equalsIgnoreCase(name))
return new MysqlMetaData(ds);
else if ("Microsoft SQL Server".equalsIgnoreCase(name))
return new SqlServerMetaData(ds);
else {
log.fine(name + " is an unknown database type");
return new JdbcMetaData(ds);
}
} catch (SQLException e) {
log.log(Level.FINE, e.toString(), e);
return new JdbcMetaData(ds);
} finally {
try {
if (conn != null) conn.close();
} catch (SQLException e) {
}
}
}
/**
* Returns the database name.
*/
public String getDatabaseName()
{
Connection conn = null;
try {
conn = getConnection();
DatabaseMetaData md = conn.getMetaData();
return md.getDatabaseProductName();
} catch (SQLException e) {
log.log(Level.WARNING, e.toString(), e);
return "unknown";
} finally {
try {
if (conn != null) conn.close();
} catch (SQLException e) {
}
}
}
/**
* Returns the blob type.
*/
public String getBlobType()
{
Connection conn = null;
try {
conn = getConnection();
DatabaseMetaData md = conn.getMetaData();
ResultSet rs;
rs = md.getTypeInfo();
try {
while (rs.next()) {
if (rs.getShort("DATA_TYPE") == Types.BLOB) {
return rs.getString("TYPE_NAME");
}
}
} finally {
rs.close();
}
rs = md.getTypeInfo();
try {
while (rs.next()) {
int dataType = rs.getShort("DATA_TYPE");
if (dataType == Types.LONGVARBINARY) {
return rs.getString("TYPE_NAME");
}
}
} finally {
rs.close();
}
rs = md.getTypeInfo();
try {
while (rs.next()) {
if (rs.getShort("DATA_TYPE") == Types.BINARY) {
return rs.getString("TYPE_NAME");
}
}
} finally {
rs.close();
}
rs = md.getTypeInfo();
try {
while (rs.next()) {
if (rs.getShort("DATA_TYPE") == Types.VARBINARY) {
return rs.getString("TYPE_NAME");
}
}
} finally {
rs.close();
}
} catch (SQLException e) {
log.log(Level.FINE, e.toString(), e);
} finally {
try {
if (conn != null)
conn.close();
} catch (Exception e) {
}
}
return null;
}
/**
* Returns the long type.
*/
public String getLongType()
{
Connection conn = null;
try {
conn = getConnection();
DatabaseMetaData md = conn.getMetaData();
ResultSet rs;
rs = md.getTypeInfo();
try {
while (rs.next()) {
if (rs.getShort("DATA_TYPE") == Types.BIGINT) {
return rs.getString("TYPE_NAME");
}
}
} finally {
rs.close();
}
} catch (SQLException e) {
log.log(Level.FINE, e.toString(), e);
} finally {
try {
if (conn != null) conn.close();
} catch (SQLException e) {
}
}
return null;
}
/**
* Returns true if identity is supported.
*/
public boolean supportsIdentity()
{
return false;
}
/**
* Returns the identity property
*/
public String createIdentitySQL(String sqlType)
{
throw new UnsupportedOperationException(getClass().getName());
}
/**
* Returns true if sequences are supported.
*/
public boolean supportsSequences()
{
return false;
}
/**
* Returns a sequence creation expression.
*/
public String createSequenceSQL(String name, int size)
{
throw new UnsupportedOperationException(getClass().getName());
}
/**
* Returns a sequence select expression.
*/
public String selectSequenceSQL(String name)
{
throw new UnsupportedOperationException(getClass().getName());
}
/**
* Returns an expression that tests whether the sequence exists.
*/
public String testSequenceSQL(String name)
{
return selectSequenceSQL(name) + " WHERE 1=0";
}
/**
* Returns the code to test for a boolean value for a term.
*/
public String generateBoolean(String term)
{
return term;
}
/**
* Returns a limit.
*/
public String limit(String sql, int max)
{
return sql;
}
/**
* Returns the column SQL for the given SQL type,
* taking length, precision and scale into account.
*/
public String getCreateColumnSQL(int sqlType, int length, int precision, int scale)
{
String type = null;
switch (sqlType) {
case Types.BOOLEAN:
type = getCreateColumnSQLImpl(sqlType, length, precision, scale);
if (type == null)
type = getCreateColumnSQLImpl(Types.BIT, length, precision, scale);
break;
case Types.DATE:
type = getCreateColumnSQLImpl(sqlType, length, precision, scale);
if (type == null)
type = getCreateColumnSQLImpl(Types.TIMESTAMP, length, precision, scale);
break;
case Types.TIME:
type = getCreateColumnSQLImpl(sqlType, length, precision, scale);
if (type == null)
type = getCreateColumnSQLImpl(Types.TIMESTAMP, length, precision, scale);
break;
case Types.DOUBLE:
type = getCreateColumnSQLImpl(Types.DOUBLE, length, precision, scale);
break;
case Types.NUMERIC:
type = getCreateColumnSQLImpl(Types.NUMERIC, length, precision, scale);
break;
default:
type = getCreateColumnSQLImpl(sqlType, length, precision, scale);
break;
}
if (type == null)
type = getDefaultCreateTableSQL(sqlType, length, precision, scale);
return type;
}
/**
* Returns the SQL for the table with the given SQL type.
*/
protected String getCreateColumnSQLImpl(int sqlType, int length, int precision, int scale)
{
Connection conn = null;
try {
conn = getConnection();
DatabaseMetaData md = conn.getMetaData();
ResultSet rs;
rs = md.getTypeInfo();
try {
while (rs.next()) {
if (rs.getShort("DATA_TYPE") == sqlType) {
String typeName = rs.getString("TYPE_NAME");
String params = rs.getString("CREATE_PARAMS");
if (params == null || params.equals(""))
return typeName;
else if (params.startsWith("(M)")) {
if (length > 0)
return typeName + "(" + length + ")";
else
return typeName;
}
else if (params.startsWith("(M,D)") || params.equals("precision,scale")) {
if (precision > 0) {
typeName += "(" + precision;
if (scale > 0) {
typeName += "," + scale;
}
typeName += ")";
}
return typeName;
}
else if (params.startsWith("(")) {
int tail = params.indexOf(')');
if (tail > 0) {
String value = params.substring(1, tail);
boolean isConstant = true;
for (int i = 0; i < value.length(); i++) {
if (value.charAt(i) >= 'a' && value.charAt(i) <= 'z')
isConstant = false;
else if (value.charAt(i) >= 'A' && value.charAt(i) <= 'Z')
isConstant = false;
}
if (isConstant)
return typeName + "(" + value + ")";
}
return typeName;
}
else {
return typeName;
}
}
}
} finally {
rs.close();
}
} catch (Throwable e) {
log.log(Level.FINE, e.toString(), e);
} finally {
try {
if (conn != null)
conn.close();
} catch (Exception e) {
}
}
return null;
}
protected String getDefaultCreateTableSQL(int sqlType, int length, int precision, int scale)
{
switch (sqlType) {
case java.sql.Types.BOOLEAN:
return "CHAR";
case java.sql.Types.BIT:
case java.sql.Types.TINYINT:
case java.sql.Types.SMALLINT:
case java.sql.Types.INTEGER:
case java.sql.Types.BIGINT:
return "INTEGER";
case java.sql.Types.NUMERIC:
case java.sql.Types.DECIMAL:
String typeString = "NUMERIC";
if (precision > 0) {
typeString += "(" + precision;
if (scale > 0) {
typeString += "," + scale;
}
typeString += ")";
}
return typeString;
case java.sql.Types.DOUBLE:
case java.sql.Types.FLOAT:
return "DOUBLE";
case java.sql.Types.CHAR:
return "CHAR";
case java.sql.Types.DATE:
case java.sql.Types.TIME:
case java.sql.Types.TIMESTAMP:
return "TIMESTAMP";
default:
return "VARCHAR(" + length + ")";
}
}
/**
* Returns a connection.
*/
protected Connection getConnection()
throws SQLException
{
return _ds.getConnection();
}
}
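
An illustrative sketch (not part of this commit) of how the factory above might be exercised against H2. It assumes H2's org.h2.jdbcx.JdbcDataSource is on the classpath and uses a made-up in-memory URL and user; create() reads DatabaseMetaData.getDatabaseProductName(), sees "H2", and returns the H2MetaData subclass shown earlier, so the type queries below come from the H2 driver's own type info.

import javax.sql.DataSource;
import org.h2.jdbcx.JdbcDataSource;
import com.caucho.jdbc.JdbcMetaData;

public class JdbcMetaDataSketch {
    public static void main(String[] args) {
        // Hypothetical H2 data source; URL and user are example values.
        JdbcDataSource h2 = new JdbcDataSource();
        h2.setURL("jdbc:h2:mem:sketch");
        h2.setUser("sa");
        DataSource ds = h2;

        // Dispatches on the database product name and returns an H2MetaData instance.
        JdbcMetaData meta = JdbcMetaData.create(ds);
        System.out.println(meta.getDatabaseName());        // "H2"
        System.out.println(meta.getBlobType());            // BLOB type reported by the driver
        System.out.println(meta.getLongType());            // BIGINT type reported by the driver
        System.out.println(meta.supportsIdentity());       // true (overridden in H2MetaData)
        System.out.println(meta.createIdentitySQL("INT")); // "IDENTITY"
    }
}
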
h2/src/tools/oracle/toplink/essentials/platform/database/H2Platform.java.txt
0 → 100644
/*
* The contents of this file are subject to the terms
* of the Common Development and Distribution License
* (the "License"). You may not use this file except
* in compliance with the License.
*
* You can obtain a copy of the license at
* glassfish/bootstrap/legal/CDDLv1.0.txt or
* https://glassfish.dev.java.net/public/CDDLv1.0.html.
* See the License for the specific language governing
* permissions and limitations under the License.
*
* When distributing Covered Code, include this CDDL
* HEADER in each file and include the License file at
* glassfish/bootstrap/legal/CDDLv1.0.txt. If applicable,
* add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your
* own identifying information: Portions Copyright [yyyy]
* [name of copyright owner]
*/
// Copyright (c) 1998, 2006, Oracle. All rights reserved.
package oracle.toplink.essentials.platform.database;
import java.util.*;
import java.io.*;
import java.sql.*;
import oracle.toplink.essentials.exceptions.*;
import oracle.toplink.essentials.queryframework.*;
import oracle.toplink.essentials.internal.helper.*;
import oracle.toplink.essentials.expressions.*;
import oracle.toplink.essentials.internal.expressions.*;
import oracle.toplink.essentials.internal.databaseaccess.*;
import oracle.toplink.essentials.internal.sessions.AbstractSession;
/**
* <p><b>Purpose</b>: Provides H2-specific behaviour.
*/
public class H2Platform extends DatabasePlatform {
public H2Platform() {
}
protected Hashtable buildFieldTypes() {
Hashtable fieldTypeMapping;
fieldTypeMapping = super.buildFieldTypes();
fieldTypeMapping.put(Boolean.class, new FieldTypeDefinition("TINYINT", false));
fieldTypeMapping.put(Integer.class, new FieldTypeDefinition("INTEGER", false));
fieldTypeMapping.put(Long.class, new FieldTypeDefinition("NUMERIC", 19));
fieldTypeMapping.put(Float.class, new FieldTypeDefinition("REAL", false));
fieldTypeMapping.put(Double.class, new FieldTypeDefinition("REAL", false));
fieldTypeMapping.put(Short.class, new FieldTypeDefinition("SMALLINT", false));
fieldTypeMapping.put(Byte.class, new FieldTypeDefinition("SMALLINT", false));
fieldTypeMapping.put(java.math.BigInteger.class, new FieldTypeDefinition("NUMERIC", 38));
fieldTypeMapping.put(java.math.BigDecimal.class, new FieldTypeDefinition("NUMERIC", 38).setLimits(38, -19, 19));
fieldTypeMapping.put(Number.class, new FieldTypeDefinition("NUMERIC", 38).setLimits(38, -19, 19));
fieldTypeMapping.put(Byte[].class, new FieldTypeDefinition("BINARY", false));
fieldTypeMapping.put(Character[].class, new FieldTypeDefinition("LONGVARCHAR", false));
fieldTypeMapping.put(byte[].class, new FieldTypeDefinition("BINARY", false));
fieldTypeMapping.put(char[].class, new FieldTypeDefinition("LONGVARCHAR", false));
fieldTypeMapping.put(java.sql.Blob.class, new FieldTypeDefinition("BINARY", false));
fieldTypeMapping.put(java.sql.Clob.class, new FieldTypeDefinition("LONGVARCHAR", false));
fieldTypeMapping.put(java.sql.Date.class, new FieldTypeDefinition("DATE", false));
fieldTypeMapping.put(java.sql.Time.class, new FieldTypeDefinition("TIME", false));
fieldTypeMapping.put(java.sql.Timestamp.class, new FieldTypeDefinition("TIMESTAMP", false));
return fieldTypeMapping;
}
// public boolean isHSQL() {
// return true;
// }
public boolean isH2() {
return true;
}
public boolean supportsForeignKeyConstraints() {
return true;
}
public ValueReadQuery buildSelectQueryForNativeSequence(String seqName, Integer size) {
return new ValueReadQuery("CALL NEXT VALUE FOR " + getQualifiedSequenceName(seqName));
// return new ValueReadQuery("SELECT " + getQualifiedSequenceName(seqName) + ".NEXTVAL FROM DUAL");
}
public boolean supportsNativeSequenceNumbers() {
return true;
}
protected String getQualifiedSequenceName(String seqName) {
if (getTableQualifier().equals("")) {
return seqName;
} else {
return getTableQualifier() + "." + seqName;
}
}
public boolean supportsSelectForUpdateNoWait() {
return true;
}
protected ExpressionOperator todayOperator() {
return ExpressionOperator.simpleFunctionNoParentheses(ExpressionOperator.Today, "SYSDATE");
}
protected void initializePlatformOperators() {
super.initializePlatformOperators();
addOperator(ExpressionOperator.simpleMath(ExpressionOperator.Concat, "||"));
}
public boolean shouldUseJDBCOuterJoinSyntax() {
return false;
}
}
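
An illustrative sketch (not part of this commit) of what the TopLink platform above produces for a native sequence read. It assumes a bare H2Platform can be constructed outside a session and that no table qualifier is configured; EMP_SEQ is a made-up sequence name.

import oracle.toplink.essentials.platform.database.H2Platform;
import oracle.toplink.essentials.queryframework.ValueReadQuery;

public class H2PlatformSketch {
    public static void main(String[] args) {
        H2Platform platform = new H2Platform();
        // With an empty table qualifier, getQualifiedSequenceName returns the
        // name unchanged, so the query wraps: CALL NEXT VALUE FOR EMP_SEQ
        ValueReadQuery next = platform.buildSelectQueryForNativeSequence("EMP_SEQ", 1);
        // H2 executes this as a single-value call that advances and returns the sequence.
    }
}
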
h2/src/tools/org/apache/openjpa/jdbc/sql/H2Dictionary.java.txt
0 → 100644
package org.apache.openjpa.jdbc.sql;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Arrays;
import java.util.Locale;
import org.apache.commons.lang.StringUtils;
import org.apache.openjpa.jdbc.kernel.exps.FilterValue;
import org.apache.openjpa.jdbc.schema.Column;
import org.apache.openjpa.jdbc.schema.PrimaryKey;
import org.apache.openjpa.jdbc.schema.Table;
import org.apache.openjpa.jdbc.schema.Unique;
import org.apache.openjpa.meta.JavaTypes;
public class H2Dictionary extends DBDictionary {
public H2Dictionary() {
platform = "H2";
validationSQL = "CALL 1";
closePoolSQL = "SHUTDOWN";
supportsAutoAssign = true;
lastGeneratedKeyQuery = "CALL IDENTITY()";
autoAssignClause = "IDENTITY";
autoAssignTypeName = "INTEGER";
nextSequenceQuery = "CALL NEXT VALUE FOR {0}";
// CROSS JOIN is currently not supported
crossJoinClause = "JOIN";
requiresConditionForCrossJoin = true;
stringLengthFunction = "LENGTH({0})";
trimLeadingFunction = "LTRIM({0})";
trimTrailingFunction = "RTRIM({0})";
trimBothFunction = "TRIM({0})";
useSchemaName = true;
supportsSelectForUpdate = true;
supportsSelectStartIndex = true;
supportsSelectEndIndex = true;
rangePosition = RANGE_POST_LOCK;
supportsDeferredConstraints = false;
useGetObjectForBlobs = true;
blobTypeName = "BLOB";
doubleTypeName = "DOUBLE";
supportsNullTableForGetPrimaryKeys = true;
supportsNullTableForGetIndexInfo = true;
requiresCastForMathFunctions = false;
requiresCastForComparisons = false;
reservedWordSet.addAll(Arrays.asList(new String[] { "CURRENT_TIMESTAMP", "CURRENT_TIME", "CURRENT_DATE", "CROSS", "DISTINCT", "EXCEPT", "EXISTS", "FROM", "FOR", "FALSE",
"FULL", "GROUP", "HAVING", "INNER", "INTERSECT", "IS", "JOIN", "LIKE", "MINUS", "NATURAL", "NOT", "NULL", "ON", "ORDER", "PRIMARY", "ROWNUM", "SELECT", "SYSDATE",
"SYSTIME", "SYSTIMESTAMP", "TODAY", "TRUE", "UNION", "WHERE" }));
}
public int getJDBCType(int metaTypeCode, boolean lob) {
int type = super.getJDBCType(metaTypeCode, lob);
switch (type) {
case Types.BIGINT:
if (metaTypeCode == JavaTypes.BIGINTEGER)
return Types.NUMERIC;
break;
}
return type;
}
public int getPreferredType(int type) {
return super.getPreferredType(type);
}
public String[] getAddPrimaryKeySQL(PrimaryKey pk) {
return new String[0];
}
public String[] getDropPrimaryKeySQL(PrimaryKey pk) {
return new String[0];
}
public String[] getAddColumnSQL(Column column) {
return new String[] { "ALTER TABLE " + getFullName(column.getTable(), false) + " ADD COLUMN " + getDeclareColumnSQL(column, true) };
}
public String[] getCreateTableSQL(Table table) {
StringBuffer buf = new StringBuffer();
buf.append("CREATE TABLE ").append(getFullName(table, false)).append(" (");
Column[] cols = table.getColumns();
for (int i = 0; i < cols.length; i++) {
if (i > 0)
buf.append(", ");
buf.append(getDeclareColumnSQL(cols[i], false));
}
PrimaryKey pk = table.getPrimaryKey();
String pkStr;
if (pk != null) {
pkStr = getPrimaryKeyConstraintSQL(pk);
if (!StringUtils.isEmpty(pkStr))
buf.append(", ").append(pkStr);
}
Unique[] unqs = table.getUniques();
String unqStr;
for (int i = 0; i < unqs.length; i++) {
unqStr = getUniqueConstraintSQL(unqs[i]);
if (unqStr != null)
buf.append(", ").append(unqStr);
}
buf.append(")");
return new String[] { buf.toString() };
}
protected String getPrimaryKeyConstraintSQL(PrimaryKey pk) {
Column[] cols = pk.getColumns();
if (cols.length == 1 && cols[0].isAutoAssigned())
return null;
return super.getPrimaryKeyConstraintSQL(pk);
}
public boolean isSystemIndex(String name, Table table) {
return name.toUpperCase(Locale.ENGLISH).startsWith("SYSTEM_");
}
protected String getSequencesSQL(String schemaName, String sequenceName) {
StringBuffer buf = new StringBuffer();
buf.append("SELECT SEQUENCE_SCHEMA, SEQUENCE_NAME FROM ").append("INFORMATION_SCHEMA.SEQUENCES");
if (schemaName != null || sequenceName != null)
buf.append(" WHERE ");
if (schemaName != null) {
buf.append("SEQUENCE_SCHEMA = ?");
if (sequenceName != null)
buf.append(" AND ");
}
if (sequenceName != null)
buf.append("SEQUENCE_NAME = ?");
return buf.toString();
}
protected SQLBuffer toOperation(String op, SQLBuffer selects, SQLBuffer from, SQLBuffer where, SQLBuffer group, SQLBuffer having, SQLBuffer order, boolean distinct,
boolean forUpdate, long start, long end) {
return super.toOperation(op, selects, from, where, group, having, order, distinct, forUpdate, start, end);
}
public Column[] getColumns(DatabaseMetaData meta, String catalog, String schemaName, String tableName, String columnName, Connection conn) throws SQLException {
Column[] cols = super.getColumns(meta, catalog, schemaName, tableName, columnName, conn);
return cols;
}
public void setDouble(PreparedStatement stmnt, int idx, double val, Column col) throws SQLException {
super.setDouble(stmnt, idx, val, col);
}
public void setBigDecimal(PreparedStatement stmnt, int idx, BigDecimal val, Column col) throws SQLException {
super.setBigDecimal(stmnt, idx, val, col);
}
protected void appendSelectRange(SQLBuffer buf, long start, long end) {
if (end != Long.MAX_VALUE)
buf.append(" LIMIT ").appendValue(end - start);
if (start != 0)
buf.append(" OFFSET ").appendValue(start);
}
public void substring(SQLBuffer buf, FilterValue str, FilterValue start, FilterValue end) {
buf.append("SUBSTR(");
str.appendTo(buf);
buf.append(", (");
start.appendTo(buf);
buf.append(" + 1)");
if (end != null) {
buf.append(", (");
end.appendTo(buf);
buf.append(" - ");
start.appendTo(buf);
buf.append(")");
}
buf.append(")");
}
public void indexOf(SQLBuffer buf, FilterValue str, FilterValue find, FilterValue start) {
buf.append("(POSITION(");
find.appendTo(buf);
buf.append(" IN ");
if (start != null)
substring(buf, str, start, null);
else
str.appendTo(buf);
buf.append(") - 1");
if (start != null) {
buf.append(" + ");
start.appendTo(buf);
}
buf.append(")");
}
}
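
An illustrative sketch (not part of this commit) exercising two of the dictionary's string-building helpers in isolation. The class sits in the same package only because getSequencesSQL is protected; PUBLIC and EMP_SEQ are made-up names, and it assumes an H2Dictionary can be instantiated directly.

package org.apache.openjpa.jdbc.sql;

public class H2DictionarySketch {
    public static void main(String[] args) {
        H2Dictionary dict = new H2Dictionary();

        // With both filters supplied this builds:
        // SELECT SEQUENCE_SCHEMA, SEQUENCE_NAME FROM INFORMATION_SCHEMA.SEQUENCES
        //   WHERE SEQUENCE_SCHEMA = ? AND SEQUENCE_NAME = ?
        System.out.println(dict.getSequencesSQL("PUBLIC", "EMP_SEQ"));

        // H2 prefixes its auto-generated index names with SYSTEM_, so they are
        // reported as system indexes and skipped during schema reflection.
        System.out.println(dict.isSystemIndex("SYSTEM_LOB_STREAM", null));
    }
}
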