Unverified Commit 72412ddc authored by Kerwin, committed by GitHub

Added dlink-metadata module code style. (#921)

parent bf3f6795
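The diff below applies checkstyle-style cleanups across dlink-metadata: wildcard imports are expanded and regrouped (project imports, then java.*, then other third-party packages), modifier order becomes `static final`, long SQL string concatenations are wrapped with the `+` operator at the start of each continuation line, and commented-out code and redundant blank lines are removed. The snippet below is only an illustrative sketch of those conventions; the package and class names in it are hypothetical and are not part of this commit.

package com.dlink.metadata.example; // hypothetical package, for illustration only

import com.dlink.assertion.Asserts;      // project imports first

import java.util.ArrayList;              // explicit java.* imports, no wildcards
import java.util.List;

import org.slf4j.Logger;                 // other third-party groups follow
import org.slf4j.LoggerFactory;

public class StyleExample {
    // modifier order is "static final", not "final static"
    private static final Logger LOGGER = LoggerFactory.getLogger(StyleExample.class);
    private static final List<String> STATUS_COLUMNS = new ArrayList<>();

    public String buildQuery(String schema) {
        Asserts.checkNullString(schema, "schema must not be empty");
        LOGGER.info("building query for schema {}", schema);
        // wrapped concatenation puts "+" at the start of the continuation line
        return "select TABLE_NAME, TABLE_COMMENT from information_schema.tables"
                + " where TABLE_SCHEMA = '" + schema + "'";
    }
}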
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
......
@@ -17,13 +17,8 @@
 *
 */
package com.dlink.metadata.driver;
-import com.alibaba.druid.pool.DruidDataSource;
-import com.alibaba.druid.pool.DruidPooledConnection;
-import com.alibaba.druid.sql.SQLUtils;
-import com.alibaba.druid.sql.ast.SQLStatement;
import com.dlink.assertion.Asserts;
import com.dlink.constant.CommonConstant;
import com.dlink.metadata.query.IDBQuery;
@@ -33,11 +28,27 @@ import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.result.SqlExplainResult;
import com.dlink.utils.LogUtil;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.sql.*;
-import java.util.*;
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.druid.pool.DruidPooledConnection;
+import com.alibaba.druid.sql.SQLUtils;
+import com.alibaba.druid.sql.ast.SQLStatement;
/**
 * AbstractJdbcDriver
@@ -396,7 +407,7 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
public boolean execute(String sql) throws Exception {
    Asserts.checkNullString(sql, "Sql 语句为空");
    try (Statement statement = conn.get().createStatement()) {
-        // logger.info("执行sql的连接id:" + ((DruidPooledConnection) conn).getTransactionInfo().getId());
+        //logger.info("执行sql的连接id:" + ((DruidPooledConnection) conn).getTransactionInfo().getId());
        statement.execute(sql);
    }
    return true;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
@@ -28,7 +27,6 @@ import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.result.SqlExplainResult;
-import javax.sql.DataSource;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -81,7 +79,6 @@ public interface Driver {
}
}
Driver setDriverConfig(DriverConfig config);
boolean canHandle(String type);
......
@@ -17,16 +17,15 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
-import lombok.Getter;
-import lombok.Setter;
import java.util.Map;
+import lombok.Getter;
+import lombok.Setter;
/**
 * DriverConfig
 *
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import java.util.Map;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
/**
@@ -28,7 +27,6 @@ package com.dlink.metadata.query;
 **/
public abstract class AbstractDBQuery implements IDBQuery {
@Override
public String createTableSql(String schemaName, String tableName) {
    return "show create table " + schemaName + "." + tableName;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
/**
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.result;
/**
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.result;
import com.dlink.result.AbstractResult;
@@ -42,9 +41,11 @@ public class JdbcSelectResult extends AbstractResult implements IResult {
private Integer limit;
private static final String STATUS = "status";
-private static final List<String> STATUS_COLUMN = new ArrayList<String>() {{
+private static final List<String> STATUS_COLUMN = new ArrayList<String>() {
+    {
        add("status");
-}};
+    }
+};
public JdbcSelectResult() {
}
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.rules;
/**
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.rules;
/**
......
@@ -17,9 +17,11 @@
 *
 */
package com.dlink.metadata.ast;
+import java.util.ArrayList;
+import java.util.List;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLOrderBy;
@@ -28,9 +30,6 @@ import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.ClickhouseVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
-import java.util.ArrayList;
-import java.util.List;
public class Clickhouse20CreateTableStatement extends SQLCreateTableStatement {
protected final List<SQLAssignItem> settings = new ArrayList<SQLAssignItem>();
private SQLOrderBy orderBy;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
@@ -17,15 +17,8 @@
 *
 */
package com.dlink.metadata.driver;
-import com.alibaba.druid.sql.ast.SQLStatement;
-import com.alibaba.druid.sql.ast.statement.SQLDropTableStatement;
-import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
-import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
-import com.alibaba.druid.sql.parser.ParserException;
-import com.alibaba.druid.sql.parser.Token;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
import com.dlink.metadata.convert.ClickHouseTypeConvert;
import com.dlink.metadata.convert.ITypeConvert;
@@ -36,7 +29,8 @@ import com.dlink.model.Table;
import com.dlink.result.SqlExplainResult;
import com.dlink.utils.LogUtil;
-import java.sql.*;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -44,6 +38,13 @@ import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import com.alibaba.druid.sql.ast.SQLStatement;
+import com.alibaba.druid.sql.ast.statement.SQLDropTableStatement;
+import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
+import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
+import com.alibaba.druid.sql.parser.ParserException;
+import com.alibaba.druid.sql.parser.Token;
/**
 * ClickHouseDriver
 *
@@ -54,7 +55,6 @@ public class ClickHouseDriver extends AbstractJdbcDriver {
@Override
String getDriverClass() {
-    // return "com.clickhouse.jdbc.ClickHouseDriver";
    return "ru.yandex.clickhouse.ClickHouseDriver";
}
......
@@ -17,9 +17,10 @@
 *
 */
package com.dlink.metadata.parser;
+import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
@@ -28,7 +29,6 @@ import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
-import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
public class Clickhouse20CreateTableParser extends SQLCreateTableParser {
public Clickhouse20CreateTableParser(SQLExprParser exprParser) {
......
@@ -17,9 +17,10 @@
 *
 */
package com.dlink.metadata.parser;
+import java.util.Arrays;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.expr.SQLArrayExpr;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
@@ -29,15 +30,12 @@ import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
-import java.util.Arrays;
public class Clickhouse20ExprParser extends SQLExprParser {
-private final static String[] AGGREGATE_FUNCTIONS;
-private final static long[] AGGREGATE_FUNCTIONS_CODES;
+private static final String[] AGGREGATE_FUNCTIONS;
+private static final long[] AGGREGATE_FUNCTIONS_CODES;
static {
-    String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
-            "ROWNUMBER"};
+    String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER", "ROWNUMBER"};
    AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
    AGGREGATE_FUNCTIONS = new String[AGGREGATE_FUNCTIONS_CODES.length];
    for (String str : strings) {
......
@@ -17,20 +17,19 @@
 *
 */
package com.dlink.metadata.parser;
+import java.util.HashMap;
+import java.util.Map;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
-import java.util.HashMap;
-import java.util.Map;
public class Clickhouse20Lexer extends Lexer {
-public final static Keywords DEFAULT_KEYWORDS;
+public static final Keywords DEFAULT_KEYWORDS;
static {
    Map<String, Token> map = new HashMap<String, Token>();
......
@@ -17,11 +17,14 @@
 *
 */
package com.dlink.metadata.parser;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
-import com.alibaba.druid.sql.parser.*;
+import com.alibaba.druid.sql.parser.Lexer;
+import com.alibaba.druid.sql.parser.SQLCreateTableParser;
+import com.alibaba.druid.sql.parser.SQLParserFeature;
+import com.alibaba.druid.sql.parser.SQLStatementParser;
+import com.alibaba.druid.sql.parser.Token;
public class Clickhouse20StatementParser extends SQLStatementParser {
public Clickhouse20StatementParser(String sql) {
@@ -36,7 +39,6 @@ public class Clickhouse20StatementParser extends SQLStatementParser {
    super(new Clickhouse20ExprParser(lexer));
}
@Override
public SQLWithSubqueryClause parseWithQuery() {
    SQLWithSubqueryClause withQueryClause = new SQLWithSubqueryClause();
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
/**
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.visitor;
import com.alibaba.druid.DbType;
......
@@ -17,10 +17,11 @@
 *
 */
package com.dlink.metadata.visitor;
+import java.util.ArrayList;
+import java.util.List;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.expr.SQLBetweenExpr;
import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr;
@@ -31,9 +32,6 @@ import com.alibaba.druid.sql.ast.statement.SQLSelectItem;
import com.alibaba.druid.sql.visitor.ExportParameterVisitor;
import com.alibaba.druid.sql.visitor.ExportParameterVisitorUtils;
-import java.util.ArrayList;
-import java.util.List;
public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisitor implements ExportParameterVisitor {
/**
......
@@ -17,17 +17,25 @@
 *
 */
package com.dlink.metadata.visitor;
-import com.alibaba.druid.DbType;
-import com.alibaba.druid.sql.ast.*;
-import com.alibaba.druid.sql.ast.statement.*;
-import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
import java.util.List;
+import com.alibaba.druid.DbType;
+import com.alibaba.druid.sql.ast.SQLDataType;
+import com.alibaba.druid.sql.ast.SQLExpr;
+import com.alibaba.druid.sql.ast.SQLName;
+import com.alibaba.druid.sql.ast.SQLOrderBy;
+import com.alibaba.druid.sql.ast.SQLStructDataType;
+import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn;
+import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
+import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
+import com.alibaba.druid.sql.ast.statement.SQLSelect;
+import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
+import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
public class Clickhouse20OutputVisitor extends SQLASTOutputVisitor implements Clickhouse20Visitor {
public Clickhouse20OutputVisitor(Appendable appender) {
    super(appender, DbType.clickhouse);
......
@@ -17,12 +17,12 @@
 *
 */
package com.dlink.metadata.visitor;
-import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
+import com.alibaba.druid.sql.visitor.SQLASTVisitor;
public interface Clickhouse20Visitor extends SQLASTVisitor {
default boolean visit(Clickhouse20CreateTableStatement x) {
    return true;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata;
import com.dlink.metadata.driver.ClickHouseDriver;
@@ -27,11 +26,11 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.utils.JSONUtil;
-import com.fasterxml.jackson.databind.util.JSONPObject;
-import org.junit.Test;
import java.util.List;
+import org.junit.Test;
/**
 * ClickhouseTest
 *
@@ -41,7 +40,7 @@ import java.util.List;
public class ClickHouseTest {
private static final String IP = "127.0.0.1";
-private static String url="jdbc:clickhouse://"+IP+":8123/default";
+private static String url = "jdbc:clickhouse://" + IP + ":8123/default";
private ClickHouseDriver clickHouseDriver = new ClickHouseDriver();
public Driver getDriver() {
    DriverConfig config = new DriverConfig();
@@ -49,8 +48,6 @@ public class ClickHouseTest {
    config.setName(clickHouseDriver.getName());
    config.setIp(IP);
    config.setPort(8123);
-    // config.setUsername(null);
-    // config.setPassword(null);
    config.setUrl(url);
    return Driver.build(config);
}
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.constant;
public interface DorisConstant {
@@ -29,7 +28,8 @@ public interface DorisConstant {
/**
 * 查询所有schema下的所有表
 */
-String QUERY_TABLE_BY_SCHEMA = " select TABLE_NAME AS `NAME`,TABLE_SCHEMA AS `SCHEMA`,TABLE_COMMENT AS COMMENT, '' as TYPE, '' as CATALOG, '' as ENGINE , '' as OPTIONS , 0 as `ROWS`, null as CREATE_TIME, null as UPDATE_TIME from information_schema.tables where TABLE_SCHEMA = '%s' ";
+String QUERY_TABLE_BY_SCHEMA = " select TABLE_NAME AS `NAME`,TABLE_SCHEMA AS `SCHEMA`,TABLE_COMMENT AS COMMENT, '' as TYPE, '' as CATALOG, "
+        + "'' as ENGINE , '' as OPTIONS , 0 as `ROWS`, null as CREATE_TIME, null as UPDATE_TIME from information_schema.tables where TABLE_SCHEMA = '%s' ";
/**
 * 查询指定schema.table下的所有列信息
 */
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.metadata.convert.DorisTypeConvert;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
import com.dlink.metadata.constant.DorisConstant;
@@ -46,43 +45,36 @@ public class DorisQuery extends AbstractDBQuery {
    return "Database";
}
@Override
public String tableName() {
    return "NAME";
}
@Override
public String tableComment() {
    return "COMMENT";
}
@Override
public String columnName() {
    return "Field";
}
@Override
public String columnType() {
    return "Type";
}
@Override
public String columnComment() {
    return "Comment";
}
@Override
public String columnKey() {
    return "Key";
}
public boolean isKeyIdentity(ResultSet results) throws SQLException {
    return "auto_increment".equals(results.getString("Extra"));
}
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
@@ -26,14 +25,15 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
-import org.junit.Before;
-import org.junit.Test;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;
+import org.junit.Before;
+import org.junit.Test;
public class DorisTest {
private Driver driver;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.constant;
public interface HiveConstant {
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
@@ -31,56 +30,22 @@ import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
-import java.sql.*;
-import java.util.*;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
public class HiveDriver extends AbstractJdbcDriver implements Driver {
-// @Override
-// public Table getTable(String schemaName, String tableName) {
-// List<Table> tables = listTables(schemaName);
-// Table table = null;
-// for(Table item : tables){
-// if(Asserts.isEquals(item.getName(),tableName)){
-// table = item;
-// }
-// }
-// if(Asserts.isNotNull(table)) {
-// List<Column> columnList = new ArrayList<>();// 接收排除 Detailed Table Information 之后的 Column对象
-// List<Column> columnListWithExt = listColumns(schemaName, table.getName()); //获取所有的 Column对象
-//
-// Column columnExtInfoToTable = columnListWithExt.get(columnListWithExt.size() - 1); //获取 Detailed Table Information 下方解析该值 并赋值给Table的属性
-// String extenedInfo = columnExtInfoToTable.getType(); //获取 Detailed Table Information 的值
-// /**
-// * 解析 Detailed Table Information 开始
-// */
-//
-// System.out.println(extenedInfo);
-//
-// /**
-// * 解析 Detailed Table Information 结束
-// */
-//
-//
-// for (int i = 0; i < columnListWithExt.size(); i++) {
-// Column columnExt = columnListWithExt.get(i);
-// if (!columnExt.getName().contains(HiveConstant.DETAILED_TABLE_INFO)){// 排除 Detailed Table Information
-// Column columnBean = new Column();
-// columnBean.setName(columnExt.getName());
-// columnBean.setType(columnExt.getType());
-// columnBean.setComment(columnExt.getComment());
-// columnList.add(columnBean);
-// }
-// }
-// table.setColumns(columnList);
-// }
-// return table;
-// }
@Override
public Table getTable(String schemaName, String tableName) {
    List<Table> tables = listTables(schemaName);
@@ -240,7 +205,6 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
    return createTable.toString();
}
@Override
public int executeUpdate(String sql) throws Exception {
    Asserts.checkNullString(sql, "Sql 语句为空");
@@ -331,7 +295,6 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
    return "Hive";
}
@Override
public Map<String, String> getFlinkColumnTypeConversion() {
    HashMap<String, String> map = new HashMap<>();
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
import com.dlink.metadata.constant.HiveConstant;
@@ -28,7 +27,6 @@ public class HiveQuery extends AbstractDBQuery {
    return HiveConstant.QUERY_ALL_DATABASE;
}
@Override
public String tablesSql(String schemaName) {
    return HiveConstant.QUERY_ALL_TABLES_BY_SCHEMA;
@@ -59,19 +57,16 @@
    return "comment";
}
@Override
public String columnName() {
    return "col_name";
}
@Override
public String columnType() {
    return "data_type";
}
@Override
public String columnComment() {
    return "comment";
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
@@ -26,13 +25,14 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
-import org.junit.Test;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.junit.Test;
/**
 * MysqlTest
 *
@@ -46,9 +46,9 @@ public class HiveTest {
private static final String hiveDB = "test";
private static final String username = "zhumingye";
private static final String passwd = "123456";
-private static final String hive="Hive";
+private static final String hive = "Hive";
-private static String url = "jdbc:hive2://"+IP+":"+PORT+"/"+hiveDB;
+private static String url = "jdbc:hive2://" + IP + ":" + PORT + "/" + hiveDB;
public Driver getDriver() {
    DriverConfig config = new DriverConfig();
@@ -82,13 +82,11 @@
Driver driver = getDriver();
List<Schema> schemasAndTables = driver.listSchemas();
schemasAndTables.forEach(schema -> {
-    System.out.println(schema.getName()+"\t\t"+schema.getTables().toString());
+    System.out.println(schema.getName() + "\t\t" + schema.getTables().toString());
});
System.err.println("end...");
}
@Test
public void getTablesByDBTest() throws Exception {
Driver driver = getDriver();
@@ -103,9 +101,9 @@
@Test
public void getColumnsByTableTest() {
Driver driver = getDriver();
-List<Column> columns= driver.listColumns(hiveDB, "biz_college_planner_mysql_language_score_item");
+List<Column> columns = driver.listColumns(hiveDB, "biz_college_planner_mysql_language_score_item");
for (Column column : columns) {
-    System.out.println(column.getName()+" \t "+column.getType()+" \t "+column.getComment());
+    System.out.println(column.getName() + " \t " + column.getType() + " \t " + column.getComment());
}
System.err.println("end...");
}
@@ -113,20 +111,18 @@
@Test
public void getCreateTableTest() throws Exception {
Driver driver = getDriver();
-// JdbcSelectResult jdbcSelectResult = driver.executeSql("show create table odsp.biz_college_planner_mysql_language_score_item", 1);
Table driverTable = driver.getTable(hiveDB, "biz_college_planner_mysql_language_score_item");
String createTableSql = driver.getCreateTableSql(driverTable);
System.out.println(createTableSql);
System.err.println("end...");
}
@Test
public void getTableExtenedInfoTest() throws Exception {
Driver driver = getDriver();
Table driverTable = driver.getTable(hiveDB, "employees");
for (Column column : driverTable.getColumns()) {
-    System.out.println(column.getName()+"\t\t"+column.getType()+"\t\t"+column.getComment());
+    System.out.println(column.getName() + "\t\t" + column.getType() + "\t\t" + column.getComment());
}
}
@@ -138,23 +134,23 @@
 * @return:
 */
@Test
-public void MultipleSQLTest() throws Exception {
+public void multipleSQLTest() throws Exception {
Driver driver = getDriver();
-String sql ="select\n" +
-        " date_format(create_time,'yyyy-MM') as pay_success_time,\n" +
-        " sum(pay_amount)/100 as amount\n" +
-        "from\n" +
-        " odsp.pub_pay_mysql_pay_order\n" +
-        " group by date_format(create_time,'yyyy-MM') ;\n" +
-        "select\n" +
-        " *\n" +
-        "from\n" +
-        " odsp.pub_pay_mysql_pay_order ;";
+String sql = "select\n"
+        + " date_format(create_time,'yyyy-MM') as pay_success_time,\n"
+        + " sum(pay_amount)/100 as amount\n"
+        + "from\n"
+        + " odsp.pub_pay_mysql_pay_order\n"
+        + " group by date_format(create_time,'yyyy-MM') ;\n"
+        + "select\n"
+        + " *\n"
+        + "from\n"
+        + " odsp.pub_pay_mysql_pay_order ;";
JdbcSelectResult selectResult = driver.executeSql(sql,100);
for (LinkedHashMap<String, Object> rowDatum : selectResult.getRowData()) {
    Set<Map.Entry<String, Object>> entrySet = rowDatum.entrySet();
    for (Map.Entry<String, Object> stringObjectEntry : entrySet) {
-        System.out.println(stringObjectEntry.getKey()+"\t\t"+stringObjectEntry.getValue());
+        System.out.println(stringObjectEntry.getKey() + "\t\t" + stringObjectEntry.getValue());
    }
}
}
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.metadata.convert.ITypeConvert;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
/**
@@ -35,19 +34,19 @@ public class MySqlQuery extends AbstractDBQuery {
@Override
public String tablesSql(String schemaName) {
-    return "select TABLE_NAME AS `NAME`,TABLE_SCHEMA AS `Database`,TABLE_COMMENT AS COMMENT,TABLE_CATALOG AS `CATALOG`" +
-            ",TABLE_TYPE AS `TYPE`,ENGINE AS `ENGINE`,CREATE_OPTIONS AS `OPTIONS`,TABLE_ROWS AS `ROWS`" +
-            ",CREATE_TIME,UPDATE_TIME from information_schema.tables" +
-            " where TABLE_SCHEMA = '" + schemaName + "'";
+    return "select TABLE_NAME AS `NAME`,TABLE_SCHEMA AS `Database`,TABLE_COMMENT AS COMMENT,TABLE_CATALOG AS `CATALOG`"
+            + ",TABLE_TYPE AS `TYPE`,ENGINE AS `ENGINE`,CREATE_OPTIONS AS `OPTIONS`,TABLE_ROWS AS `ROWS`"
+            + ",CREATE_TIME,UPDATE_TIME from information_schema.tables"
+            + " where TABLE_SCHEMA = '" + schemaName + "'";
}
@Override
public String columnsSql(String schemaName, String tableName) {
-    return "select COLUMN_NAME,DATA_TYPE,COLUMN_COMMENT,COLUMN_KEY,EXTRA AS AUTO_INCREMENT" +
-            ",COLUMN_DEFAULT,IS_NULLABLE,NUMERIC_PRECISION,NUMERIC_SCALE,CHARACTER_SET_NAME" +
-            ",COLLATION_NAME,ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS " +
-            "where TABLE_SCHEMA = '" + schemaName + "' and TABLE_NAME = '" + tableName + "' " +
-            "order by ORDINAL_POSITION";
+    return "select COLUMN_NAME,DATA_TYPE,COLUMN_COMMENT,COLUMN_KEY,EXTRA AS AUTO_INCREMENT"
+            + ",COLUMN_DEFAULT,IS_NULLABLE,NUMERIC_PRECISION,NUMERIC_SCALE,CHARACTER_SET_NAME"
+            + ",COLLATION_NAME,ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS "
+            + "where TABLE_SCHEMA = '" + schemaName + "' and TABLE_NAME = '" + tableName + "' "
+            + "order by ORDINAL_POSITION";
}
@Override
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
@@ -25,11 +24,12 @@ import com.dlink.metadata.driver.DriverConfig;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
-import org.junit.Test;
import java.util.List;
import java.util.UUID;
+import org.junit.Test;
/**
 * MysqlTest
 *
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
/**
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
@@ -25,11 +24,12 @@ import com.dlink.metadata.driver.DriverConfig;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
-import org.junit.Test;
import java.util.List;
import java.util.UUID;
+import org.junit.Test;
/**
 * OracleTest
 *
......
@@ -17,22 +17,18 @@
 *
 */
package com.dlink.metadata.constant;
/**
- * @author lcg
- * @operate Phoenix常量
- * @date 2022/2/16 14:19
- * @return
+ * phoenix constant
 */
public interface PhoenixConstant {
/**
 * 不指定schema列信息模板SQL
 */
-String QUERY_COLUMNS_SQL_DEFAULT = " select COLUMN_NAME,COLUMN_FAMILY,DATA_TYPE,KEY_SEQ,NULLABLE, '' as CHARACTER_SET_NAME," +
-        " '' as COLLATION_NAME ,'' as ORDINAL_POSITION , 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, '' as AUTO_INCREMENT from SYSTEM.CATALOG where TABLE_NAME='%s' and COLUMN_NAME is not null ";
+String QUERY_COLUMNS_SQL_DEFAULT = " select COLUMN_NAME,COLUMN_FAMILY,DATA_TYPE,KEY_SEQ,NULLABLE, '' as CHARACTER_SET_NAME,"
+        + " '' as COLLATION_NAME ,'' as ORDINAL_POSITION , 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, '' as AUTO_INCREMENT from SYSTEM.CATALOG where TABLE_NAME='%s' and COLUMN_NAME is not null ";
/**
 * 查询默认指定列信息模板SQL
 */
@@ -46,7 +42,9 @@ public interface PhoenixConstant {
/**
 * 不指定schema查询table信息模板SQL
 */
-String QUERY_TABLE_BY_SCHEMA_SQL_DEFAULT = " select TABLE_NAME,TABLE_SCHEM,TABLE_TYPE,SCOPE_CATALOG as CATALOG,'' as ENGINE,'' as OPTIONS, 0 as ROWSNUM, null as CREATE_TIME, null as UPDATE_TIME from SYSTEM.CATALOG where TABLE_TYPE in ('u','v') ";
+String QUERY_TABLE_BY_SCHEMA_SQL_DEFAULT =
+        " select TABLE_NAME,TABLE_SCHEM,TABLE_TYPE,SCOPE_CATALOG as CATALOG,'' as ENGINE,'' as OPTIONS, 0 as ROWSNUM, null as CREATE_TIME, null as UPDATE_TIME "
+        + "from SYSTEM.CATALOG where TABLE_TYPE in ('u','v') ";
/**
 * 根据schema查询table信息模板SQL
 */
......
@@ -17,14 +17,10 @@
 *
 */
package com.dlink.metadata.constant;
/**
- * @author lcg
- * @operate Phoenix常用数据类型及对应code
- * @date 2022/2/16 16:49
- * @return
+ * Phoenix common data types and corresponding codes
 */
public enum PhoenixEnum {
    INTEGER(4),
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
@@ -33,8 +32,8 @@ public class PhoenixTypeConvert implements ITypeConvert {
}
String t = column.getType().toLowerCase();
boolean isNullable = !column.isKeyFlag() && column.isNullable();
-if (t.contains("char") || t.contains("varchar") || t.contains("text") ||
-        t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
+if (t.contains("char") || t.contains("varchar") || t.contains("text")
+        || t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
        || t.contains("uniqueidentifier") || t.contains("sql_variant")) {
    columnType = ColumnType.STRING;
} else if (t.contains("bigint")) {
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.metadata.constant.PhoenixConstant;
@@ -28,21 +27,15 @@ import com.dlink.metadata.query.PhoenixQuery;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Table;
import org.apache.commons.lang3.StringUtils;
-import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.List;
import java.util.Properties;
-/**
- * @author lcg
- * @operate
- * @date 2022/2/16 16:50
- * @return
- */
public class PhoenixDriver extends AbstractJdbcDriver {
@Override
public IDBQuery getDBQuery() {
......
@@ -17,18 +17,10 @@
 *
 */
package com.dlink.metadata.query;
import com.dlink.metadata.constant.PhoenixConstant;
-/**
- * @author lcg
- * @operate
- * @date 2022/2/16 14:39
- * @return
- */
public class PhoenixQuery extends AbstractDBQuery {
@Override
@@ -72,31 +64,26 @@ public class PhoenixQuery extends AbstractDBQuery {
    return "TABLE_NAME";
}
@Override
public String columnName() {
    return "COLUMN_NAME";
}
@Override
public String columnType() {
    return "DATA_TYPE";
}
@Override
public String columnComment() {
    return "COLUMN_NAME";
}
@Override
public String columnKey() {
    return "KEY_SEQ";
}
public String isNullable() {
    return "NULLABLE";
}
@@ -105,4 +92,5 @@ public class PhoenixQuery extends AbstractDBQuery {
public String rows() {
    return "ROWSNUM";
}
}
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
@@ -26,12 +25,13 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
-import org.junit.Before;
-import org.junit.Test;
import java.util.LinkedHashMap;
import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
public class PhoenixTest {
private Driver driver;
@@ -49,7 +49,6 @@ public class PhoenixTest {
}
}
@Test
public void testSchema() {
    //schema && table
@@ -70,7 +69,6 @@
}
}
@Test
public void testColumns() {
    // columns
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.metadata.convert.ITypeConvert;
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
/**
@@ -39,9 +38,10 @@ public class PostgreSqlQuery extends AbstractDBQuery {
@Override
public String columnsSql(String schemaName, String tableName) {
-    return "SELECT A.attname AS name,format_type (A.atttypid,A.atttypmod) AS type,col_description (A.attrelid,A.attnum) AS comment,\n" +
-            "(CASE WHEN (SELECT COUNT (*) FROM pg_constraint AS PC WHERE A.attnum = PC.conkey[1] AND PC.contype = 'p') > 0 THEN 'PRI' ELSE '' END) AS key \n" +
-            "FROM pg_class AS C,pg_attribute AS A WHERE A.attrelid='" + schemaName + "." + tableName + "'::regclass AND A.attrelid= C.oid AND A.attnum> 0 AND NOT A.attisdropped ORDER BY A.attnum";
+    return "SELECT A.attname AS name,format_type (A.atttypid,A.atttypmod) AS type,col_description (A.attrelid,A.attnum) AS comment,\n"
+            + "(CASE WHEN (SELECT COUNT (*) FROM pg_constraint AS PC WHERE A.attnum = PC.conkey[1] AND PC.contype = 'p') > 0 THEN 'PRI' ELSE '' END) AS key \n"
+            + "FROM pg_class AS C,pg_attribute AS A WHERE A.attrelid='" + schemaName + "." + tableName
+            + "'::regclass AND A.attrelid= C.oid AND A.attnum> 0 AND NOT A.attisdropped ORDER BY A.attnum";
}
@Override
......
@@ -17,13 +17,10 @@
 *
 */
package com.dlink.metadata.constant;
/**
- * @operate sqlServer常量
- * @date 2022/1/26 14:11
- * @return
+ * SqlServer constant
 */
public interface SqlServerConstant {
@@ -35,14 +32,17 @@ public interface SqlServerConstant {
/**
 * 查询列信息模板SQL
 */
-String QUERY_COLUMNS_SQL = " SELECT cast(a.name AS VARCHAR(500)) AS TABLE_NAME,cast(b.name AS VARCHAR(500)) AS COLUMN_NAME, isnull(CAST ( c.VALUE AS NVARCHAR ( 500 ) ),'') AS COMMENTS, " +
-        " CASE b.is_nullable WHEN 1 THEN 'YES' ELSE 'NO' END as NULLVALUE,cast(sys.types.name AS VARCHAR (500)) AS DATA_TYPE," +
-        " ( SELECT CASE count(1) WHEN 1 then 'PRI' ELSE '' END FROM syscolumns,sysobjects,sysindexes,sysindexkeys,systypes WHERE syscolumns.xusertype = systypes.xusertype " +
-        " AND syscolumns.id = object_id (a.name) AND sysobjects.xtype = 'PK' AND sysobjects.parent_obj = syscolumns.id " +
-        " AND sysindexes.id = syscolumns.id AND sysobjects.name = sysindexes.name AND sysindexkeys.id = syscolumns.id AND sysindexkeys.indid = sysindexes.indid AND syscolumns.colid = sysindexkeys.colid " +
-        " AND syscolumns.name = b.name) as 'KEY', b.is_identity isIdentity , '' as CHARACTER_SET_NAME, '' as COLLATION_NAME, 0 as ORDINAL_POSITION, 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, '' as AUTO_INCREMENT FROM ( select name,object_id from sys.tables UNION all select name,object_id from sys.views ) a INNER JOIN sys.columns b " +
-        " ON b.object_id = a.object_id LEFT JOIN sys.types ON b.user_type_id = sys.types.user_type_id LEFT JOIN sys.extended_properties c ON c.major_id = b.object_id AND c.minor_id = b.column_id " +
-        " WHERE a.name = '%s' and sys.types.name !='sysname' ";
+String QUERY_COLUMNS_SQL = " SELECT cast(a.name AS VARCHAR(500)) AS TABLE_NAME,cast(b.name AS VARCHAR(500)) AS COLUMN_NAME, isnull(CAST ( c.VALUE AS NVARCHAR ( 500 ) ),'') AS COMMENTS, "
+        + " CASE b.is_nullable WHEN 1 THEN 'YES' ELSE 'NO' END as NULLVALUE,cast(sys.types.name AS VARCHAR (500)) AS DATA_TYPE,"
+        + " ( SELECT CASE count(1) WHEN 1 then 'PRI' ELSE '' END FROM syscolumns,sysobjects,sysindexes,sysindexkeys,systypes WHERE syscolumns.xusertype = systypes.xusertype "
+        + " AND syscolumns.id = object_id (a.name) AND sysobjects.xtype = 'PK' AND sysobjects.parent_obj = syscolumns.id "
+        + " AND sysindexes.id = syscolumns.id AND sysobjects.name = sysindexes.name AND sysindexkeys.id = syscolumns.id AND sysindexkeys.indid = sysindexes.indid "
+        + "AND syscolumns.colid = sysindexkeys.colid "
+        + " AND syscolumns.name = b.name) as 'KEY', b.is_identity isIdentity , '' as CHARACTER_SET_NAME, '' as COLLATION_NAME, 0 as ORDINAL_POSITION, 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, "
+        + "'' as AUTO_INCREMENT "
+        + "FROM ( select name,object_id from sys.tables UNION all select name,object_id from sys.views ) a INNER JOIN sys.columns b "
+        + " ON b.object_id = a.object_id LEFT JOIN sys.types ON b.user_type_id = sys.types.user_type_id LEFT JOIN sys.extended_properties c ON c.major_id = b.object_id "
+        + "AND c.minor_id = b.column_id WHERE a.name = '%s' and sys.types.name !='sysname' ";
/**
 * 查询schema模板SQL
@@ -52,5 +52,7 @@ public interface SqlServerConstant {
/**
 * 根据schema查询table信息模板SQL
 */
-String QUERY_TABLE_BY_SCHEMA_SQL = " SELECT table_name ,table_schema, '' as type, '' as CATALOG, '' as ENGINE , '' as OPTIONS ,0 as rows , null as CREATE_TIME, null as UPDATE_TIME,null AS COMMENTS FROM INFORMATION_SCHEMA.tables WHERE TABLE_SCHEMA = '%s' ";
+String QUERY_TABLE_BY_SCHEMA_SQL =
+        " SELECT table_name ,table_schema, '' as type, '' as CATALOG, '' as ENGINE , '' as OPTIONS ,0 as rows , null as CREATE_TIME, null as UPDATE_TIME,null AS COMMENTS "
+        + "FROM INFORMATION_SCHEMA.tables WHERE TABLE_SCHEMA = '%s' ";
}
@@ -17,18 +17,12 @@
 *
 */
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
import com.dlink.model.Column;
import com.dlink.model.ColumnType;
-/**
- * @operate
- * @date 2022/1/26 14:23
- * @return
- */
public class SqlServerTypeConvert implements ITypeConvert {
@Override
public ColumnType convert(Column column) {
@@ -38,8 +32,8 @@ public class SqlServerTypeConvert implements ITypeConvert {
}
String t = column.getType().toLowerCase();
boolean isNullable = !column.isKeyFlag() && column.isNullable();
-if (t.contains("char") || t.contains("varchar") || t.contains("text") ||
-        t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
+if (t.contains("char") || t.contains("varchar") || t.contains("text")
+        || t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
        || t.contains("uniqueidentifier") || t.contains("sql_variant")) {
    columnType = ColumnType.STRING;
} else if (t.contains("bigint")) {
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.driver;
import com.dlink.metadata.constant.SqlServerConstant;
@@ -33,12 +32,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-/**
- * @author lcg
- * @operate
- * @date 2022/1/26 14:23
- * @return
- */
public class SqlServerDriver extends AbstractJdbcDriver {
@Override
public IDBQuery getDBQuery() {
......
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata.query;
import com.dlink.metadata.constant.SqlServerConstant;
@@ -25,12 +24,6 @@ import com.dlink.metadata.constant.SqlServerConstant;
import java.sql.ResultSet;
import java.sql.SQLException;
-/**
- * @author lcg
- * @operate
- * @date 2022/1/26 15:42
- * @return
- */
public class SqlServerQuery extends AbstractDBQuery {
@Override
@@ -68,31 +61,26 @@ public class SqlServerQuery extends AbstractDBQuery {
    return "COMMENTS";
}
@Override
public String columnName() {
    return "COLUMN_NAME";
}
@Override
public String columnType() {
    return "DATA_TYPE";
}
@Override
public String columnComment() {
    return "COMMENTS";
}
@Override
public String columnKey() {
    return "KEY";
}
public boolean isKeyIdentity(ResultSet results) throws SQLException {
    return 1 == results.getInt("isIdentity");
}
@@ -100,4 +88,5 @@ public class SqlServerQuery extends AbstractDBQuery {
public String isNullable() {
    return "NULLVALUE";
}
}
@@ -17,7 +17,6 @@
 *
 */
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
@@ -26,14 +25,15 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
-import org.junit.Before;
-import org.junit.Test;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;
+import org.junit.Before;
+import org.junit.Test;
public class SqlServerTest {
private Driver driver;
@@ -98,5 +98,4 @@ public class SqlServerTest {
}
}
}