Unverified commit 72412ddc authored by Kerwin, committed by GitHub

Added dlink-metadata module code style. (#921)

parent bf3f6795
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
......
......@@ -17,13 +17,8 @@
*
*/
package com.dlink.metadata.driver;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.dlink.assertion.Asserts;
import com.dlink.constant.CommonConstant;
import com.dlink.metadata.query.IDBQuery;
......@@ -33,11 +28,27 @@ import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.result.SqlExplainResult;
import com.dlink.utils.LogUtil;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.util.*;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
/**
* AbstractJdbcDriver
......@@ -396,7 +407,7 @@ public abstract class AbstractJdbcDriver extends AbstractDriver {
public boolean execute(String sql) throws Exception {
Asserts.checkNullString(sql, "Sql 语句为空");
try (Statement statement = conn.get().createStatement()) {
// logger.info("执行sql的连接id:" + ((DruidPooledConnection) conn).getTransactionInfo().getId());
//logger.info("执行sql的连接id:" + ((DruidPooledConnection) conn).getTransactionInfo().getId());
statement.execute(sql);
}
return true;
......
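A pattern worth noting before the remaining hunks: the commit expands wildcard imports (java.sql.*, java.util.*) into explicit single-type imports and reorders the groups as project (com.dlink), then java/javax, then other third-party packages. A minimal compilable sketch of the resulting convention — the group order is inferred from these hunks, presumably enforced by a checkstyle ImportOrder-style rule, and ImportOrderSketch is a made-up illustration class, not part of the commit:

    package com.dlink.metadata.driver;

    // Explicit imports replace the old java.sql.* / java.util.* wildcards;
    // java imports sit between the project group and other third-party groups.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.List;

    public class ImportOrderSketch {
        public List<Connection> open(String url) throws SQLException {
            List<Connection> conns = new ArrayList<>();
            conns.add(DriverManager.getConnection(url));
            return conns;
        }
    }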
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
......@@ -28,7 +27,6 @@ import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.result.SqlExplainResult;
import javax.sql.DataSource;
import java.util.List;
import java.util.Map;
import java.util.Optional;
......@@ -81,7 +79,6 @@ public interface Driver {
}
}
Driver setDriverConfig(DriverConfig config);
boolean canHandle(String type);
......
......@@ -17,16 +17,15 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
import lombok.Getter;
import lombok.Setter;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
/**
* DriverConfig
*
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import java.util.Map;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.query;
/**
......@@ -28,7 +27,6 @@ package com.dlink.metadata.query;
**/
public abstract class AbstractDBQuery implements IDBQuery {
@Override
public String createTableSql(String schemaName, String tableName) {
return "show create table " + schemaName + "." + tableName;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.query;
/**
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.result;
import com.dlink.result.AbstractResult;
......@@ -42,9 +41,11 @@ public class JdbcSelectResult extends AbstractResult implements IResult {
private Integer limit;
private static final String STATUS = "status";
private static final List<String> STATUS_COLUMN = new ArrayList<String>() {{
private static final List<String> STATUS_COLUMN = new ArrayList<String>() {
{
add("status");
}};
}
};
public JdbcSelectResult() {
}
......
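The JdbcSelectResult hunk only reflows the double-brace initializer onto separate lines; the construct itself — an anonymous ArrayList subclass with an instance initializer — is unchanged. A self-contained sketch of the kept form next to a common alternative (the alternative is a hypothetical refactor, not something this commit does):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class StatusColumnSketch {
        // Kept form: double-brace initialization creates an anonymous
        // ArrayList subclass whose instance initializer adds the value.
        private static final List<String> STATUS_COLUMN = new ArrayList<String>() {
            {
                add("status");
            }
        };

        // Hypothetical alternative that avoids the extra anonymous class.
        private static final List<String> STATUS_COLUMN_ALT =
                Collections.singletonList("status");
    }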
......@@ -17,9 +17,11 @@
*
*/
package com.dlink.metadata.ast;
import java.util.ArrayList;
import java.util.List;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLOrderBy;
......@@ -28,9 +30,6 @@ import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.ClickhouseVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import java.util.ArrayList;
import java.util.List;
public class Clickhouse20CreateTableStatement extends SQLCreateTableStatement {
protected final List<SQLAssignItem> settings = new ArrayList<SQLAssignItem>();
private SQLOrderBy orderBy;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
......@@ -17,15 +17,8 @@
*
*/
package com.dlink.metadata.driver;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.statement.SQLDropTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.sql.parser.Token;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
import com.dlink.metadata.convert.ClickHouseTypeConvert;
import com.dlink.metadata.convert.ITypeConvert;
......@@ -36,7 +29,8 @@ import com.dlink.model.Table;
import com.dlink.result.SqlExplainResult;
import com.dlink.utils.LogUtil;
import java.sql.*;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
......@@ -44,6 +38,13 @@ import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.statement.SQLDropTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.sql.parser.Token;
/**
* ClickHouseDriver
*
......@@ -54,7 +55,6 @@ public class ClickHouseDriver extends AbstractJdbcDriver {
@Override
String getDriverClass() {
// return "com.clickhouse.jdbc.ClickHouseDriver";
return "ru.yandex.clickhouse.ClickHouseDriver";
}
......
......@@ -17,9 +17,10 @@
*
*/
package com.dlink.metadata.parser;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
......@@ -28,7 +29,6 @@ import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
public class Clickhouse20CreateTableParser extends SQLCreateTableParser {
public Clickhouse20CreateTableParser(SQLExprParser exprParser) {
......
......@@ -17,9 +17,10 @@
*
*/
package com.dlink.metadata.parser;
import java.util.Arrays;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.expr.SQLArrayExpr;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
......@@ -29,15 +30,12 @@ import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
import java.util.Arrays;
public class Clickhouse20ExprParser extends SQLExprParser {
private final static String[] AGGREGATE_FUNCTIONS;
private final static long[] AGGREGATE_FUNCTIONS_CODES;
private static final String[] AGGREGATE_FUNCTIONS;
private static final long[] AGGREGATE_FUNCTIONS_CODES;
static {
String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
"ROWNUMBER"};
String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER", "ROWNUMBER"};
AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
AGGREGATE_FUNCTIONS = new String[AGGREGATE_FUNCTIONS_CODES.length];
for (String str : strings) {
......
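The Clickhouse20ExprParser hunk also swaps "private final static" to "private static final", the modifier order the Java Language Specification recommends and checkstyle's ModifierOrder rule enforces (assuming that rule is part of the new style config). A one-field sketch:

    public class ModifierOrderSketch {
        // Recommended order: visibility modifier, then static, then final —
        // so "static final", never "final static".
        private static final String[] AGGREGATE_FUNCTIONS = {"AVG", "COUNT", "MAX"};
    }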
......@@ -17,20 +17,19 @@
*
*/
package com.dlink.metadata.parser;
import java.util.HashMap;
import java.util.Map;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class Clickhouse20Lexer extends Lexer {
public final static Keywords DEFAULT_KEYWORDS;
public static final Keywords DEFAULT_KEYWORDS;
static {
Map<String, Token> map = new HashMap<String, Token>();
......
......@@ -17,11 +17,14 @@
*
*/
package com.dlink.metadata.parser;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
import com.alibaba.druid.sql.parser.*;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.sql.parser.Token;
public class Clickhouse20StatementParser extends SQLStatementParser {
public Clickhouse20StatementParser(String sql) {
......@@ -36,7 +39,6 @@ public class Clickhouse20StatementParser extends SQLStatementParser {
super(new Clickhouse20ExprParser(lexer));
}
@Override
public SQLWithSubqueryClause parseWithQuery() {
SQLWithSubqueryClause withQueryClause = new SQLWithSubqueryClause();
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.visitor;
import com.alibaba.druid.DbType;
......
......@@ -17,10 +17,11 @@
*
*/
package com.dlink.metadata.visitor;
import java.util.ArrayList;
import java.util.List;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.expr.SQLBetweenExpr;
import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr;
......@@ -31,9 +32,6 @@ import com.alibaba.druid.sql.ast.statement.SQLSelectItem;
import com.alibaba.druid.sql.visitor.ExportParameterVisitor;
import com.alibaba.druid.sql.visitor.ExportParameterVisitorUtils;
import java.util.ArrayList;
import java.util.List;
public class Clickhouse20ExportParameterVisitor extends Clickhouse20OutputVisitor implements ExportParameterVisitor {
/**
......
......@@ -17,17 +17,25 @@
*
*/
package com.dlink.metadata.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
import java.util.List;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.SQLStructDataType;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
public class Clickhouse20OutputVisitor extends SQLASTOutputVisitor implements Clickhouse20Visitor {
public Clickhouse20OutputVisitor(Appendable appender) {
super(appender, DbType.clickhouse);
......
......@@ -17,12 +17,12 @@
*
*/
package com.dlink.metadata.visitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import com.dlink.metadata.ast.Clickhouse20CreateTableStatement;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
public interface Clickhouse20Visitor extends SQLASTVisitor {
default boolean visit(Clickhouse20CreateTableStatement x) {
return true;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata;
import com.dlink.metadata.driver.ClickHouseDriver;
......@@ -27,11 +26,11 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.utils.JSONUtil;
import com.fasterxml.jackson.databind.util.JSONPObject;
import org.junit.Test;
import java.util.List;
import org.junit.Test;
/**
* ClickhouseTest
*
......@@ -41,7 +40,7 @@ import java.util.List;
public class ClickHouseTest {
private static final String IP = "127.0.0.1";
private static String url="jdbc:clickhouse://"+IP+":8123/default";
private static String url = "jdbc:clickhouse://" + IP + ":8123/default";
private ClickHouseDriver clickHouseDriver = new ClickHouseDriver();
public Driver getDriver() {
DriverConfig config = new DriverConfig();
......@@ -49,8 +48,6 @@ public class ClickHouseTest {
config.setName(clickHouseDriver.getName());
config.setIp(IP);
config.setPort(8123);
// config.setUsername(null);
// config.setPassword(null);
config.setUrl(url);
return Driver.build(config);
}
......
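Several test hunks, like the url line in ClickHouseTest above, only insert spaces around "=" and "+", matching a WhitespaceAround-style rule. Before and after in one sketch (the url value is copied from the test):

    public class WhitespaceSketch {
        private static final String IP = "127.0.0.1";
        // Before (flagged): private static String url="jdbc:clickhouse://"+IP+":8123/default";
        // After: one space on each side of every binary operator.
        private static String url = "jdbc:clickhouse://" + IP + ":8123/default";
    }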
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.constant;
public interface DorisConstant {
......@@ -29,7 +28,8 @@ public interface DorisConstant {
/**
* 查询所有schema下的所有表
*/
String QUERY_TABLE_BY_SCHEMA = " select TABLE_NAME AS `NAME`,TABLE_SCHEMA AS `SCHEMA`,TABLE_COMMENT AS COMMENT, '' as TYPE, '' as CATALOG, '' as ENGINE , '' as OPTIONS , 0 as `ROWS`, null as CREATE_TIME, null as UPDATE_TIME from information_schema.tables where TABLE_SCHEMA = '%s' ";
String QUERY_TABLE_BY_SCHEMA = " select TABLE_NAME AS `NAME`,TABLE_SCHEMA AS `SCHEMA`,TABLE_COMMENT AS COMMENT, '' as TYPE, '' as CATALOG, "
+ "'' as ENGINE , '' as OPTIONS , 0 as `ROWS`, null as CREATE_TIME, null as UPDATE_TIME from information_schema.tables where TABLE_SCHEMA = '%s' ";
/**
* 查询指定schema.table下的所有列信息
*/
......
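The DorisConstant queries, like the Phoenix and SqlServer constants later in this diff, embed '%s' placeholders; callers presumably substitute the schema or table name via String.format. A minimal usage sketch with a shortened template and a made-up schema name:

    public class QueryTemplateSketch {
        static final String QUERY_TABLE_BY_SCHEMA =
                "select TABLE_NAME from information_schema.tables where TABLE_SCHEMA = '%s'";

        public static void main(String[] args) {
            // Fill the %s placeholder with a concrete schema name (example value).
            System.out.println(String.format(QUERY_TABLE_BY_SCHEMA, "demo_db"));
        }
    }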
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.metadata.convert.DorisTypeConvert;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.query;
import com.dlink.metadata.constant.DorisConstant;
......@@ -46,43 +45,36 @@ public class DorisQuery extends AbstractDBQuery {
return "Database";
}
@Override
public String tableName() {
return "NAME";
}
@Override
public String tableComment() {
return "COMMENT";
}
@Override
public String columnName() {
return "Field";
}
@Override
public String columnType() {
return "Type";
}
@Override
public String columnComment() {
return "Comment";
}
@Override
public String columnKey() {
return "Key";
}
public boolean isKeyIdentity(ResultSet results) throws SQLException {
return "auto_increment".equals(results.getString("Extra"));
}
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
......@@ -26,14 +25,15 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.junit.Before;
import org.junit.Test;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;
import org.junit.Before;
import org.junit.Test;
public class DorisTest {
private Driver driver;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.constant;
public interface HiveConstant {
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
......@@ -31,56 +30,22 @@ import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
import java.sql.*;
import java.util.*;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class HiveDriver extends AbstractJdbcDriver implements Driver {
// @Override
// public Table getTable(String schemaName, String tableName) {
// List<Table> tables = listTables(schemaName);
// Table table = null;
// for(Table item : tables){
// if(Asserts.isEquals(item.getName(),tableName)){
// table = item;
// }
// }
// if(Asserts.isNotNull(table)) {
// List<Column> columnList = new ArrayList<>();// 接收排除 Detailed Table Information 之后的 Column对象
// List<Column> columnListWithExt = listColumns(schemaName, table.getName()); //获取所有的 Column对象
//
// Column columnExtInfoToTable = columnListWithExt.get(columnListWithExt.size() - 1); //获取 Detailed Table Information 下方解析该值 并赋值给Table的属性
// String extenedInfo = columnExtInfoToTable.getType(); //获取 Detailed Table Information 的值
// /**
// * 解析 Detailed Table Information 开始
// */
//
// System.out.println(extenedInfo);
//
// /**
// * 解析 Detailed Table Information 结束
// */
//
//
// for (int i = 0; i < columnListWithExt.size(); i++) {
// Column columnExt = columnListWithExt.get(i);
// if (!columnExt.getName().contains(HiveConstant.DETAILED_TABLE_INFO)){// 排除 Detailed Table Information
// Column columnBean = new Column();
// columnBean.setName(columnExt.getName());
// columnBean.setType(columnExt.getType());
// columnBean.setComment(columnExt.getComment());
// columnList.add(columnBean);
// }
// }
// table.setColumns(columnList);
// }
// return table;
// }
@Override
public Table getTable(String schemaName, String tableName) {
List<Table> tables = listTables(schemaName);
......@@ -240,7 +205,6 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
return createTable.toString();
}
@Override
public int executeUpdate(String sql) throws Exception {
Asserts.checkNullString(sql, "Sql 语句为空");
......@@ -331,7 +295,6 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
return "Hive";
}
@Override
public Map<String, String> getFlinkColumnTypeConversion() {
HashMap<String, String> map = new HashMap<>();
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.query;
import com.dlink.metadata.constant.HiveConstant;
......@@ -28,7 +27,6 @@ public class HiveQuery extends AbstractDBQuery {
return HiveConstant.QUERY_ALL_DATABASE;
}
@Override
public String tablesSql(String schemaName) {
return HiveConstant.QUERY_ALL_TABLES_BY_SCHEMA;
......@@ -59,19 +57,16 @@ public class HiveQuery extends AbstractDBQuery {
return "comment";
}
@Override
public String columnName() {
return "col_name";
}
@Override
public String columnType() {
return "data_type";
}
@Override
public String columnComment() {
return "comment";
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
......@@ -26,13 +25,14 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.junit.Test;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
/**
* MysqlTest
*
......@@ -46,9 +46,9 @@ public class HiveTest {
private static final String hiveDB = "test";
private static final String username = "zhumingye";
private static final String passwd = "123456";
private static final String hive="Hive";
private static final String hive = "Hive";
private static String url = "jdbc:hive2://"+IP+":"+PORT+"/"+hiveDB;
private static String url = "jdbc:hive2://" + IP + ":" + PORT + "/" + hiveDB;
public Driver getDriver() {
DriverConfig config = new DriverConfig();
......@@ -82,13 +82,11 @@ public class HiveTest {
Driver driver = getDriver();
List<Schema> schemasAndTables = driver.listSchemas();
schemasAndTables.forEach(schema -> {
System.out.println(schema.getName()+"\t\t"+schema.getTables().toString());
System.out.println(schema.getName() + "\t\t" + schema.getTables().toString());
});
System.err.println("end...");
}
@Test
public void getTablesByDBTest() throws Exception {
Driver driver = getDriver();
......@@ -103,9 +101,9 @@ public class HiveTest {
@Test
public void getColumnsByTableTest() {
Driver driver = getDriver();
List<Column> columns= driver.listColumns(hiveDB, "biz_college_planner_mysql_language_score_item");
List<Column> columns = driver.listColumns(hiveDB, "biz_college_planner_mysql_language_score_item");
for (Column column : columns) {
System.out.println(column.getName()+" \t "+column.getType()+" \t "+column.getComment());
System.out.println(column.getName() + " \t " + column.getType() + " \t " + column.getComment());
}
System.err.println("end...");
}
......@@ -113,20 +111,18 @@ public class HiveTest {
@Test
public void getCreateTableTest() throws Exception {
Driver driver = getDriver();
// JdbcSelectResult jdbcSelectResult = driver.executeSql("show create table odsp.biz_college_planner_mysql_language_score_item", 1);
Table driverTable = driver.getTable(hiveDB, "biz_college_planner_mysql_language_score_item");
String createTableSql = driver.getCreateTableSql(driverTable);
System.out.println(createTableSql);
System.err.println("end...");
}
@Test
public void getTableExtenedInfoTest() throws Exception {
Driver driver = getDriver();
Table driverTable = driver.getTable(hiveDB, "employees");
for (Column column : driverTable.getColumns()) {
System.out.println(column.getName()+"\t\t"+column.getType()+"\t\t"+column.getComment());
System.out.println(column.getName() + "\t\t" + column.getType() + "\t\t" + column.getComment());
}
}
......@@ -138,23 +134,23 @@ public class HiveTest {
* @return:
*/
@Test
public void MultipleSQLTest() throws Exception {
public void multipleSQLTest() throws Exception {
Driver driver = getDriver();
String sql ="select\n" +
" date_format(create_time,'yyyy-MM') as pay_success_time,\n" +
" sum(pay_amount)/100 as amount\n" +
"from\n" +
" odsp.pub_pay_mysql_pay_order\n" +
" group by date_format(create_time,'yyyy-MM') ;\n" +
"select\n" +
" *\n" +
"from\n" +
" odsp.pub_pay_mysql_pay_order ;";
String sql = "select\n"
+ " date_format(create_time,'yyyy-MM') as pay_success_time,\n"
+ " sum(pay_amount)/100 as amount\n"
+ "from\n"
+ " odsp.pub_pay_mysql_pay_order\n"
+ " group by date_format(create_time,'yyyy-MM') ;\n"
+ "select\n"
+ " *\n"
+ "from\n"
+ " odsp.pub_pay_mysql_pay_order ;";
JdbcSelectResult selectResult = driver.executeSql(sql,100);
for (LinkedHashMap<String, Object> rowDatum : selectResult.getRowData()) {
Set<Map.Entry<String, Object>> entrySet = rowDatum.entrySet();
for (Map.Entry<String, Object> stringObjectEntry : entrySet) {
System.out.println(stringObjectEntry.getKey()+"\t\t"+stringObjectEntry.getValue());
System.out.println(stringObjectEntry.getKey() + "\t\t" + stringObjectEntry.getValue());
}
}
}
......
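The HiveTest hunk bundles two more conventions: multi-line string concatenation now places "+" at the start of each continuation line (an OperatorWrap-style rule), and MultipleSQLTest becomes multipleSQLTest to satisfy lower-camel-case method naming. Both in one sketch, with the query shortened:

    public class OperatorWrapSketch {
        // '+' leads each continuation line, as in the rewritten HiveTest.
        private static final String SQL = "select\n"
                + "  date_format(create_time,'yyyy-MM') as pay_success_time\n"
                + "from\n"
                + "  odsp.pub_pay_mysql_pay_order";

        // Method names start lower-case: multipleSQLTest, not MultipleSQLTest.
        public void multipleSQLTest() {
            System.out.println(SQL);
        }
    }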
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.metadata.convert.ITypeConvert;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.query;
/**
......@@ -35,19 +34,19 @@ public class MySqlQuery extends AbstractDBQuery {
@Override
public String tablesSql(String schemaName) {
return "select TABLE_NAME AS `NAME`,TABLE_SCHEMA AS `Database`,TABLE_COMMENT AS COMMENT,TABLE_CATALOG AS `CATALOG`" +
",TABLE_TYPE AS `TYPE`,ENGINE AS `ENGINE`,CREATE_OPTIONS AS `OPTIONS`,TABLE_ROWS AS `ROWS`" +
",CREATE_TIME,UPDATE_TIME from information_schema.tables" +
" where TABLE_SCHEMA = '" + schemaName + "'";
return "select TABLE_NAME AS `NAME`,TABLE_SCHEMA AS `Database`,TABLE_COMMENT AS COMMENT,TABLE_CATALOG AS `CATALOG`"
+ ",TABLE_TYPE AS `TYPE`,ENGINE AS `ENGINE`,CREATE_OPTIONS AS `OPTIONS`,TABLE_ROWS AS `ROWS`"
+ ",CREATE_TIME,UPDATE_TIME from information_schema.tables"
+ " where TABLE_SCHEMA = '" + schemaName + "'";
}
@Override
public String columnsSql(String schemaName, String tableName) {
return "select COLUMN_NAME,DATA_TYPE,COLUMN_COMMENT,COLUMN_KEY,EXTRA AS AUTO_INCREMENT" +
",COLUMN_DEFAULT,IS_NULLABLE,NUMERIC_PRECISION,NUMERIC_SCALE,CHARACTER_SET_NAME" +
",COLLATION_NAME,ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS " +
"where TABLE_SCHEMA = '" + schemaName + "' and TABLE_NAME = '" + tableName + "' " +
"order by ORDINAL_POSITION";
return "select COLUMN_NAME,DATA_TYPE,COLUMN_COMMENT,COLUMN_KEY,EXTRA AS AUTO_INCREMENT"
+ ",COLUMN_DEFAULT,IS_NULLABLE,NUMERIC_PRECISION,NUMERIC_SCALE,CHARACTER_SET_NAME"
+ ",COLLATION_NAME,ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS "
+ "where TABLE_SCHEMA = '" + schemaName + "' and TABLE_NAME = '" + tableName + "' "
+ "order by ORDINAL_POSITION";
}
@Override
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
......@@ -25,11 +24,12 @@ import com.dlink.metadata.driver.DriverConfig;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import org.junit.Test;
import java.util.List;
import java.util.UUID;
import org.junit.Test;
/**
* MysqlTest
*
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
......@@ -25,11 +24,12 @@ import com.dlink.metadata.driver.DriverConfig;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import org.junit.Test;
import java.util.List;
import java.util.UUID;
import org.junit.Test;
/**
* OracleTest
*
......
......@@ -17,22 +17,18 @@
*
*/
package com.dlink.metadata.constant;
/**
* @author lcg
* @operate Phoenix常量
* @date 2022/2/16 14:19
* @return
* phoenix constant
*/
public interface PhoenixConstant {
/**
* 不指定schema列信息模板SQL
*/
String QUERY_COLUMNS_SQL_DEFAULT = " select COLUMN_NAME,COLUMN_FAMILY,DATA_TYPE,KEY_SEQ,NULLABLE, '' as CHARACTER_SET_NAME," +
" '' as COLLATION_NAME ,'' as ORDINAL_POSITION , 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, '' as AUTO_INCREMENT from SYSTEM.CATALOG where TABLE_NAME='%s' and COLUMN_NAME is not null ";
String QUERY_COLUMNS_SQL_DEFAULT = " select COLUMN_NAME,COLUMN_FAMILY,DATA_TYPE,KEY_SEQ,NULLABLE, '' as CHARACTER_SET_NAME,"
+ " '' as COLLATION_NAME ,'' as ORDINAL_POSITION , 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, '' as AUTO_INCREMENT from SYSTEM.CATALOG where TABLE_NAME='%s' and COLUMN_NAME is not null ";
/**
* 查询默认指定列信息模板SQL
*/
......@@ -46,7 +42,9 @@ public interface PhoenixConstant {
/**
* 不指定schema查询table信息模板SQL
*/
String QUERY_TABLE_BY_SCHEMA_SQL_DEFAULT = " select TABLE_NAME,TABLE_SCHEM,TABLE_TYPE,SCOPE_CATALOG as CATALOG,'' as ENGINE,'' as OPTIONS, 0 as ROWSNUM, null as CREATE_TIME, null as UPDATE_TIME from SYSTEM.CATALOG where TABLE_TYPE in ('u','v') ";
String QUERY_TABLE_BY_SCHEMA_SQL_DEFAULT =
" select TABLE_NAME,TABLE_SCHEM,TABLE_TYPE,SCOPE_CATALOG as CATALOG,'' as ENGINE,'' as OPTIONS, 0 as ROWSNUM, null as CREATE_TIME, null as UPDATE_TIME "
+ "from SYSTEM.CATALOG where TABLE_TYPE in ('u','v') ";
/**
* 根据schema查询table信息模板SQL
*/
......
......@@ -17,14 +17,10 @@
*
*/
package com.dlink.metadata.constant;
/**
* @author lcg
* @operate Phoenix常用数据类型及对应code
* @date 2022/2/16 16:49
* @return
* Phoenix common data types and corresponding codes
*/
public enum PhoenixEnum {
INTEGER(4),
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......@@ -33,8 +32,8 @@ public class PhoenixTypeConvert implements ITypeConvert {
}
String t = column.getType().toLowerCase();
boolean isNullable = !column.isKeyFlag() && column.isNullable();
if (t.contains("char") || t.contains("varchar") || t.contains("text") ||
t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
if (t.contains("char") || t.contains("varchar") || t.contains("text")
|| t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
|| t.contains("uniqueidentifier") || t.contains("sql_variant")) {
columnType = ColumnType.STRING;
} else if (t.contains("bigint")) {
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.metadata.constant.PhoenixConstant;
......@@ -28,21 +27,15 @@ import com.dlink.metadata.query.PhoenixQuery;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Table;
import org.apache.commons.lang3.StringUtils;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.List;
import java.util.Properties;
/**
* @author lcg
* @operate
* @date 2022/2/16 16:50
* @return
*/
public class PhoenixDriver extends AbstractJdbcDriver {
@Override
public IDBQuery getDBQuery() {
......
......@@ -17,18 +17,10 @@
*
*/
package com.dlink.metadata.query;
import com.dlink.metadata.constant.PhoenixConstant;
/**
* @author lcg
* @operate
* @date 2022/2/16 14:39
* @return
*/
public class PhoenixQuery extends AbstractDBQuery {
@Override
......@@ -72,31 +64,26 @@ public class PhoenixQuery extends AbstractDBQuery {
return "TABLE_NAME";
}
@Override
public String columnName() {
return "COLUMN_NAME";
}
@Override
public String columnType() {
return "DATA_TYPE";
}
@Override
public String columnComment() {
return "COLUMN_NAME";
}
@Override
public String columnKey() {
return "KEY_SEQ";
}
public String isNullable() {
return "NULLABLE";
}
......@@ -105,4 +92,5 @@ public class PhoenixQuery extends AbstractDBQuery {
public String rows() {
return "ROWSNUM";
}
}
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
......@@ -26,12 +25,13 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.junit.Before;
import org.junit.Test;
import java.util.LinkedHashMap;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
public class PhoenixTest {
private Driver driver;
......@@ -49,7 +49,6 @@ public class PhoenixTest {
}
}
@Test
public void testSchema() {
//schema && table
......@@ -70,7 +69,6 @@ public class PhoenixTest {
}
}
@Test
public void testColumns() {
// columns
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.metadata.convert.ITypeConvert;
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.query;
/**
......@@ -39,9 +38,10 @@ public class PostgreSqlQuery extends AbstractDBQuery {
@Override
public String columnsSql(String schemaName, String tableName) {
return "SELECT A.attname AS name,format_type (A.atttypid,A.atttypmod) AS type,col_description (A.attrelid,A.attnum) AS comment,\n" +
"(CASE WHEN (SELECT COUNT (*) FROM pg_constraint AS PC WHERE A.attnum = PC.conkey[1] AND PC.contype = 'p') > 0 THEN 'PRI' ELSE '' END) AS key \n" +
"FROM pg_class AS C,pg_attribute AS A WHERE A.attrelid='" + schemaName + "." + tableName + "'::regclass AND A.attrelid= C.oid AND A.attnum> 0 AND NOT A.attisdropped ORDER BY A.attnum";
return "SELECT A.attname AS name,format_type (A.atttypid,A.atttypmod) AS type,col_description (A.attrelid,A.attnum) AS comment,\n"
+ "(CASE WHEN (SELECT COUNT (*) FROM pg_constraint AS PC WHERE A.attnum = PC.conkey[1] AND PC.contype = 'p') > 0 THEN 'PRI' ELSE '' END) AS key \n"
+ "FROM pg_class AS C,pg_attribute AS A WHERE A.attrelid='" + schemaName + "." + tableName
+ "'::regclass AND A.attrelid= C.oid AND A.attnum> 0 AND NOT A.attisdropped ORDER BY A.attnum";
}
@Override
......
......@@ -17,13 +17,10 @@
*
*/
package com.dlink.metadata.constant;
/**
* @operate sqlServer常量
* @date 2022/1/26 14:11
* @return
* SqlServer constant
*/
public interface SqlServerConstant {
......@@ -35,14 +32,17 @@ public interface SqlServerConstant {
/**
* 查询列信息模板SQL
*/
String QUERY_COLUMNS_SQL = " SELECT cast(a.name AS VARCHAR(500)) AS TABLE_NAME,cast(b.name AS VARCHAR(500)) AS COLUMN_NAME, isnull(CAST ( c.VALUE AS NVARCHAR ( 500 ) ),'') AS COMMENTS, " +
" CASE b.is_nullable WHEN 1 THEN 'YES' ELSE 'NO' END as NULLVALUE,cast(sys.types.name AS VARCHAR (500)) AS DATA_TYPE," +
" ( SELECT CASE count(1) WHEN 1 then 'PRI' ELSE '' END FROM syscolumns,sysobjects,sysindexes,sysindexkeys,systypes WHERE syscolumns.xusertype = systypes.xusertype " +
" AND syscolumns.id = object_id (a.name) AND sysobjects.xtype = 'PK' AND sysobjects.parent_obj = syscolumns.id " +
" AND sysindexes.id = syscolumns.id AND sysobjects.name = sysindexes.name AND sysindexkeys.id = syscolumns.id AND sysindexkeys.indid = sysindexes.indid AND syscolumns.colid = sysindexkeys.colid " +
" AND syscolumns.name = b.name) as 'KEY', b.is_identity isIdentity , '' as CHARACTER_SET_NAME, '' as COLLATION_NAME, 0 as ORDINAL_POSITION, 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, '' as AUTO_INCREMENT FROM ( select name,object_id from sys.tables UNION all select name,object_id from sys.views ) a INNER JOIN sys.columns b " +
" ON b.object_id = a.object_id LEFT JOIN sys.types ON b.user_type_id = sys.types.user_type_id LEFT JOIN sys.extended_properties c ON c.major_id = b.object_id AND c.minor_id = b.column_id " +
" WHERE a.name = '%s' and sys.types.name !='sysname' ";
String QUERY_COLUMNS_SQL = " SELECT cast(a.name AS VARCHAR(500)) AS TABLE_NAME,cast(b.name AS VARCHAR(500)) AS COLUMN_NAME, isnull(CAST ( c.VALUE AS NVARCHAR ( 500 ) ),'') AS COMMENTS, "
+ " CASE b.is_nullable WHEN 1 THEN 'YES' ELSE 'NO' END as NULLVALUE,cast(sys.types.name AS VARCHAR (500)) AS DATA_TYPE,"
+ " ( SELECT CASE count(1) WHEN 1 then 'PRI' ELSE '' END FROM syscolumns,sysobjects,sysindexes,sysindexkeys,systypes WHERE syscolumns.xusertype = systypes.xusertype "
+ " AND syscolumns.id = object_id (a.name) AND sysobjects.xtype = 'PK' AND sysobjects.parent_obj = syscolumns.id "
+ " AND sysindexes.id = syscolumns.id AND sysobjects.name = sysindexes.name AND sysindexkeys.id = syscolumns.id AND sysindexkeys.indid = sysindexes.indid "
+ "AND syscolumns.colid = sysindexkeys.colid "
+ " AND syscolumns.name = b.name) as 'KEY', b.is_identity isIdentity , '' as CHARACTER_SET_NAME, '' as COLLATION_NAME, 0 as ORDINAL_POSITION, 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, "
+ "'' as AUTO_INCREMENT "
+ "FROM ( select name,object_id from sys.tables UNION all select name,object_id from sys.views ) a INNER JOIN sys.columns b "
+ " ON b.object_id = a.object_id LEFT JOIN sys.types ON b.user_type_id = sys.types.user_type_id LEFT JOIN sys.extended_properties c ON c.major_id = b.object_id "
+ "AND c.minor_id = b.column_id WHERE a.name = '%s' and sys.types.name !='sysname' ";
/**
* 查询schema模板SQL
......@@ -52,5 +52,7 @@ public interface SqlServerConstant {
/**
* 根据schema查询table信息模板SQL
*/
String QUERY_TABLE_BY_SCHEMA_SQL = " SELECT table_name ,table_schema, '' as type, '' as CATALOG, '' as ENGINE , '' as OPTIONS ,0 as rows , null as CREATE_TIME, null as UPDATE_TIME,null AS COMMENTS FROM INFORMATION_SCHEMA.tables WHERE TABLE_SCHEMA = '%s' ";
String QUERY_TABLE_BY_SCHEMA_SQL =
" SELECT table_name ,table_schema, '' as type, '' as CATALOG, '' as ENGINE , '' as OPTIONS ,0 as rows , null as CREATE_TIME, null as UPDATE_TIME,null AS COMMENTS "
+ "FROM INFORMATION_SCHEMA.tables WHERE TABLE_SCHEMA = '%s' ";
}
......@@ -17,18 +17,12 @@
*
*/
package com.dlink.metadata.convert;
import com.dlink.assertion.Asserts;
import com.dlink.model.Column;
import com.dlink.model.ColumnType;
/**
* @operate
* @date 2022/1/26 14:23
* @return
*/
public class SqlServerTypeConvert implements ITypeConvert {
@Override
public ColumnType convert(Column column) {
......@@ -38,8 +32,8 @@ public class SqlServerTypeConvert implements ITypeConvert {
}
String t = column.getType().toLowerCase();
boolean isNullable = !column.isKeyFlag() && column.isNullable();
if (t.contains("char") || t.contains("varchar") || t.contains("text") ||
t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
if (t.contains("char") || t.contains("varchar") || t.contains("text")
|| t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
|| t.contains("uniqueidentifier") || t.contains("sql_variant")) {
columnType = ColumnType.STRING;
} else if (t.contains("bigint")) {
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.driver;
import com.dlink.metadata.constant.SqlServerConstant;
......@@ -33,12 +32,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author lcg
* @operate
* @date 2022/1/26 14:23
* @return
*/
public class SqlServerDriver extends AbstractJdbcDriver {
@Override
public IDBQuery getDBQuery() {
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata.query;
import com.dlink.metadata.constant.SqlServerConstant;
......@@ -25,12 +24,6 @@ import com.dlink.metadata.constant.SqlServerConstant;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* @author lcg
* @operate
* @date 2022/1/26 15:42
* @return
*/
public class SqlServerQuery extends AbstractDBQuery {
@Override
......@@ -68,31 +61,26 @@ public class SqlServerQuery extends AbstractDBQuery {
return "COMMENTS";
}
@Override
public String columnName() {
return "COLUMN_NAME";
}
@Override
public String columnType() {
return "DATA_TYPE";
}
@Override
public String columnComment() {
return "COMMENTS";
}
@Override
public String columnKey() {
return "KEY";
}
public boolean isKeyIdentity(ResultSet results) throws SQLException {
return 1 == results.getInt("isIdentity");
}
......@@ -100,4 +88,5 @@ public class SqlServerQuery extends AbstractDBQuery {
public String isNullable() {
return "NULLVALUE";
}
}
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
......@@ -26,14 +25,15 @@ import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.junit.Before;
import org.junit.Test;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;
import org.junit.Before;
import org.junit.Test;
public class SqlServerTest {
private Driver driver;
......@@ -98,5 +98,4 @@ public class SqlServerTest {
}
}
}