Unverified Commit b3ad0a5f authored by Kerwin, committed by GitHub

[Optimization][Style] Configure global checkstyle validation. (#953)

* Configure global checkstyle validation.

* Configure global checkstyle validation.
parent c6e87d9d
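Most of the hunks below exist only to regroup imports for the new ImportOrder checkstyle module. Judging by the staticGroups value com.dlink,org.apache,java,javax,org,com in the checkstyle.xml hunk at the bottom, and by the reordered files themselves, the enforced layout looks roughly like this hypothetical class (assuming the non-static import groups mirror the static ones, with ungrouped packages such as lombok sinking to the bottom):

package com.dlink.example;                            // hypothetical class, for illustration only

import com.dlink.assertion.Asserts;                   // group 1: com.dlink

import org.apache.flink.configuration.Configuration;  // group 2: org.apache

import java.util.ArrayList;                           // group 3: java (group 4, javax, omitted here)
import java.util.List;

import org.slf4j.Logger;                              // group 5: org
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.JsonNode;       // group 6: com

import lombok.extern.slf4j.Slf4j;                     // ungrouped imports land last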
......@@ -53,7 +53,7 @@ jobs:
${{ runner.os }}-maven-
- name: Check Style
run: |
./mvnw -T 2C -B checkstyle:check --no-snapshot-updates
./mvnw -T 2C -B --no-snapshot-updates clean checkstyle:check
- name: Build and Package
run: |
./mvnw -B clean install \
......
......@@ -19,19 +19,26 @@
package com.dlink.controller;
import com.dlink.common.result.ProTableResult;
import com.dlink.common.result.Result;
import com.dlink.model.FragmentVariable;
import com.dlink.service.FragmentVariableService;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.extern.slf4j.Slf4j;
/**
* FragmentVariableController
*
......
......@@ -19,11 +19,12 @@
package com.dlink.dto;
import com.dlink.assertion.Asserts;
import com.dlink.job.JobConfig;
import java.util.HashMap;
import java.util.Map;
import com.dlink.assertion.Asserts;
import com.dlink.job.JobConfig;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
......
......@@ -21,6 +21,7 @@ package com.dlink.mapper;
import com.dlink.db.mapper.SuperMapper;
import com.dlink.model.FragmentVariable;
import org.apache.ibatis.annotations.Mapper;
/**
......
......@@ -19,10 +19,12 @@
package com.dlink.model;
import com.dlink.db.model.SuperEntity;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.dlink.db.model.SuperEntity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
......
......@@ -19,13 +19,12 @@
package com.dlink.service;
import com.dlink.db.service.ISuperService;
import com.dlink.model.FragmentVariable;
import java.util.List;
import java.util.Map;
import com.dlink.db.service.ISuperService;
import com.dlink.model.FragmentVariable;
/**
* FragmentVariableService
*
......
......@@ -19,6 +19,11 @@
package com.dlink.service.impl;
import com.dlink.db.service.impl.SuperServiceImpl;
import com.dlink.mapper.FragmentVariableMapper;
import com.dlink.model.FragmentVariable;
import com.dlink.service.FragmentVariableService;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
......@@ -26,10 +31,6 @@ import java.util.Map;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.dlink.db.service.impl.SuperServiceImpl;
import com.dlink.mapper.FragmentVariableMapper;
import com.dlink.model.FragmentVariable;
import com.dlink.service.FragmentVariableService;
/**
......
......@@ -119,6 +119,8 @@ public class JobInstanceServiceImpl extends SuperServiceImpl<JobInstanceMapper,
break;
case UNKNOWN:
jobInstanceStatus.setUnknown(counts);
break;
default:
}
}
jobInstanceStatus.setAll(total);
......
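The empty default: labels added here, and in the many similar CDC-builder and sink-builder hunks below, presumably satisfy checkstyle's MissingSwitchDefault check, which demands a default branch in every switch even when nothing needs to happen. A minimal sketch of the resulting shape (the "initial" case is illustrative; only "latest-offset" appears in the visible context):

switch (config.getStartupMode().toLowerCase()) {
    case "initial":
        sourceBuilder.startupOptions(StartupOptions.initial());
        break;
    case "latest-offset":
        sourceBuilder.startupOptions(StartupOptions.latest());
        break;
    default:
        // intentionally empty; added only to satisfy the style check
}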
......@@ -19,21 +19,6 @@
package com.dlink.service.impl;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.dlink.api.FlinkAPI;
import com.dlink.assertion.Asserts;
import com.dlink.config.Dialect;
......@@ -80,6 +65,22 @@ import com.dlink.session.SessionPool;
import com.dlink.sql.FlinkQuery;
import com.dlink.utils.RunTimeUtil;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* StudioServiceImpl
*
......@@ -222,15 +223,19 @@ public class StudioServiceImpl implements StudioService {
private List<SqlExplainResult> explainCommonSql(StudioExecuteDTO studioExecuteDTO) {
if (Asserts.isNull(studioExecuteDTO.getDatabaseId())) {
return new ArrayList<SqlExplainResult>() {{
return new ArrayList<SqlExplainResult>() {
{
add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "请指定数据源"));
}};
}
};
} else {
DataBase dataBase = dataBaseService.getById(studioExecuteDTO.getDatabaseId());
if (Asserts.isNull(dataBase)) {
return new ArrayList<SqlExplainResult>() {{
return new ArrayList<SqlExplainResult>() {
{
add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "数据源不存在"));
}};
}
};
}
Driver driver = Driver.build(dataBase.getDriverConfig());
List<SqlExplainResult> sqlExplainResults = driver.explain(studioExecuteDTO.getStatement());
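The StudioServiceImpl rewrite above splits the double-brace {{ ... }} initializer so the anonymous-subclass brace and its instance-initializer brace each sit on their own line; behavior is unchanged. For reference, a plain-ArrayList version that sidesteps the anonymous subclass entirely would be (a sketch, not what the commit does):

List<SqlExplainResult> results = new ArrayList<>();
results.add(SqlExplainResult.fail(studioExecuteDTO.getStatement(), "请指定数据源"));
return results;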
......@@ -314,9 +319,9 @@ public class StudioServiceImpl implements StudioService {
return null;
}
if (studioCADTO.getDialect().equalsIgnoreCase("doris")) {
return com.dlink.explainer.sqlLineage.LineageBuilder.getSqlLineage(studioCADTO.getStatement(), "mysql", dataBase.getDriverConfig());
return com.dlink.explainer.sqllineage.LineageBuilder.getSqlLineage(studioCADTO.getStatement(), "mysql", dataBase.getDriverConfig());
} else {
return com.dlink.explainer.sqlLineage.LineageBuilder.getSqlLineage(studioCADTO.getStatement(), studioCADTO.getDialect().toLowerCase(), dataBase.getDriverConfig());
return com.dlink.explainer.sqllineage.LineageBuilder.getSqlLineage(studioCADTO.getStatement(), studioCADTO.getDialect().toLowerCase(), dataBase.getDriverConfig());
}
} else {
addFlinkSQLEnv(studioCADTO);
......
......@@ -19,15 +19,15 @@
package com.dlink.app;
import java.io.IOException;
import java.util.Map;
import com.dlink.app.db.DBConfig;
import com.dlink.app.flinksql.Submiter;
import com.dlink.assertion.Asserts;
import com.dlink.constant.FlinkParamConstant;
import com.dlink.utils.FlinkBaseUtil;
import java.io.IOException;
import java.util.Map;
/**
* MainApp
*
......
......@@ -28,6 +28,7 @@ import com.dlink.executor.ExecutorSetting;
import com.dlink.interceptor.FlinkInterceptor;
import com.dlink.parser.SqlType;
import com.dlink.trans.Operations;
import org.apache.flink.configuration.CheckpointingOptions;
import java.io.IOException;
......@@ -73,9 +74,7 @@ public class Submiter {
try {
statement = DBUtil.getOneByID(getQuerySQL(id), config);
} catch (IOException | SQLException e) {
logger.error("{} --> 获取 FlinkSQL 配置异常,ID 为 {}, \n" +
"连接信息为:{} \n" +
"异常信息为:{} ", LocalDateTime.now(), id, config.toString(), e.getMessage(), e);
logger.error("{} --> 获取 FlinkSQL 配置异常,ID 为 {}, 连接信息为:{} ,异常信息为:{} ", LocalDateTime.now(), id, config.toString(), e.getMessage(), e);
}
return statement;
}
......@@ -85,9 +84,7 @@ public class Submiter {
try {
task = DBUtil.getMapByID(getTaskInfo(id), config);
} catch (IOException | SQLException e) {
logger.error("{} --> 获取 FlinkSQL 配置异常,ID 为 {}, \n" +
"连接信息为:{} \n" +
"异常信息为:{} ", LocalDateTime.now(), id, config.toString(), e.getMessage(), e);
logger.error("{} --> 获取 FlinkSQL 配置异常,ID 为 {}, 连接信息为:{} ,异常信息为:{} ", LocalDateTime.now(), id, config.toString(), e.getMessage(), e);
}
return task;
}
......
......@@ -190,6 +190,7 @@ public abstract class AbstractSinkBuilder {
}
out.collect(uagenericRowData);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -141,6 +141,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
}
out.collect(uarow);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -190,6 +190,7 @@ public abstract class AbstractSinkBuilder {
}
out.collect(uagenericRowData);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -104,6 +104,7 @@ public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
case "latest-offset":
sourceBuilder.startupOptions(StartupOptions.latest());
break;
default:
}
} else {
sourceBuilder.startupOptions(StartupOptions.latest());
......
......@@ -141,6 +141,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
}
out.collect(uarow);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -190,6 +190,7 @@ public abstract class AbstractSinkBuilder {
}
out.collect(uagenericRowData);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -123,6 +123,7 @@ public class KafkaSinkJsonBuilder extends AbstractSinkBuilder implements SinkBui
before = (Map) value.get("before");
convertAttr(columnNameList, columnTypeList, before, value.get("op").toString(), 1, schemaName, tableName, tsMs);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Exception:", e);
......
......@@ -132,6 +132,7 @@ public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
case "latest-offset":
sourceBuilder.startupOptions(StartupOptions.latest());
break;
default:
}
} else {
sourceBuilder.startupOptions(StartupOptions.latest());
......
......@@ -103,6 +103,7 @@ public class OracleCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
case "latest-offset":
sourceBuilder.startupOptions(StartupOptions.latest());
break;
default:
}
} else {
sourceBuilder.startupOptions(StartupOptions.latest());
......
......@@ -141,6 +141,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
}
out.collect(uarow);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -190,6 +190,7 @@ public abstract class AbstractSinkBuilder {
}
out.collect(uagenericRowData);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -121,6 +121,7 @@ public class KafkaSinkJsonBuilder extends AbstractSinkBuilder implements SinkBui
before = (Map) value.get("before");
convertAttr(columnNameList, columnTypeList, before,value.get("op").toString(), 1,schemaName, tableName, tsMs);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Exception:", e);
......
......@@ -132,6 +132,7 @@ public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
case "latest-offset":
sourceBuilder.startupOptions(StartupOptions.latest());
break;
default:
}
} else {
sourceBuilder.startupOptions(StartupOptions.latest());
......
......@@ -103,6 +103,7 @@ public class OracleCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
case "latest-offset":
sourceBuilder.startupOptions(StartupOptions.latest());
break;
default:
}
} else {
sourceBuilder.startupOptions(StartupOptions.latest());
......
......@@ -17,7 +17,6 @@
*
*/
package com.dlink.cdc.postgres;
import com.dlink.assertion.Asserts;
......@@ -26,8 +25,7 @@ import com.dlink.cdc.CDCBuilder;
import com.dlink.constant.ClientConstant;
import com.dlink.constant.FlinkParamConstant;
import com.dlink.model.FlinkCDCConfig;
import com.ververica.cdc.connectors.postgres.PostgreSQLSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
......@@ -36,6 +34,9 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
import com.ververica.cdc.connectors.postgres.PostgreSQLSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
/**
 * PostgresCDCBuilder
*
......@@ -44,8 +45,8 @@ import java.util.Properties;
**/
public class PostgresCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
private final static String KEY_WORD = "postgres-cdc";
private final static String METADATA_TYPE = "PostgreSql";
private static final String KEY_WORD = "postgres-cdc";
private static final String METADATA_TYPE = "PostgreSql";
public PostgresCDCBuilder() {
}
......
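The KEY_WORD and METADATA_TYPE constants swap private final static for private static final, the modifier order recommended by the JLS and enforced by checkstyle's ModifierOrder check (presumably the module in play here):

// Flagged:   private final static String KEY_WORD = "postgres-cdc";
// Compliant: private static final String KEY_WORD = "postgres-cdc";
private static final String KEY_WORD = "postgres-cdc";
private static final String METADATA_TYPE = "PostgreSql";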
......@@ -141,6 +141,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
}
out.collect(uarow);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -6,13 +6,9 @@ import com.dlink.cdc.CDCBuilder;
import com.dlink.constant.ClientConstant;
import com.dlink.constant.FlinkParamConstant;
import com.dlink.model.FlinkCDCConfig;
import com.ververica.cdc.connectors.sqlserver.SqlServerSource;
import com.ververica.cdc.connectors.sqlserver.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
......@@ -21,17 +17,22 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ververica.cdc.connectors.sqlserver.SqlServerSource;
import com.ververica.cdc.connectors.sqlserver.table.StartupOptions;
/**
 * SQL Server CDC
*
* @author 郑文豪
* @date 2022/8/12 18:00
*/
public class SqlServerCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
protected static final Logger logger = LoggerFactory.getLogger(SqlServerCDCBuilder.class);
private final static String KEY_WORD = "sqlserver-cdc";
private final static String METADATA_TYPE = "SqlServer";
private static final String KEY_WORD = "sqlserver-cdc";
private static final String METADATA_TYPE = "SqlServer";
public SqlServerCDCBuilder() {
}
......@@ -55,8 +56,8 @@ public class SqlServerCDCBuilder extends AbstractCDCBuilder implements CDCBuilde
String database = config.getDatabase();
Properties debeziumProperties = new Properties();
// Set default values for some type conversions
// debeziumProperties.setProperty("bigint.unsigned.handling.mode", "long");
// debeziumProperties.setProperty("decimal.handling.mode", "string");
//debeziumProperties.setProperty("bigint.unsigned.handling.mode", "long");
//debeziumProperties.setProperty("decimal.handling.mode", "string");
for (Map.Entry<String, String> entry : config.getDebezium().entrySet()) {
if (Asserts.isNotNullString(entry.getKey()) && Asserts.isNotNullString(entry.getValue())) {
debeziumProperties.setProperty(entry.getKey(), entry.getValue());
......@@ -86,7 +87,7 @@ public class SqlServerCDCBuilder extends AbstractCDCBuilder implements CDCBuilde
} else {
sourceBuilder.tableList(new String[0]);
}
// sourceBuilder.deserializer(new JsonDebeziumDeserializationSchema());
//sourceBuilder.deserializer(new JsonDebeziumDeserializationSchema());
sourceBuilder.deserializer(new SqlServerJsonDebeziumDeserializationSchema());
if (Asserts.isNotNullString(config.getStartupMode())) {
switch (config.getStartupMode().toLowerCase()) {
......@@ -96,6 +97,7 @@ public class SqlServerCDCBuilder extends AbstractCDCBuilder implements CDCBuilde
case "latest-offset":
sourceBuilder.startupOptions(StartupOptions.latest());
break;
default:
}
} else {
sourceBuilder.startupOptions(StartupOptions.latest());
......
package com.dlink.cdc.sqlserver;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverter;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.source.SourceRecord;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.storage.ConverterType;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;
......@@ -12,12 +8,16 @@ import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverter;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.source.SourceRecord;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.storage.ConverterType;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
/**
* @version 1.0
 * @className: com.dlink.cdc.sqlserver.SqlServerJsonDebeziumDeserializationSchema
 * @Description:
 * @author: jack zhong
 * @date 8/2/22 1:43 PM
*/
public class SqlServerJsonDebeziumDeserializationSchema implements DebeziumDeserializationSchema<String> {
private static final long serialVersionUID = 1L;
......
......@@ -19,6 +19,12 @@
package com.dlink.executor;
import com.dlink.assertion.Asserts;
import com.dlink.model.LineageRel;
import com.dlink.result.SqlExplainResult;
import com.dlink.utils.FlinkStreamProgramWithoutPhysical;
import com.dlink.utils.LineageContext;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.dag.Transformation;
......@@ -70,11 +76,6 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import com.dlink.assertion.Asserts;
import com.dlink.model.LineageRel;
import com.dlink.result.SqlExplainResult;
import com.dlink.utils.FlinkStreamProgramWithoutPhysical;
import com.dlink.utils.LineageContext;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
......
......@@ -19,6 +19,8 @@
package com.dlink.utils;
import com.dlink.model.LineageRel;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Snapshot;
......@@ -34,7 +36,6 @@ import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.operations.CatalogSinkModifyOperation;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.planner.calcite.FlinkContext;
import org.apache.flink.table.planner.calcite.FlinkRelBuilder;
import org.apache.flink.table.planner.calcite.SqlExprToRexConverterFactory;
import org.apache.flink.table.planner.delegation.PlannerBase;
......@@ -47,8 +48,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import com.dlink.model.LineageRel;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtMethod;
......@@ -188,10 +187,10 @@ public class LineageContext {
return MiniBatchInterval.NONE;
}
private PlannerBase getPlanner() {
return (PlannerBase) tableEnv.getPlanner();
}
});
}
......
......@@ -190,6 +190,7 @@ public abstract class AbstractSinkBuilder {
}
out.collect(uagenericRowData);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -121,6 +121,7 @@ public class KafkaSinkJsonBuilder extends AbstractSinkBuilder implements SinkBui
before = (Map) value.get("before");
convertAttr(columnNameList, columnTypeList, before, value.get("op").toString(), 1, schemaName, tableName, tsMs);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Exception:", e);
......
......@@ -132,6 +132,7 @@ public class MysqlCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
case "latest-offset":
sourceBuilder.startupOptions(StartupOptions.latest());
break;
default:
}
} else {
sourceBuilder.startupOptions(StartupOptions.latest());
......
......@@ -103,6 +103,7 @@ public class OracleCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
case "latest-offset":
sourceBuilder.startupOptions(StartupOptions.latest());
break;
default:
}
} else {
sourceBuilder.startupOptions(StartupOptions.latest());
......
......@@ -141,6 +141,7 @@ public class SQLSinkBuilder extends AbstractSinkBuilder implements SinkBuilder,
}
out.collect(uarow);
break;
default:
}
} catch (Exception e) {
logger.error("SchameTable: {} - Row: {} - Exception: {}", schemaTableName, JSONUtil.toJsonString(value), e.getCause().getMessage());
......
......@@ -43,7 +43,7 @@ public class LineageRel {
private String targetColumn;
private final static String DELIMITER = ".";
private static final String DELIMITER = ".";
public LineageRel(String sourceCatalog, String sourceDatabase, String sourceTable, String sourceColumn, String targetCatalog, String targetDatabase, String targetTable,
String targetColumn) {
......
......@@ -19,11 +19,12 @@
package com.dlink.model;
import com.dlink.assertion.Asserts;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.dlink.assertion.Asserts;
import com.fasterxml.jackson.databind.JsonNode;
/**
......@@ -40,14 +41,16 @@ public class SystemConfiguration {
return systemConfiguration;
}
private static final List<Configuration> CONFIGURATION_LIST = new ArrayList<Configuration>() {{
private static final List<Configuration> CONFIGURATION_LIST = new ArrayList<Configuration>() {
{
add(systemConfiguration.sqlSubmitJarPath);
add(systemConfiguration.sqlSubmitJarParas);
add(systemConfiguration.sqlSubmitJarMainAppClass);
add(systemConfiguration.useRestAPI);
add(systemConfiguration.useLogicalPlan);
add(systemConfiguration.sqlSeparator);
}};
}
};
private Configuration sqlSubmitJarPath = new Configuration(
"sqlSubmitJarPath",
......
......@@ -26,6 +26,7 @@ import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKN
import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL;
import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS;
import com.dlink.assertion.Asserts;
import java.io.IOException;
import java.util.ArrayList;
......@@ -50,8 +51,6 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.fasterxml.jackson.databind.type.CollectionType;
import com.dlink.assertion.Asserts;
/**
* JSONUtil
*
......
......@@ -57,8 +57,8 @@ import org.slf4j.LoggerFactory;
/** A JDBC outputFormat that supports batching records before writing records to database. */
@Internal
public class JdbcBatchingOutputFormat<
In, JdbcIn, JdbcExec extends JdbcBatchStatementExecutor<JdbcIn>>
extends AbstractJdbcOutputFormat<In> {
I, J, E extends JdbcBatchStatementExecutor<J>>
extends AbstractJdbcOutputFormat<I> {
/**
* An interface to extract a value from given argument.
......@@ -85,10 +85,10 @@ public class JdbcBatchingOutputFormat<
private static final Logger LOG = LoggerFactory.getLogger(JdbcBatchingOutputFormat.class);
private final JdbcExecutionOptions executionOptions;
private final StatementExecutorFactory<JdbcExec> statementExecutorFactory;
private final RecordExtractor<In, JdbcIn> jdbcRecordExtractor;
private final StatementExecutorFactory<E> statementExecutorFactory;
private final RecordExtractor<I, J> jdbcRecordExtractor;
private transient JdbcExec jdbcStatementExecutor;
private transient E jdbcStatementExecutor;
private transient int batchCount = 0;
private transient volatile boolean closed = false;
......@@ -100,8 +100,8 @@ public class JdbcBatchingOutputFormat<
public JdbcBatchingOutputFormat(
@Nonnull JdbcConnectionProvider connectionProvider,
@Nonnull JdbcExecutionOptions executionOptions,
@Nonnull StatementExecutorFactory<JdbcExec> statementExecutorFactory,
@Nonnull RecordExtractor<In, JdbcIn> recordExtractor) {
@Nonnull StatementExecutorFactory<E> statementExecutorFactory,
@Nonnull RecordExtractor<I, J> recordExtractor) {
super(connectionProvider);
this.executionOptions = checkNotNull(executionOptions);
this.statementExecutorFactory = checkNotNull(statementExecutorFactory);
......@@ -150,9 +150,9 @@ public class JdbcBatchingOutputFormat<
}
private JdbcExec createAndOpenStatementExecutor(
StatementExecutorFactory<JdbcExec> statementExecutorFactory) throws IOException {
JdbcExec exec = statementExecutorFactory.apply(getRuntimeContext());
private E createAndOpenStatementExecutor(
StatementExecutorFactory<E> statementExecutorFactory) throws IOException {
E exec = statementExecutorFactory.apply(getRuntimeContext());
try {
exec.prepareStatements(connectionProvider.getConnection());
} catch (SQLException e) {
......@@ -168,7 +168,7 @@ public class JdbcBatchingOutputFormat<
}
@Override
public final synchronized void writeRecord(In record) throws IOException {
public final synchronized void writeRecord(I record) throws IOException {
checkFlushException();
try {
......@@ -184,7 +184,7 @@ public class JdbcBatchingOutputFormat<
}
}
protected void addToBatch(In original, JdbcIn extracted) throws SQLException {
protected void addToBatch(I original, J extracted) throws SQLException {
jdbcStatementExecutor.addToBatch(extracted);
}
......
......@@ -57,8 +57,8 @@ import org.slf4j.LoggerFactory;
/** A JDBC outputFormat that supports batching records before writing records to database. */
@Internal
public class JdbcBatchingOutputFormat<
In, JdbcIn, JdbcExec extends JdbcBatchStatementExecutor<JdbcIn>>
extends AbstractJdbcOutputFormat<In> {
I, J, E extends JdbcBatchStatementExecutor<J>>
extends AbstractJdbcOutputFormat<I> {
/**
* An interface to extract a value from given argument.
......@@ -85,10 +85,10 @@ public class JdbcBatchingOutputFormat<
private static final Logger LOG = LoggerFactory.getLogger(JdbcBatchingOutputFormat.class);
private final JdbcExecutionOptions executionOptions;
private final StatementExecutorFactory<JdbcExec> statementExecutorFactory;
private final RecordExtractor<In, JdbcIn> jdbcRecordExtractor;
private final StatementExecutorFactory<E> statementExecutorFactory;
private final RecordExtractor<I, J> jdbcRecordExtractor;
private transient JdbcExec jdbcStatementExecutor;
private transient E jdbcStatementExecutor;
private transient int batchCount = 0;
private transient volatile boolean closed = false;
......@@ -100,8 +100,8 @@ public class JdbcBatchingOutputFormat<
public JdbcBatchingOutputFormat(
@Nonnull JdbcConnectionProvider connectionProvider,
@Nonnull JdbcExecutionOptions executionOptions,
@Nonnull StatementExecutorFactory<JdbcExec> statementExecutorFactory,
@Nonnull RecordExtractor<In, JdbcIn> recordExtractor) {
@Nonnull StatementExecutorFactory<E> statementExecutorFactory,
@Nonnull RecordExtractor<I, J> recordExtractor) {
super(connectionProvider);
this.executionOptions = checkNotNull(executionOptions);
this.statementExecutorFactory = checkNotNull(statementExecutorFactory);
......@@ -150,9 +150,9 @@ public class JdbcBatchingOutputFormat<
}
private JdbcExec createAndOpenStatementExecutor(
StatementExecutorFactory<JdbcExec> statementExecutorFactory) throws IOException {
JdbcExec exec = statementExecutorFactory.apply(getRuntimeContext());
private E createAndOpenStatementExecutor(
StatementExecutorFactory<E> statementExecutorFactory) throws IOException {
E exec = statementExecutorFactory.apply(getRuntimeContext());
try {
exec.prepareStatements(connectionProvider.getConnection());
} catch (SQLException e) {
......@@ -168,7 +168,7 @@ public class JdbcBatchingOutputFormat<
}
@Override
public final synchronized void writeRecord(In record) throws IOException {
public final synchronized void writeRecord(I record) throws IOException {
checkFlushException();
try {
......@@ -184,7 +184,7 @@ public class JdbcBatchingOutputFormat<
}
}
protected void addToBatch(In original, JdbcIn extracted) throws SQLException {
protected void addToBatch(I original, J extracted) throws SQLException {
jdbcStatementExecutor.addToBatch(extracted);
}
......
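Renaming the type parameters In, JdbcIn, JdbcExec to I, J, E brings this vendored Flink class in line with a single-uppercase-letter convention; checkstyle's ClassTypeParameterName defaults to the pattern ^[A-Z]$, which is assumed to be the check involved:

// Before: public class JdbcBatchingOutputFormat<In, JdbcIn, JdbcExec extends JdbcBatchStatementExecutor<JdbcIn>>
// After:
public class JdbcBatchingOutputFormat<I, J, E extends JdbcBatchStatementExecutor<J>>
        extends AbstractJdbcOutputFormat<I> {
    // body unchanged apart from the renamed parameters
}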
......@@ -133,6 +133,8 @@ public class FlinkAPI {
//paramMap.put("target-directory","hdfs:///flink13/ss1");
paramType = FlinkRestAPIConstant.SAVEPOINTS;
jobInfo.setStatus(JobInfo.JobStatus.RUN);
break;
default:
}
ObjectMapper mapper = new ObjectMapper();
JsonNode json = null;
......
......@@ -19,6 +19,12 @@
package com.dlink.explainer.lineage;
import com.dlink.explainer.ca.ColumnCAResult;
import com.dlink.explainer.ca.NodeRel;
import com.dlink.explainer.ca.TableCA;
import com.dlink.model.LineageRel;
import com.dlink.plus.FlinkSqlPlus;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
......@@ -26,12 +32,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import com.dlink.explainer.ca.ColumnCAResult;
import com.dlink.explainer.ca.NodeRel;
import com.dlink.explainer.ca.TableCA;
import com.dlink.model.LineageRel;
import com.dlink.plus.FlinkSqlPlus;
/**
* LineageBuilder
*
......
......@@ -109,8 +109,10 @@ public class LineageRelation {
return false;
}
LineageRelation that = (LineageRelation) o;
return Objects.equals(srcTableId, that.srcTableId) && Objects.equals(tgtTableId, that.tgtTableId) && Objects.equals(srcTableColName, that.srcTableColName) &&
Objects.equals(tgtTableColName, that.tgtTableColName);
return Objects.equals(srcTableId, that.srcTableId)
&& Objects.equals(tgtTableId, that.tgtTableId)
&& Objects.equals(srcTableColName, that.srcTableColName)
&& Objects.equals(tgtTableColName, that.tgtTableColName);
}
@Override
......
......@@ -19,11 +19,11 @@
package com.dlink.explainer.lineage;
import com.dlink.explainer.ca.TableCA;
import java.util.ArrayList;
import java.util.List;
import com.dlink.explainer.ca.TableCA;
/**
* LineageTable
*
......
......@@ -17,7 +17,7 @@
*
*/
package com.dlink.explainer.sqlLineage;
package com.dlink.explainer.sqllineage;
import com.dlink.assertion.Asserts;
import com.dlink.explainer.lineage.LineageRelation;
......
......@@ -17,7 +17,7 @@
*
*/
package com.dlink.explainer.sqlLineage;
package com.dlink.explainer.sqllineage;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,7 @@
*
*/
package com.dlink.explainer.sqlLineage;
package com.dlink.explainer.sqllineage;
import com.dlink.assertion.Asserts;
......
......@@ -17,7 +17,7 @@
*
*/
package com.dlink.explainer.sqlLineage;
package com.dlink.explainer.sqllineage;
import java.util.HashSet;
import java.util.Iterator;
......
......@@ -17,7 +17,7 @@
*
*/
package com.dlink.explainer.sqlLineage;
package com.dlink.explainer.sqllineage;
import java.util.Iterator;
......
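The sqlLineage → sqllineage renames above bring these packages in line with an all-lowercase naming convention; the exact module isn't visible in these hunks, but a PackageName check with a lowercase-only pattern is the usual enforcer (an assumption):

// Before: package com.dlink.explainer.sqlLineage;   // mixed-case segment
package com.dlink.explainer.sqllineage;              // lowercase only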
......@@ -596,6 +596,8 @@ public class JobManager {
if (Asserts.isNotNull(config.getGatewayConfig()) && Asserts.isNotNullString(config.getGatewayConfig().getFlinkConfig().getJobName())) {
sb.append("set " + YarnConfigOptions.APPLICATION_NAME.key() + " = " + config.getGatewayConfig().getFlinkConfig().getJobName() + ";\r\n");
}
break;
default:
}
sb.append(statement);
return sb.toString();
......
......@@ -73,7 +73,7 @@ public class ResultRunnable implements Runnable {
catchData(ResultPool.get(jobId));
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
......
......@@ -50,10 +50,10 @@ public class SingleSqlParserFactory {
tmp = new UpdateSqlParser(sql);
} else if (contains(sql, "(insert\\s+into)(.+)(values)(.+)")) {
tmp = new InsertSqlParser(sql);
} else if (contains(sql, "(create\\s+table)(.+)")) {
} else if (contains(sql, "(create\\s+database)(.+)")) {
} else if (contains(sql, "(show\\s+databases)")) {
} else if (contains(sql, "(use)(.+)")) {
//} else if (contains(sql, "(create\\s+table)(.+)")) {
//} else if (contains(sql, "(create\\s+database)(.+)")) {
//} else if (contains(sql, "(show\\s+databases)")) {
//} else if (contains(sql, "(use)(.+)")) {
} else if (contains(sql, "(set)(.+)")) {
tmp = new SetSqlParser(sql);
} else if (contains(sql, "(show\\s+fragment)\\s+(.+)")) {
......
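The matched-but-ignored branches (create table, create database, show databases, use) are commented out rather than left as empty blocks, which would otherwise trip an empty-block check such as checkstyle's EmptyBlock (assumed). A condensed sketch of the pattern, using the regexes from the hunk above:

if (contains(sql, "(insert\\s+into)(.+)(values)(.+)")) {
    tmp = new InsertSqlParser(sql);
//} else if (contains(sql, "(create\\s+table)(.+)")) {   // intentionally unhandled; empty block removed
} else if (contains(sql, "(set)(.+)")) {
    tmp = new SetSqlParser(sql);
}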
......@@ -68,6 +68,7 @@ public class SetOperation extends AbstractOperation implements Operation {
return null;
}
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
Map<String, List<String>> map = SingleSqlParserFactory.generateParser(statement);
if (Asserts.isNotNullMap(map) && map.size() == 2) {
......
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<artifactId>dlink</artifactId>
<groupId>com.dlink</groupId>
......
......@@ -77,7 +77,7 @@ public class FlinkConfig {
}
public static FlinkConfig build(String jobName, String jobId, String actionStr, String savePointTypeStr, String savePoint, String configParasStr) {
// List<ConfigPara> configParasList = new ArrayList<>();
//List<ConfigPara> configParasList = new ArrayList<>();
Map<String, String> configMap = new HashMap<>();
JsonNode paras = null;
if (Asserts.isNotNullString(configParasStr)) {
......
......@@ -25,7 +25,6 @@ import com.dlink.metadata.driver.DriverConfig;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.utils.JSONUtil;
import java.util.List;
......@@ -55,30 +54,30 @@ public class ClickHouseTest {
@Test
public void connectTest() {
String test = getDriver().test();
System.out.println(test);
System.out.println("end...");
//System.out.println(test);
//System.out.println("end...");
}
@Test
public void schemaTest() {
List<Schema> schemasAndTables = getDriver().getSchemasAndTables();
System.out.println(JSONUtil.toJsonString(schemasAndTables));
System.out.println("end...");
//System.out.println(JSONUtil.toJsonString(schemasAndTables));
//System.out.println("end...");
}
@Test
public void columnTest() {
Driver driver = getDriver();
List<Column> columns = driver.listColumns("xxx", "xxx");
System.out.println(JSONUtil.toJsonString(columns));
System.out.println("end...");
//System.out.println(JSONUtil.toJsonString(columns));
//System.out.println("end...");
}
@Test
public void queryTest() {
Driver driver = getDriver();
JdbcSelectResult query = driver.query("select * from xxx", 10);
System.out.println(JSONUtil.toJsonString(query));
System.out.println("end...");
//System.out.println(JSONUtil.toJsonString(query));
//System.out.println("end...");
}
}
......@@ -481,7 +481,6 @@
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
......
......@@ -21,7 +21,7 @@
<module name="Checker">
<property name="charset" value="UTF-8"/>
<property name="severity" value="info"/>
<!-- <property name="severity" value="error"/>-->
<property name="fileExtensions" value="java, properties, xml"/>
......@@ -35,6 +35,7 @@
</module>
<module name="RegexpSingleline">
<property name="severity" value="warning"/>
<property name="format" value="System\.out\.println"/>
<property name="message" value="Prohibit invoking System.out.println in source code !"/>
</module>
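This new RegexpSingleline module flags any System.out.println at warning severity, which is why the ClickHouseTest hunk further up comments its prints out. A sketch of the logger-based alternative those tests could use instead (the class name here is hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ClickHouseTestOutput {
    private static final Logger LOG = LoggerFactory.getLogger(ClickHouseTestOutput.class);

    void report(String result) {
        // passes the System.out.println regex check
        LOG.info("query result: {}", result);
    }
}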
......@@ -222,6 +223,7 @@
<module name="UnusedImports"/>
<module name="ImportOrder">
<!-- <property name="severity" value="error"/>-->
<property name="staticGroups" value="com.dlink,org.apache,java,javax,org,com"/>
<property name="separatedStaticGroups" value="true"/>
......@@ -230,6 +232,7 @@
<property name="separated" value="true"/>
<property name="option" value="top"/>
<property name="sortStaticImportsAlphabetically" value="true"/>
<message key="import.ordering" value="Import {0} appears after other imports that it should precede"/>
</module>
<module name="NoWhitespaceBefore">
......