Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Contribute to GitLab
Sign in / Register
Toggle navigation
D
dsk-dsc-flink
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
shezaixing
dsk-dsc-flink
Commits
b573e61b
Commit
b573e61b
authored
May 28, 2024
by
shezaixing
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
过滤ddl,新增sql错误日志写入数据库
parent
4279b65d
Changes
3
Show whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
80 additions
and
9 deletions
+80
-9
SqlErrorLog.java
src/main/java/com/dsk/flink/dsc/common/dto/SqlErrorLog.java
+26
-0
MysqlDataTransferSink.java
.../com/dsk/flink/dsc/common/sink/MysqlDataTransferSink.java
+36
-0
SyncCustomerDataSource.java
...n/java/com/dsk/flink/dsc/sync/SyncCustomerDataSource.java
+18
-9
No files found.
src/main/java/com/dsk/flink/dsc/common/dto/SqlErrorLog.java
0 → 100644
View file @
b573e61b
package
com
.
dsk
.
flink
.
dsc
.
common
.
dto
;
import
lombok.Data
;
import
java.util.Date
;
@Data
public class SqlErrorLog {

    // Primary key; not populated by the constructors here — the DB writer
    // assigns it (snowflake-based id) when persisting the row.
    private Long id;

    // Time at which the failing SQL statement was executed.
    private Date errorTime;

    // The SQL text that failed.
    private String sql;

    // Error message extracted from the exception that aborted the statement.
    private String error;

    /**
     * Creates a log entry describing a failed SQL statement.
     *
     * @param errorTime time of the failure; defensively copied because
     *                  {@link Date} is mutable and callers may reuse the instance
     * @param sql       the SQL text that failed
     * @param error     the exception message describing the failure
     */
    public SqlErrorLog(Date errorTime, String sql, String error) {
        // Defensive copy: storing the caller's Date directly would let later
        // mutations by the caller silently change this log entry.
        this.errorTime = errorTime == null ? null : new Date(errorTime.getTime());
        this.sql = sql;
        this.error = error;
    }

    /** No-arg constructor for frameworks/serializers. */
    public SqlErrorLog() {
    }
}
src/main/java/com/dsk/flink/dsc/common/sink/MysqlDataTransferSink.java
View file @
b573e61b
package
com
.
dsk
.
flink
.
dsc
.
common
.
sink
;
package
com
.
dsk
.
flink
.
dsc
.
common
.
sink
;
import
cn.hutool.core.date.DateUtil
;
import
cn.hutool.core.date.DateUtil
;
import
cn.hutool.core.lang.Snowflake
;
import
cn.hutool.core.util.IdUtil
;
import
cn.hutool.core.util.RandomUtil
;
import
cn.hutool.db.DbUtil
;
import
com.alibaba.druid.pool.DruidDataSource
;
import
com.alibaba.druid.pool.DruidDataSource
;
import
com.dsk.flink.dsc.common.dto.SqlErrorLog
;
import
com.dsk.flink.dsc.utils.EnvProperties
;
import
com.dsk.flink.dsc.utils.EnvProperties
;
import
org.apache.flink.configuration.Configuration
;
import
org.apache.flink.configuration.Configuration
;
import
org.apache.flink.streaming.api.functions.sink.RichSinkFunction
;
import
org.apache.flink.streaming.api.functions.sink.RichSinkFunction
;
...
@@ -10,6 +15,7 @@ import org.slf4j.LoggerFactory;
...
@@ -10,6 +15,7 @@ import org.slf4j.LoggerFactory;
import
java.sql.Connection
;
import
java.sql.Connection
;
import
java.sql.PreparedStatement
;
import
java.sql.PreparedStatement
;
import
java.util.Date
;
import
java.util.concurrent.*
;
import
java.util.concurrent.*
;
public
class
MysqlDataTransferSink
extends
RichSinkFunction
<
String
>
{
public
class
MysqlDataTransferSink
extends
RichSinkFunction
<
String
>
{
...
@@ -70,6 +76,12 @@ public class MysqlDataTransferSink extends RichSinkFunction<String> {
...
@@ -70,6 +76,12 @@ public class MysqlDataTransferSink extends RichSinkFunction<String> {
System
.
out
.
println
(
"sql报错----->"
+
sql
);
System
.
out
.
println
(
"sql报错----->"
+
sql
);
e
.
printStackTrace
();
e
.
printStackTrace
();
logger
.
error
(
"------错误时间:{}-----,sql:{}--------异常:{}"
,
DateUtil
.
now
(),
sql
,
e
.
getMessage
());
logger
.
error
(
"------错误时间:{}-----,sql:{}--------异常:{}"
,
DateUtil
.
now
(),
sql
,
e
.
getMessage
());
SqlErrorLog
errorLog
=
new
SqlErrorLog
(
new
Date
(),
sql
,
e
.
getMessage
());
try
{
writeErrLog
(
errorLog
);
}
catch
(
Exception
re
){
logger
.
error
(
"错误日志保存异常 -> {}"
,
re
.
getMessage
());
}
}
finally
{
}
finally
{
if
(
pt
!=
null
)
{
if
(
pt
!=
null
)
{
pt
.
close
();
pt
.
close
();
...
@@ -79,4 +91,28 @@ public class MysqlDataTransferSink extends RichSinkFunction<String> {
...
@@ -79,4 +91,28 @@ public class MysqlDataTransferSink extends RichSinkFunction<String> {
}
}
}
}
}
}
// Entry point for persisting a SQL error log entry; currently delegates
// straight to the synchronous DB writer (kept as a seam in case buffering
// or async dispatch is added later).
private void writeErrLog(SqlErrorLog errorLog) {
    writeErrLogDb(errorLog);
}
/**
 * Persists one SQL error entry into the {@code dsc_err_log} table.
 *
 * <p>Failures here are swallowed after logging: error-log persistence must
 * never take down the sink that is already handling a failed statement.
 *
 * @param errorLog the error entry to store (time, failing SQL, message)
 */
private void writeErrLogDb(SqlErrorLog errorLog) {
    // NOTE(review): a new Snowflake with random worker/datacenter ids is built
    // per call; combined with the random suffix below this only reduces the
    // chance of id collisions across parallel sink instances — consider a
    // static instance if this becomes hot.
    Snowflake snowflake = IdUtil.getSnowflake(RandomUtil.randomInt(31), RandomUtil.randomInt(31));
    // "INSERT INTO" instead of the MySQL-only shorthand "insert dsc_err_log".
    String sql = "insert into dsc_err_log (id,error_time, error_sql, error_msg) values (?, ?, ?, ?)";
    // try-with-resources returns the pooled connection and closes the
    // statement in all paths, replacing the manual finally/DbUtil.close.
    try (Connection conn = dataSource.getConnection();
         PreparedStatement pt = conn.prepareStatement(sql)) {
        pt.setLong(1, snowflake.nextId() + RandomUtil.randomInt(10, 99));
        pt.setObject(2, errorLog.getErrorTime());
        pt.setString(3, errorLog.getSql());
        pt.setString(4, errorLog.getError());
        pt.execute();
    } catch (Exception e) {
        // Pass the throwable as the last argument so SLF4J records the full
        // stack trace instead of only the message.
        logger.error("错误日志保存异常 -> {}", e.getMessage(), e);
    }
}
}
}
src/main/java/com/dsk/flink/dsc/sync/SyncCustomerDataSource.java
View file @
b573e61b
...
@@ -54,19 +54,22 @@ public class SyncCustomerDataSource {
...
@@ -54,19 +54,22 @@ public class SyncCustomerDataSource {
envProps
.
put
(
"providerImpl"
,
JdbcConnectionProviderFactory
.
HikariDataSourceJdbcConnectionProvider
.
class
.
getName
());
envProps
.
put
(
"providerImpl"
,
JdbcConnectionProviderFactory
.
HikariDataSourceJdbcConnectionProvider
.
class
.
getName
());
System
.
out
.
println
(
"读取到的配置文件:-> "
+
envProps
.
toString
());
System
.
out
.
println
(
"读取到的配置文件:-> "
+
envProps
.
toString
());
System
.
out
.
println
(
"读取到的数据连接配置:->"
+
String
.
format
(
envProps
.
getDb_url
(),
envProps
.
getDb_host
(),
envProps
.
getDb_port
(),
envProps
.
getDb_database
()));
System
.
out
.
println
(
"读取到的数据连接配置:->"
+
String
.
format
(
envProps
.
getDb_url
(),
envProps
.
getDb_host
(),
envProps
.
getDb_port
(),
envProps
.
getDb_database
()));
System
.
out
.
println
(
"获取到的kafka消费组:->"
+
EtlUtils
.
getKafkaGroup
(
envProps
));
System
.
out
.
println
(
"读取到的数据库用户名:->"
+
envProps
.
getDb_username
());
System
.
out
.
println
(
"读取到的数据库密码:->"
+
envProps
.
getDb_password
());
System
.
out
.
println
(
"读取到的数据连接配置:->"
+
String
.
format
(
envProps
.
getDb_url
(),
envProps
.
getDb_host
(),
envProps
.
getDb_port
(),
envProps
.
getDb_database
()));
System
.
out
.
println
(
"读取到的数据连接配置:->"
+
String
.
format
(
envProps
.
getDb_url
(),
envProps
.
getDb_host
(),
envProps
.
getDb_port
(),
envProps
.
getDb_database
()));
System
.
out
.
println
(
"获取到的kafka消费组:-> {}"
+
EtlUtils
.
getKafkaGroup
(
envProps
));
System
.
out
.
println
(
"获取到的kafka消费组:-> {}"
+
EtlUtils
.
getKafkaGroup
(
envProps
));
System
.
out
.
println
(
"获取到的kafka消费组:-> {}"
+
envProps
.
getKafka_topic
());
System
.
out
.
println
(
"获取到的kafka消费组:-> {}"
+
envProps
.
getKafka_topic
());
System
.
out
.
println
(
"获取到的kafka消费组:-> {}"
+
envProps
.
getKafka_password
());
System
.
out
.
println
(
"获取到的kafka消费组:-> {}"
+
envProps
.
getKafka_password
());
logger
.
info
(
"获取到的kafka消费组:-> {}"
,
EtlUtils
.
getKafkaGroup
(
envProps
));
//
logger.info("获取到的kafka消费组:-> {}", EtlUtils.getKafkaGroup(envProps));
logger
.
info
(
"读取到的配置文件:-> {}"
,
envProps
.
toString
());
//
logger.info("读取到的配置文件:-> {}", envProps.toString());
logger
.
info
(
"读取到的数据连接配置:-> {}"
,
String
.
format
(
envProps
.
getDb_url
(),
envProps
.
getDb_host
(),
envProps
.
getDb_port
(),
envProps
.
getDb_database
()));
//
logger.info("读取到的数据连接配置:-> {}", String.format(envProps.getDb_url(), envProps.getDb_host(), envProps.getDb_port(), envProps.getDb_database()));
logger
.
info
(
"获取到的kafka消费组:-> {}"
,
EtlUtils
.
getKafkaGroup
(
envProps
));
//
logger.info("获取到的kafka消费组:-> {}", EtlUtils.getKafkaGroup(envProps));
//
logger
.
info
(
"获取到的kafka消费组:-> {}"
,
EtlUtils
.
getKafkaGroup
(
envProps
));
//
logger.info("获取到的kafka消费组:-> {}", EtlUtils.getKafkaGroup(envProps));
logger
.
info
(
"获取到的kafka主题:-> {}"
,
envProps
.
getKafka_topic
());
//
logger.info("获取到的kafka主题:-> {}", envProps.getKafka_topic());
logger
.
info
(
"获取到的kafka用户名:-> {}"
,
envProps
.
getKafka_username
());
//
logger.info("获取到的kafka用户名:-> {}", envProps.getKafka_username());
logger
.
info
(
"获取到的kafka密码:-> {}"
,
envProps
.
getKafka_password
());
//
logger.info("获取到的kafka密码:-> {}", envProps.getKafka_password());
//TODO 到时需要改这里,改成正式的消费组
//TODO 到时需要改这里,改成正式的消费组
FlinkKafkaConsumer
<
String
>
kafkaConsumer
=
new
FlinkKafkaConsumer
<
String
>(
envProps
.
getKafka_topic
(),
new
SimpleStringSchema
(),
EtlUtils
.
getKafkaConfig
(
envProps
.
getKafka_brokers
(),
EtlUtils
.
getKafkaGroup
(
envProps
),
envProps
.
getKafka_username
(),
envProps
.
getKafka_password
()));
FlinkKafkaConsumer
<
String
>
kafkaConsumer
=
new
FlinkKafkaConsumer
<
String
>(
envProps
.
getKafka_topic
(),
new
SimpleStringSchema
(),
EtlUtils
.
getKafkaConfig
(
envProps
.
getKafka_brokers
(),
EtlUtils
.
getKafkaGroup
(
envProps
),
envProps
.
getKafka_username
(),
envProps
.
getKafka_password
()));
//System.out.println(envProps.getKafka_topic());
//System.out.println(envProps.getKafka_topic());
...
@@ -84,6 +87,12 @@ public class SyncCustomerDataSource {
...
@@ -84,6 +87,12 @@ public class SyncCustomerDataSource {
//kafkaSource.print("kafaka stream");
//kafkaSource.print("kafaka stream");
SingleOutputStreamOperator
<
JSONObject
>
canalJsonStream
=
kafkaSource
.
map
(
JSONObject:
:
parseObject
)
SingleOutputStreamOperator
<
JSONObject
>
canalJsonStream
=
kafkaSource
.
map
(
JSONObject:
:
parseObject
)
.
filter
(
new
FilterFunction
<
JSONObject
>()
{
@Override
public
boolean
filter
(
JSONObject
value
)
throws
Exception
{
return
!
value
.
getBoolean
(
"isDdl"
);
}
})
.
name
(
"canalJsonStream"
)
.
name
(
"canalJsonStream"
)
.
uid
(
"canalJsonStream"
);
.
uid
(
"canalJsonStream"
);
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment