shezaixing / dsk-dsc-flink · Commits

Commit 2b8ea804
Authored Jan 08, 2025 by liaowenwu
Optimize code
Parent: 01f8cec1

Showing 3 changed files with 211 additions and 63 deletions (+211 -63):

  src/main/java/com/dsk/flink/dsc/common/function/MysqlDataTransferFunction.java   +158  -0
  src/main/java/com/dsk/flink/dsc/common/sink/MysqlDataTransferSink.java             +7   -6
  src/main/java/com/dsk/flink/dsc/sync/SyncCustomerDataSource.java                  +46   -57
src/main/java/com/dsk/flink/dsc/common/function/MysqlDataTransferFunction.java  (new file, 0 → 100644)
package com.dsk.flink.dsc.common.function;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.map.MapUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.dsk.flink.dsc.utils.EnvProperties;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class MysqlDataTransferFunction extends ProcessFunction<JSONObject, Tuple3<String, String, Long>> {

    // Database connection info
    private final EnvProperties dbInfoMap;

    private OutputTag<Tuple3<String, String, Long>> toSlideTag;

    public MysqlDataTransferFunction(EnvProperties dbInfoMap, OutputTag<Tuple3<String, String, Long>> toSlideTag) {
        this.dbInfoMap = dbInfoMap;
        this.toSlideTag = toSlideTag;
    }

    private static String logSqlFormat = "INSERT INTO dsc_cdc_log (`table`,op_type,pk_columns,pk_values,data_json,cdc_ts) values ('%s','%s','%s','%s','%s', %d)";

    private String buildLogData(String type, String table, Set<String> pkNameSet, JSONObject dataObj, long ts, String dataJsonStr) {
        List<String> pkValueList = new ArrayList<>();
        for (String pk : pkNameSet) {
            pkValueList.add(dataObj.getString(pk));
        }
        String pkColumns = String.join(",", pkNameSet);
        String pkValues = String.join("-", pkValueList);
        dataJsonStr = dataJsonStr.replace("\\", "\\\\");
        return String.format(logSqlFormat, table, type, pkColumns, pkValues, dataJsonStr, ts);
    }

    private static final String[] STR_SQL_TYPE = new String[]{"VARCHAR", "CHAR", "TINYBLOB", "BLOB", "MEDIUMBLOB", "LONGBLOB", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT", "TIME", "TIMESTAMP", "JSON", "json"};

    private static final String[] KEYWORD = new String[]{"limit"};

    private static String tranferInsertSql(String table, JSONObject dataObj, JSONObject mysqlType) {
        Set<String> columnSet = mysqlType.keySet();
        StringBuilder sb = new StringBuilder();
        for (String s : columnSet) {
            sb.append("`" + s + "`").append(",");
        }
        List<String> valueList = new ArrayList<>();
        for (String col : columnSet) {
            if (Arrays.asList(KEYWORD).contains(col)) {
                valueList.add(getValueString(dataObj, col, mysqlType.getString(col)));
            } else {
                valueList.add(getValueString(dataObj, col, mysqlType.getString(col)));
            }
        }
        sb.setLength(sb.length() - 1);
        String columnString = sb.toString();
        String valueString = String.join(",", valueList);
        //return String.format("INSERT INTO %s (%s) values (%s) ON DUPLICATE KEY UPDATE %s;",table,columnString,valueString,updateString);
        return String.format("REPLACE INTO %s (%s) values (%s);", table, columnString, valueString);
    }

    private String transferDeleteSql(String table, JSONObject dataObj, JSONObject mysqlType, Set<String> pkNameSet) {
        List<String> whereList = new ArrayList<>();
        for (String pk : pkNameSet) {
            String whereString = pk.concat(" = ").concat(getValueString(dataObj, pk, mysqlType.getString(pk)));
            whereList.add(whereString);
        }
        String whereString = String.join(" and ", whereList);
        return String.format("DELETE FROM %s WHERE %s", table, whereString);
    }

    /**
     * @author shezaixing
     * @date 2023/12/7 14:23
     * @description Decide how a value is rendered when the SQL string is built (i.e. whether it needs quotes)
     *
     */
    private static String getValueString(JSONObject dataObj, String columnKey, String mysqlType) {
        if (null == dataObj.get(columnKey)) {
            return "null";
        }
        // Types whose values must be rendered as quoted strings
        if (Arrays.asList(STR_SQL_TYPE).contains(mysqlType.toUpperCase())) {
            return String.format("'%s'", dataObj.getString(columnKey).replace("\\", "\\\\").replace("'", "\\'"));
        }
        // Date/time field handling
        if ("DATE".equalsIgnoreCase(mysqlType) || "DATETIME".equalsIgnoreCase(mysqlType)) {
            SimpleDateFormat df = "DATETIME".equalsIgnoreCase(mysqlType) ? new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") : new SimpleDateFormat("yyyy-MM-dd");
            return String.format("\"%s\"", df.format(dataObj.getDate(columnKey)));
        }
        return dataObj.getString(columnKey);
    }

    @Override
    public void processElement(JSONObject value, Context ctx, Collector<Tuple3<String, String, Long>> out) throws Exception {
        // Data collection returned in the message
        String type = value.getString("type");
        JSONArray dataList = value.getJSONArray("data");
        JSONObject mysqlType = value.getJSONObject("mysqlType");
        String table = value.getString("table");
        JSONArray pkNames = value.getJSONArray("pkNames");
        Set<String> pkNameSet = new HashSet<>();
        long ts = value.getLong("ts") == null ? System.currentTimeMillis() : value.getLong("ts");
        if (CollUtil.isNotEmpty(pkNames)) {
            pkNames.forEach(name -> pkNameSet.add(String.valueOf(name)));
        }
        String excueteSql = "";
        if (value.getBoolean("isDdl")) {
            return;
        }
        JSONObject dataObj = dataList.getJSONObject(0);
        Boolean logicalDelete = MapUtil.getBool(dbInfoMap, "logical_delete", false);
        if (logicalDelete) {
            mysqlType.put("is_del", "int");
            dataObj.put("is_del", "DELETE".equals(type) ? 1 : 0);
        }
        // Preserve ordering of events:
        // use this record's table name and primary key value as the unique groupKey
        String groupKey = table;
        for (String pk : pkNameSet) {
            String pkValue = getValueString(dataObj, pk, mysqlType.getString(pk));
            groupKey = table.concat("-").concat(pkValue);
        }
        if ("INSERT".equals(type) || "UPDATE".equals(type)) {
            excueteSql = tranferInsertSql(table, dataObj, mysqlType);
        } else {
            excueteSql = logicalDelete ? tranferInsertSql(table, dataObj, mysqlType) : transferDeleteSql(table, dataObj, mysqlType, pkNameSet);
        }
        out.collect(Tuple3.of(excueteSql, groupKey, ts));
        if (MapUtil.getBool(dbInfoMap, "log_enable", false)) {
            String logSql = buildLogData(type, table, pkNameSet, dataObj, ts, value.toJSONString());
            ctx.output(toSlideTag, Tuple3.of(logSql, "dsc_cdc_log", ts));
        }
    }
}
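
For illustration only (not part of this commit): a minimal sketch of the canal-style event shape that processElement() consumes, with invented table, column and value names, plus a note on what the function is expected to emit for it.

// Illustrative sketch, not part of the commit. Builds a hypothetical canal-style event
// of the shape MysqlDataTransferFunction.processElement() expects; the table, columns
// and values are invented for the example.
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;

public class CanalEventSketch {
    public static void main(String[] args) {
        JSONObject event = new JSONObject();
        event.put("type", "INSERT");              // INSERT / UPDATE / DELETE
        event.put("isDdl", false);                // DDL events are ignored by the function
        event.put("table", "t_user");
        event.put("ts", 1736300000000L);          // missing "ts" falls back to System.currentTimeMillis()

        JSONArray pkNames = new JSONArray();
        pkNames.add("id");
        event.put("pkNames", pkNames);

        JSONObject mysqlType = new JSONObject();  // column name -> MySQL type, drives quoting
        mysqlType.put("id", "BIGINT");
        mysqlType.put("name", "VARCHAR");
        event.put("mysqlType", mysqlType);

        JSONObject row = new JSONObject();        // first element of "data" is the changed row
        row.put("id", 1);
        row.put("name", "Tom");
        JSONArray data = new JSONArray();
        data.add(row);
        event.put("data", data);

        // For such an event the function emits Tuple3.of(sql, groupKey, ts), where sql is a
        // "REPLACE INTO t_user (...) values (...);" statement, groupKey is "t_user-1"
        // (table name plus primary-key value) and ts is the event timestamp; with log_enable
        // set, an "INSERT INTO dsc_cdc_log ..." row is also sent to the side output.
        System.out.println(event.toJSONString());
    }
}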
src/main/java/com/dsk/flink/dsc/common/sink/MysqlDataTransferSink.java  (+7 -6)
 package com.dsk.flink.dsc.common.sink;

-import cn.hutool.core.date.DateUtil;
 import cn.hutool.core.lang.Snowflake;
 import cn.hutool.core.util.IdUtil;
 import cn.hutool.core.util.RandomUtil;
 import cn.hutool.db.DbUtil;
+import cn.hutool.db.sql.SqlExecutor;
 import com.alibaba.druid.pool.DruidDataSource;
 import com.dsk.flink.dsc.common.dto.SqlErrorLog;
 import com.dsk.flink.dsc.utils.EnvProperties;

@@ -16,7 +16,10 @@ import org.slf4j.LoggerFactory;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.util.Date;
-import java.util.concurrent.*;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;

 public class MysqlDataTransferSink extends RichSinkFunction<String> {

@@ -62,11 +65,9 @@ public class MysqlDataTransferSink extends RichSinkFunction<String> {
     private void executeSql(String sql){
         Connection connection = null;
-        PreparedStatement pt = null;
         try {
             connection = dataSource.getConnection();
-            pt = connection.prepareStatement(sql);
-            pt.execute();
+            SqlExecutor.execute(connection, sql);
         } catch (Exception e) {
             //logger.error("------错误时间:{}-----,sql:{}--------异常:{}", DateUtil.now(),sql,e.getMessage());
             logger.error("异常信息:", e);

@@ -77,7 +78,7 @@ public class MysqlDataTransferSink extends RichSinkFunction<String> {
                 logger.error("错误日志保存异常 -> {}", re.getMessage());
             }
         } finally {
-            DbUtil.close(pt, connection);
+            DbUtil.close(connection);
         }
     }
 }
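
As context for the executeSql change, a minimal standalone sketch of the pattern the sink now relies on; the names dataSource and sql are assumptions, and the real sink's error logging and SqlErrorLog handling are omitted.

// Sketch only: hutool's SqlExecutor creates and closes the Statement internally;
// DbUtil.close(connection) returns the pooled connection in the finally block.
import cn.hutool.db.DbUtil;
import cn.hutool.db.sql.SqlExecutor;

import java.sql.Connection;
import javax.sql.DataSource;

class ExecuteSqlSketch {
    static void executeOnce(DataSource dataSource, String sql) throws Exception {
        Connection connection = null;
        try {
            connection = dataSource.getConnection();
            SqlExecutor.execute(connection, sql);
        } finally {
            DbUtil.close(connection);
        }
    }
}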
src/main/java/com/dsk/flink/dsc/sync/SyncCustomerDataSource.java  (+46 -57)
@@ -3,14 +3,13 @@ package com.dsk.flink.dsc.sync;
 import cn.hutool.core.collection.CollUtil;
 import cn.hutool.core.util.StrUtil;
 import com.alibaba.fastjson.JSONObject;
-import com.dsk.flink.dsc.common.function.AsyncMysqlDataTransferFunctionNew;
 import com.dsk.flink.dsc.common.function.CanalMapToTsGroupFunction;
 import com.dsk.flink.dsc.common.function.GroupTsProcessWindowFunction;
+import com.dsk.flink.dsc.common.function.MysqlDataTransferFunction;
 import com.dsk.flink.dsc.common.sink.MysqlDataTransferSink;
 import com.dsk.flink.dsc.utils.EnvProperties;
 import com.dsk.flink.dsc.utils.EnvPropertiesUtil;
 import com.dsk.flink.dsc.utils.EtlUtils;
-import io.tidb.bigdata.tidb.JdbcConnectionProviderFactory;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.functions.FilterFunction;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;

@@ -18,7 +17,7 @@ import org.apache.flink.api.common.serialization.SimpleStringSchema;
 import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.streaming.api.CheckpointingMode;
-import org.apache.flink.streaming.api.datastream.AsyncDataStream;
+import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.CheckpointConfig;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

@@ -28,19 +27,18 @@ import org.apache.flink.streaming.api.windowing.time.Time;
 import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
 import org.apache.flink.util.Collector;
+import org.apache.flink.util.OutputTag;

 import java.time.LocalDateTime;
 import java.time.ZoneId;
 import java.util.Comparator;
 import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;

 /**
  * @author shezaixing
  * @date 2023/12/5 14:44
  * @description Job that syncs data to the customer's target data source
  *
+ * update by lww
  */
 @Slf4j
 public class SyncCustomerDataSource {

@@ -48,15 +46,15 @@ public class SyncCustomerDataSource {
     public static void main(String[] args) throws Exception {
         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-        //env.setParallelism(3);
+        //env.setParallelism(1);
-        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(50, 30000));
+        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(100, 60000));
-        env.enableCheckpointing(300000);
+        env.enableCheckpointing(180000);
         env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
-        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(300000);
+        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(180000);
-        env.getCheckpointConfig().setCheckpointTimeout(7200000);
+        env.getCheckpointConfig().setCheckpointTimeout(300000);
         env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
         env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
-        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(50);
+        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(100);
         //Get the user's own configuration
         ParameterTool parameterTool = ParameterTool.fromArgs(args);

@@ -77,68 +75,59 @@ public class SyncCustomerDataSource {
             kafkaConsumer.setStartFromTimestamp(Long.parseLong(offsetTimestamp));
         }

-        SingleOutputStreamOperator<String> kafkaSource = env.addSource(kafkaConsumer)
-                .setParallelism(1)
-                .name("kafka-source")
-                .uid("kafka-source");
-        //kafkaSource.print("kafaka stream");
-
-        SingleOutputStreamOperator<JSONObject> canalJsonStream = kafkaSource.map(JSONObject::parseObject)
+        SingleOutputStreamOperator<Tuple3<JSONObject, String, Long>> tsGroupStream = env.addSource(kafkaConsumer)
+                .map(JSONObject::parseObject)
                 .filter(new FilterFunction<JSONObject>() {
                     @Override
                     public boolean filter(JSONObject value) throws Exception {
                         return !value.getBoolean("isDdl") && !"TIDB_WATERMARK".equals(value.getString("type"));
                     }
                 })
-                .name("canalJsonStream")
-                .uid("canalJsonStream");
-        //canalJsonStream.print("canal stream");
-
-        SingleOutputStreamOperator<Tuple3<JSONObject, String, Long>> tsGroupStream = canalJsonStream.map(new CanalMapToTsGroupFunction());
+                .name("dsc-source")
+                .uid("dsc-source")
+                .map(new CanalMapToTsGroupFunction())
+                .name("dsc-groupKey")
+                .uid("dsc-groupKey");

         SingleOutputStreamOperator<JSONObject> process = tsGroupStream.keyBy(x -> x.f1)
-                .window(TumblingProcessingTimeWindows.of(Time.milliseconds(100)))
-                .process(new GroupTsProcessWindowFunction());
+                .window(TumblingProcessingTimeWindows.of(Time.milliseconds(50)))
+                .process(new GroupTsProcessWindowFunction())
+                .uid("dsc-w1")
+                .name("dsc-w1");

-        SingleOutputStreamOperator<Tuple3<String, String, Long>> sqlResultStream1 = AsyncDataStream.orderedWait(process,
-                new AsyncMysqlDataTransferFunctionNew(envProps), 120L, TimeUnit.SECONDS)
-                .filter(new FilterFunction<Tuple3<String, String, Long>>() {
-                    @Override
-                    public boolean filter(Tuple3<String, String, Long> value) throws Exception {
-                        return StrUtil.isNotBlank(value.f0) && !"err".equals(value.f0);
-                    }
-                })
-                .name("sqlResultStream")
-                .uid("sqlResultStream");
-        //sqlResultStream1.print("async sql==>");
+        OutputTag<Tuple3<String, String, Long>> cdcLogTag = new OutputTag<Tuple3<String, String, Long>>("dsc_cdc_log") {};
+
+        SingleOutputStreamOperator<Tuple3<String, String, Long>> slide = process
+                .process(new MysqlDataTransferFunction(envProps, cdcLogTag))
+                .name("dsc-sql")
+                .uid("dsc-sql");

-        SingleOutputStreamOperator<String> groupWindowSqlResultStream = sqlResultStream1.keyBy(value -> value.f1)
-                .window(TumblingProcessingTimeWindows.of(Time.milliseconds(100)))
+        SingleOutputStreamOperator<String> groupWindowSqlResultStream = slide
+                .keyBy(value -> value.f1)
+                .window(TumblingProcessingTimeWindows.of(Time.milliseconds(50)))
                 .process(new ProcessWindowFunction<Tuple3<String, String, Long>, String, String, TimeWindow>() {
                     @Override
                     public void process(String s, ProcessWindowFunction<Tuple3<String, String, Long>, String, String,
                             TimeWindow>.Context context, Iterable<Tuple3<String, String, Long>> elements,
                             Collector<String> out) throws Exception {
                         List<Tuple3<String, String, Long>> list = CollUtil.list(false, elements);
-                        if ("dsc_cdc_log".equals(list.get(0).f1)) {
-                            list = list.stream().sorted(Comparator.comparing(x -> x.f2, Comparator.reverseOrder())).collect(Collectors.toList());
-                            list.forEach(x -> {out.collect(x.f0);});
-                            return;
+                        if (CollUtil.isNotEmpty(list)) {
+                            Tuple3<String, String, Long> maxTsElement =
+                                    list.stream().max(Comparator.comparing(x -> x.f2)).get();
+                            out.collect(maxTsElement.f0);
                         }
-                        Tuple3<String, String, Long> maxTsElement = list.stream().max(Comparator.comparing(x -> x.f2)).get();
-                        out.collect(maxTsElement.f0);
-                        //list.forEach(x -> {out.collect(x.f0);});
                     }
                 })
-                .name("groupWindowSqlResultStream")
-                .uid("groupWindowSqlResultStream");
-        //groupWindowSqlResultStream.print("sql result==>");
-
-        groupWindowSqlResultStream.addSink(new MysqlDataTransferSink(envProps)).name("sqlSinkStream").uid("sqlSinkStream");
+                .name("dsc-max")
+                .uid("dsc-max");
+
+        groupWindowSqlResultStream.addSink(new MysqlDataTransferSink(envProps))
+                .name("dsc-sink")
+                .uid("dsc-sink");
+
+        slide.getSideOutput(cdcLogTag).map(x -> x.f0).addSink(new MysqlDataTransferSink(envProps))
+                .name("dsc-cdc-log")
+                .uid("dsc-cdc-log");

-        env.execute();
+        env.execute("dsc-client");
     }
 }