shezaixing / dsk-dsc-flink · Commits

Commit c5470ee1
authored Sep 05, 2024 by shezaixing
parent 0153cd84

    Configurable logical delete
    Configurable CDC log writing

Showing 5 changed files with 376 additions and 11 deletions (+376 / -11)
+17  -1    AsyncMysqlDataTransferFunction.java     (...k/dsc/common/function/AsyncMysqlDataTransferFunction.java)
+279 -0    AsyncMysqlDataTransferFunctionNew.java  (...sc/common/function/AsyncMysqlDataTransferFunctionNew.java)
+59  -9    SyncCustomerDataSource.java             (...n/java/com/dsk/flink/dsc/sync/SyncCustomerDataSource.java)
+20  -0    EnvProperties.java                      (src/main/java/com/dsk/flink/dsc/utils/EnvProperties.java)
+1   -1    EnvPropertiesUtil.java                  (src/main/java/com/dsk/flink/dsc/utils/EnvPropertiesUtil.java)
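Both features are driven by configuration: the first diff below reads a `logical_delete` flag from the DB-info map, and `EnvProperties` (further down) gains `logical_delete` and `log_enable` entries. A hypothetical `application.properties` snippet; the key names come from this commit, but the values and their interpretation are assumptions:

```properties
# Rewrite DELETE events as upserts that set is_del = 1 (read as a boolean).
logical_delete=true
# Toggle writing of CDC log records to the dsc_cdc_log table (assumed boolean).
log_enable=true
```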
src/main/java/com/dsk/flink/dsc/common/function/AsyncMysqlDataTransferFunction.java
```diff
 package com.dsk.flink.dsc.common.function;

 import cn.hutool.core.collection.CollUtil;
+import cn.hutool.core.map.MapUtil;
 import cn.hutool.core.util.StrUtil;
 import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
 ...
@@ -80,6 +81,12 @@ public class AsyncMysqlDataTransferFunction extends RichAsyncFunction<JSONObject
         }
         JSONObject dataObj = dataList.getJSONObject(0);
+        Boolean logicalDelete = MapUtil.getBool(dbInfoMap, "logical_delete", false);
+        if (logicalDelete) {
+            mysqlType.put("is_del", "int");
+            dataObj.put("is_del", "DELETE".equals(type) ? 1 : 0);
+        }
         if ("INSERT".equals(type)) {
             excueteSql = tranferInsertSql(table, dataObj, mysqlType);
         }
 ...
@@ -90,8 +97,17 @@ public class AsyncMysqlDataTransferFunction extends RichAsyncFunction<JSONObject
         }
         if ("DELETE".equals(type)) {
-            excueteSql = transferDeleteSql(table, dataObj, mysqlType, pkNameSet);
+            excueteSql = logicalDelete ? tranferInsertSql(table, dataObj, mysqlType) : transferDeleteSql(table, dataObj, mysqlType, pkNameSet);
         }
+        // Handle ordering: build a unique groupKey from the record's table name and primary-key value(s)
+        String groupKey = table;
+        for (String pk : pkNameSet) {
+            String pkValue = getValueString(dataObj, pk, mysqlType.getString(pk));
+            groupKey = groupKey.concat("-").concat(pkValue);
+        }
+        Long ts = value.getLong("ts");
         resultFuture.complete(Collections.singleton(excueteSql));
     } catch (Exception e) {
 ...
```
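Net effect of these two hunks: with `logical_delete` enabled, the row gains an `is_del` column (declared as `int` in `mysqlType`), and a canal DELETE event is emitted as an upsert carrying `is_del = 1` instead of a DELETE statement. A minimal standalone sketch of that branch; `buildUpsert` and `buildDelete` are hypothetical stand-ins for the class's `tranferInsertSql` and `transferDeleteSql`:

```java
import com.alibaba.fastjson.JSONObject;

public class LogicalDeleteSketch {

    // Hypothetical stand-ins for tranferInsertSql / transferDeleteSql.
    static String buildUpsert(String table, JSONObject row) {
        return "REPLACE INTO " + table + " /* " + row.toJSONString() + " */";
    }

    static String buildDelete(String table, JSONObject row) {
        return "DELETE FROM " + table + " WHERE id = " + row.get("id");
    }

    static String toSql(String type, String table, JSONObject dataObj, boolean logicalDelete) {
        if (logicalDelete) {
            // Soft delete: mark the row instead of removing it.
            dataObj.put("is_del", "DELETE".equals(type) ? 1 : 0);
        }
        if ("DELETE".equals(type)) {
            return logicalDelete ? buildUpsert(table, dataObj) : buildDelete(table, dataObj);
        }
        return buildUpsert(table, dataObj);
    }

    public static void main(String[] args) {
        JSONObject row = new JSONObject();
        row.put("id", 42);
        System.out.println(toSql("DELETE", "t_user", row, true));  // upsert carrying is_del=1
        System.out.println(toSql("DELETE", "t_user", row, false)); // a real DELETE
    }
}
```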
src/main/java/com/dsk/flink/dsc/common/function/AsyncMysqlDataTransferFunctionNew.java (new file, mode 100644)

This diff is collapsed (+279 lines, not shown).
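The new class itself is collapsed in this view, but its contract is visible from how SyncCustomerDataSource consumes it below: each canal-JSON record maps asynchronously to a `Tuple3<String, String, Long>` of (SQL statement, groupKey, event timestamp). A hypothetical skeleton consistent with that usage, not the actual implementation:

```java
import java.util.Collections;

import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;

// Hypothetical skeleton only; the real code is in the collapsed diff above.
public class AsyncMysqlDataTransferFunctionNewSkeleton
        extends RichAsyncFunction<JSONObject, Tuple3<String, String, Long>> {

    @Override
    public void asyncInvoke(JSONObject value, ResultFuture<Tuple3<String, String, Long>> resultFuture) {
        String sql = "/* SQL built from the canal event, as in AsyncMysqlDataTransferFunction */";
        String groupKey = "table-pkValue"; // table name + "-" + primary-key value
        Long ts = value.getLong("ts");     // event timestamp, used for window ordering downstream
        resultFuture.complete(Collections.singleton(Tuple3.of(sql, groupKey, ts)));
    }
}
```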
src/main/java/com/dsk/flink/dsc/sync/SyncCustomerDataSource.java
```diff
 package com.dsk.flink.dsc.sync;

 import cn.hutool.core.collection.CollUtil;
 import cn.hutool.core.util.StrUtil;
 import com.alibaba.fastjson.JSONObject;
 import com.dsk.flink.dsc.common.function.AsyncMysqlDataTransferFunction;
+import com.dsk.flink.dsc.common.function.AsyncMysqlDataTransferFunctionNew;
 import com.dsk.flink.dsc.common.sink.MysqlDataTransferSink;
 import com.dsk.flink.dsc.utils.EnvProperties;
 import com.dsk.flink.dsc.utils.EnvPropertiesUtil;
 ...
@@ -12,15 +13,26 @@ import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.functions.FilterFunction;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
 import org.apache.flink.api.common.serialization.SimpleStringSchema;
+import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.streaming.api.CheckpointingMode;
 import org.apache.flink.streaming.api.datastream.AsyncDataStream;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.CheckpointConfig;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
+import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
+import org.apache.flink.streaming.api.windowing.time.Time;
+import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
 import org.apache.flink.util.Collector;

 import java.time.LocalDateTime;
 import java.time.ZoneId;
+import java.util.Comparator;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;

 /**
  * @author shezaixing
 ...
```
```diff
@@ -55,7 +67,9 @@ public class SyncCustomerDataSource {
         //TODO: change this later to the production consumer group
         FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<String>(envProps.getKafka_topic(), new SimpleStringSchema(), EtlUtils.getKafkaConfig(envProps.getKafka_brokers(), EtlUtils.getKafkaGroup(envProps), envProps.getKafka_username(), envProps.getKafka_password()));
         //System.out.println(envProps.getKafka_topic());
         kafkaConsumer.setStartFromEarliest();
+        long defaultOffset = LocalDateTime.now().minusMinutes(5).atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
+        kafkaConsumer.setStartFromTimestamp(defaultOffset);
         //kafkaConsumer.setStartFromLatest();
         // offset override
         if (StrUtil.isNotBlank(offsetTimestamp)) {
             kafkaConsumer.setStartFromTimestamp(Long.parseLong(offsetTimestamp));
 ...
```
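FlinkKafkaConsumer keeps a single start position, so the last `setStartFrom*` call before execution wins: the earliest-offset default is overridden by the five-minutes-back timestamp, which is in turn overridden when the `offsetTimestamp` job argument is supplied. A sketch of just the timestamp computation:

```java
import java.time.LocalDateTime;
import java.time.ZoneId;

public class StartOffsetSketch {
    public static void main(String[] args) {
        // Epoch millis for "five minutes ago" in the JVM's default zone,
        // matching defaultOffset in the hunk above.
        long defaultOffset = LocalDateTime.now().minusMinutes(5)
                .atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
        System.out.println(defaultOffset);
    }
}
```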
```diff
@@ -72,7 +86,8 @@ public class SyncCustomerDataSource {
                 .filter(new FilterFunction<JSONObject>() {
                     @Override
                     public boolean filter(JSONObject value) throws Exception {
-                        return !value.getBoolean("isDdl");
+                        return !value.getBoolean("isDdl") && !"TIDB_WATERMARK".equals(value.getString("type"));
                     }
                 })
                 .name("canalJsonStream")
 ...
```
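The tightened predicate now also drops TiCDC watermark records, not just DDL events. The same predicate applied to two sample canal-JSON records:

```java
import com.alibaba.fastjson.JSONObject;

public class CanalFilterSketch {

    // Same predicate as the updated filter above.
    static boolean keep(JSONObject value) {
        return !value.getBoolean("isDdl") && !"TIDB_WATERMARK".equals(value.getString("type"));
    }

    public static void main(String[] args) {
        JSONObject dml = JSONObject.parseObject("{\"isDdl\":false,\"type\":\"INSERT\"}");
        JSONObject wm  = JSONObject.parseObject("{\"isDdl\":false,\"type\":\"TIDB_WATERMARK\"}");
        System.out.println(keep(dml)); // true  -> kept
        System.out.println(keep(wm));  // false -> dropped
    }
}
```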
```diff
@@ -80,19 +95,54 @@ public class SyncCustomerDataSource {
         //canalJsonStream.print("canal stream");
-        SingleOutputStreamOperator<String> sqlResultStream = AsyncDataStream.orderedWait(canalJsonStream, new AsyncMysqlDataTransferFunction(envProps), 1200L, TimeUnit.SECONDS, 20)
-                .filter(new FilterFunction<String>() {
-                    @Override
-                    public boolean filter(String value) throws Exception {
-                        return StrUtil.isNotBlank(value) && !"err".equals(value);
-                    }
-                })
+//        SingleOutputStreamOperator<String> sqlResultStream = AsyncDataStream.orderedWait(canalJsonStream, new AsyncMysqlDataTransferFunction(envProps), 1200L, TimeUnit.SECONDS, 20)
+//                .filter(new FilterFunction<String>() {
+//                    @Override
+//                    public boolean filter(String value) throws Exception {
+//                        return StrUtil.isNotBlank(value) && !"err".equals(value);
+//                    }
+//                })
+//                .name("sqlResultStream")
+//                .uid("sqlResultStream");
+//
+//        //sqlResultStream.print("sql result");
+//
+//        sqlResultStream.addSink(new MysqlDataTransferSink(envProps)).name("sqlSinkStream").uid("sqlSinkStream");
+        SingleOutputStreamOperator<Tuple3<String, String, Long>> sqlResultStream1 = AsyncDataStream.orderedWait(canalJsonStream, new AsyncMysqlDataTransferFunctionNew(envProps), 1200L, TimeUnit.SECONDS, 20)
+                .filter(new FilterFunction<Tuple3<String, String, Long>>() {
+                    @Override
+                    public boolean filter(Tuple3<String, String, Long> value) throws Exception {
+                        return StrUtil.isNotBlank(value.f0) && !"err".equals(value.f0);
+                    }
+                })
                 .name("sqlResultStream")
                 .uid("sqlResultStream");

         //sqlResultStream.print("sql result");

+        SingleOutputStreamOperator<String> groupWindowSqlResultStream = sqlResultStream1
+                .keyBy(value -> value.f1)
+                .window(TumblingProcessingTimeWindows.of(Time.seconds(3)))
+                .process(new ProcessWindowFunction<Tuple3<String, String, Long>, String, String, TimeWindow>() {
+                    @Override
+                    public void process(String s, ProcessWindowFunction<Tuple3<String, String, Long>, String, String, TimeWindow>.Context context, Iterable<Tuple3<String, String, Long>> elements, Collector<String> out) throws Exception {
+                        List<Tuple3<String, String, Long>> list = CollUtil.list(false, elements);
+                        if ("dsc_cdc_log".equals(list.get(0).f1)) {
+                            list = list.stream().sorted(Comparator.comparing(x -> x.f2, Comparator.reverseOrder())).collect(Collectors.toList());
+                            list.forEach(x -> out.collect(x.f0));
+                            return;
+                        }
+                        Tuple3<String, String, Long> maxTsElement = list.stream().max(Comparator.comparing(x -> x.f2)).get();
+                        out.collect(maxTsElement.f0);
+                    }
+                })
+                .name("groupWindowSqlResultStream")
+                .uid("groupWindowSqlResultStream");
+
+        groupWindowSqlResultStream.print("sql result");

-        sqlResultStream.addSink(new MysqlDataTransferSink(envProps)).name("sqlSinkStream").uid("sqlSinkStream");
+        groupWindowSqlResultStream.addSink(new MysqlDataTransferSink(envProps)).name("sqlSinkStream").uid("sqlSinkStream");

         env.execute();
     }
 }
```
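The new windowing stage is a per-key deduplication: statements are keyed by groupKey (`f1`), collected into 3-second tumbling windows, and only the statement with the largest `ts` (`f2`) is written, except for the `dsc_cdc_log` table, whose rows are all forwarded sorted by `ts` descending. A plain-Java sketch of that window decision, with a stand-in `Event` class for the Tuple3:

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class WindowDedupSketch {

    // Stand-in for Tuple3<String, String, Long>: (sql, groupKey, ts).
    static final class Event {
        final String sql; final String key; final long ts;
        Event(String sql, String key, long ts) { this.sql = sql; this.key = key; this.ts = ts; }
    }

    // Mirrors the ProcessWindowFunction body for one window's elements.
    static List<String> emit(List<Event> window) {
        if ("dsc_cdc_log".equals(window.get(0).key)) {
            // CDC log rows are all forwarded, newest first.
            return window.stream()
                    .sorted(Comparator.comparingLong((Event x) -> x.ts).reversed())
                    .map(x -> x.sql)
                    .collect(Collectors.toList());
        }
        // Ordinary tables: only the latest statement per groupKey survives the window.
        Event latest = window.stream().max(Comparator.comparingLong(x -> x.ts)).get();
        return Collections.singletonList(latest.sql);
    }

    public static void main(String[] args) {
        List<Event> window = Arrays.asList(
                new Event("UPDATE t_user SET name = 'a' WHERE id = 42", "t_user-42", 100L),
                new Event("UPDATE t_user SET name = 'b' WHERE id = 42", "t_user-42", 200L));
        System.out.println(emit(window)); // only the ts=200 statement
    }
}
```

The trade-off: up to 3 seconds of added latency and dropped intermediate updates, in exchange for last-write-wins output per key.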
src/main/java/com/dsk/flink/dsc/utils/EnvProperties.java
```diff
 ...
@@ -113,6 +113,26 @@ public class EnvProperties extends Properties {
     String solr_urls;
     String solr_zk_hosts;
+    String logical_delete;
+    String log_enable;
+
+    public String getLog_enable() {
+        return log_enable == null ? this.getProperty("log_enable") : log_enable;
+    }
+
+    public void setLog_enable(String log_enable) {
+        this.log_enable = log_enable;
+    }
+
+    public String getLogical_delete() {
+        return logical_delete == null ? this.getProperty("logical_delete") : logical_delete;
+    }
+
+    public void setLogical_delete(String logical_delete) {
+        this.logical_delete = logical_delete;
+    }
+
     public String getEnv() {
         return env == null ? this.getProperty("env") : env;
     }
 ...
```
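Both new accessors follow the file's existing pattern (compare `getEnv`): prefer the explicitly set field, otherwise fall back to the entry loaded from the properties file, since `EnvProperties` extends `Properties`. A small usage sketch:

```java
import com.dsk.flink.dsc.utils.EnvProperties;

public class EnvPropertiesFallbackDemo {
    public static void main(String[] args) {
        EnvProperties props = new EnvProperties();
        props.setProperty("logical_delete", "true");   // as if loaded from application.properties
        System.out.println(props.getLogical_delete()); // "true"  -> getProperty fallback
        props.setLogical_delete("false");
        System.out.println(props.getLogical_delete()); // "false" -> the field wins once set
    }
}
```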
src/main/java/com/dsk/flink/dsc/utils/EnvPropertiesUtil.java
```diff
 ...
@@ -145,7 +145,7 @@ public class EnvPropertiesUtil {
         EnvProperties envProperties = new EnvProperties();
         if (StrUtil.isBlank(filePath)) {
-            filePath = System.getProperties().getProperty("os.name").contains("Windows") ? "D:\\Env\\application_pro.properties" : "/home/module/flink-job/application.properties";
+            filePath = System.getProperties().getProperty("os.name").contains("Windows") ? "D:\\Env\\application.properties" : "/home/module/flink-job/application.properties";
         }
         File file = new File(filePath);
         if (!file.exists()) {
 ...
```