Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
eagleEye
/
eagleEye-flink_kafka
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
0
Merge Requests
0
Pipelines
Wiki
Snippets
Settings
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
19f3245f
authored
Jun 24, 2025
by
魏建枢
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
代码提交增加事件错误事件
parent
25c910d7
Expand all
Hide whitespace changes
Inline
Side-by-side
Showing
7 changed files
with
410 additions
and
6 deletions
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/EventLogAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/VectorAngleCalculationAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/CollectLogProcessFunction.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/PointRecordJoinProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorAngleProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/util/TimeConvertUtil.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/Properties.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/EventLogAchi.java
View file @
19f3245f
This diff is collapsed.
Click to expand it.
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/VectorAngleCalculationAchi.java
View file @
19f3245f
This diff is collapsed.
Click to expand it.
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/CollectLogProcessFunction.java
0 → 100644
View file @
19f3245f
package
com
.
flink
.
processor
.
function
;
import
java.time.Duration
;
import
org.apache.flink.api.common.state.StateTtlConfig
;
import
org.apache.flink.api.common.state.ValueState
;
import
org.apache.flink.api.common.state.ValueStateDescriptor
;
import
org.apache.flink.configuration.Configuration
;
import
org.apache.flink.streaming.api.functions.KeyedProcessFunction
;
import
org.apache.flink.util.Collector
;
import
com.flink.vo.CollectLogToJsonSource
;
/**
 * Per-key deduplication function: forwards a collect-log record only when it is
 * strictly newer (by collectTime) than the last record seen for the same key.
 * State is TTL-bounded so idle keys are cleaned up automatically.
 *
 * @author wjs
 * @version created 2025-6-23 13:50:36
 */
public class CollectLogProcessFunction
        extends KeyedProcessFunction<String, CollectLogToJsonSource, CollectLogToJsonSource> {

    private static final long serialVersionUID = 1L;

    /** Most recent record observed for the current key (expires after 30 minutes idle). */
    private ValueState<CollectLogToJsonSource> latestUserState;

    @Override
    public void open(Configuration parameters) {
        // Describe the per-key value state holding the latest record.
        ValueStateDescriptor<CollectLogToJsonSource> stateDescriptor =
                new ValueStateDescriptor<>("collectLog-state", CollectLogToJsonSource.class);

        // TTL: entries expire 30 minutes after creation or last write.
        StateTtlConfig ttlConfig = StateTtlConfig
                .newBuilder(Duration.ofMinutes(30))
                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                // Heap state backend: incremental cleanup, 1000 entries per state access.
                .cleanupIncrementally(1000, true)
                // .cleanupInRocksdbCompactFilter(1000) // RocksDB backend: compaction-filter cleanup
                .build();
        stateDescriptor.enableTimeToLive(ttlConfig);

        latestUserState = getRuntimeContext().getState(stateDescriptor);
    }

    @Override
    public void processElement(CollectLogToJsonSource collectLog,
                               Context ctx,
                               Collector<CollectLogToJsonSource> out) throws Exception {
        CollectLogToJsonSource previous = latestUserState.value();
        // Emit only strictly newer records; duplicates and stale out-of-order ones are dropped.
        boolean isNewer = previous == null
                || collectLog.getCollectTime() > previous.getCollectTime();
        if (isNewer) {
            latestUserState.update(collectLog);
            out.collect(collectLog);
        }
    }
}
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/PointRecordJoinProcessor.java
0 → 100644
View file @
19f3245f
package
com
.
flink
.
processor
.
function
;
import
java.util.Collections
;
import
java.util.Iterator
;
import
java.util.List
;
import
java.util.Map
;
import
java.util.Optional
;
import
org.apache.flink.api.common.state.MapState
;
import
org.apache.flink.api.common.state.MapStateDescriptor
;
import
org.apache.flink.api.common.state.ValueState
;
import
org.apache.flink.api.common.state.ValueStateDescriptor
;
import
org.apache.flink.configuration.Configuration
;
import
org.apache.flink.streaming.api.functions.co.CoProcessFunction
;
import
org.apache.flink.util.Collector
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
com.flink.achieve.doris.VectorAngleCalculationAchi.PointRecord
;
import
com.flink.vo.CollectLogToJsonSource
;
import
com.flink.vo.EventList
;
import
com.flink.vo.EventLogToJsonSource
;
import
com.flink.vo.Properties
;
/**
* @author wjs
* @version 创建时间:2025-6-23 14:05:21
* 类说明
*/
public
class
PointRecordJoinProcessor
extends
CoProcessFunction
<
EventLogToJsonSource
,
CollectLogToJsonSource
,
PointRecord
>{
private
static
final
Logger
logger
=
LoggerFactory
.
getLogger
(
PointRecordJoinProcessor
.
class
);
/**
*
*/
private
static
final
long
serialVersionUID
=
1L
;
private
transient
ValueState
<
CollectLogToJsonSource
>
userState
;
private
transient
MapState
<
Long
,
EventLogToJsonSource
>
pendingEventLog
;
@Override
public
void
open
(
Configuration
parameters
)
{
userState
=
getRuntimeContext
().
getState
(
new
ValueStateDescriptor
<>(
"collectLog-state"
,
CollectLogToJsonSource
.
class
));
pendingEventLog
=
getRuntimeContext
().
getMapState
(
new
MapStateDescriptor
<>(
"pendingEventLog"
,
Long
.
class
,
EventLogToJsonSource
.
class
));
}
@Override
public
void
processElement1
(
EventLogToJsonSource
eventLog
,
Context
ctx
,
Collector
<
PointRecord
>
out
)
throws
Exception
{
try
{
CollectLogToJsonSource
collectLog
=
userState
.
value
();
if
(
collectLog
!=
null
)
{
processEventWithCollectLog
(
eventLog
,
collectLog
,
out
);
// 直接处理
}
else
{
// 存储并设置超时计时器
pendingEventLog
.
put
(
ctx
.
timestamp
(),
eventLog
);
ctx
.
timerService
().
registerEventTimeTimer
(
ctx
.
timestamp
()
+
60000
);
}
}
catch
(
Exception
e
)
{
logger
.
error
(
"ProcessElement1 error: {}"
,
e
.
getMessage
(),
e
);
}}
@Override
public
void
processElement2
(
CollectLogToJsonSource
collectLog
,
Context
ctx
,
Collector
<
PointRecord
>
out
)
throws
Exception
{
try
{
userState
.
update
(
collectLog
);
// 更新最新状态
// 主动处理滞留事件(解决数据滞留问题)
Iterator
<
Map
.
Entry
<
Long
,
EventLogToJsonSource
>>
it
=
pendingEventLog
.
iterator
();
while
(
it
.
hasNext
())
{
EventLogToJsonSource
pendingEvent
=
it
.
next
().
getValue
();
processEventWithCollectLog
(
pendingEvent
,
collectLog
,
out
);
it
.
remove
();
}
}
catch
(
Exception
e
)
{
logger
.
error
(
"ProcessElement2 error: {}"
,
e
.
getMessage
(),
e
);
}}
// 统一事件处理逻辑
private
void
processEventWithCollectLog
(
EventLogToJsonSource
event
,
CollectLogToJsonSource
collectLog
,
Collector
<
PointRecord
>
out
)
{
for
(
EventList
eventLogInfo
:
event
.
getEventList
())
{
List
<
String
>
pointList
=
Optional
.
ofNullable
(
eventLogInfo
.
getR8
())
.
map
(
Properties:
:
getR6
).
orElse
(
Collections
.
emptyList
());
for
(
String
pointStr
:
pointList
)
{
String
points
=
cleanPointString
(
pointStr
);
if
(
points
.
isEmpty
())
continue
;
// 增强解析健壮性
parseAndEmitPoints
(
points
,
event
.
getId
(),
eventLogInfo
.
getR9
(),
collectLog
,
out
);
}
}
}
// 坐标解析(含异常处理)
private
void
parseAndEmitPoints
(
String
pointStr
,
String
eventId
,
long
timestamp
,
CollectLogToJsonSource
collectLog
,
Collector
<
PointRecord
>
out
)
{
String
[]
points
=
pointStr
.
split
(
","
);
for
(
int
i
=
0
;
i
<
points
.
length
;
i
++)
{
String
trimmed
=
points
[
i
].
trim
();
if
(!
isValidPointFormat
(
trimmed
))
continue
;
String
[]
xy
=
splitPoint
(
trimmed
);
if
(
xy
.
length
!=
2
)
continue
;
try
{
double
x
=
Double
.
parseDouble
(
xy
[
0
]);
double
y
=
Double
.
parseDouble
(
xy
[
1
]);
logger
.
info
(
"parseAndEmitPoints params id:{},r9:{},i:{},xy0:{},xy1:{},Resolution_x:{},Resolution_y:{}"
,
eventId
,
timestamp
,
i
,
Double
.
parseDouble
(
xy
[
0
].
trim
()),
Double
.
parseDouble
(
xy
[
1
].
trim
()),
collectLog
.
getResolution_x
(),
collectLog
.
getResolution_y
());
out
.
collect
(
new
PointRecord
(
eventId
,
timestamp
,
i
,
x
,
y
,
collectLog
.
getResolution_x
(),
collectLog
.
getResolution_y
()));
}
catch
(
NumberFormatException
e
)
{
logger
.
warn
(
"Coordinate parse failed: {} | Error: {}"
,
pointStr
,
e
.
getMessage
());
}
}
}
// 辅助方法:坐标格式校验
private
boolean
isValidPointFormat
(
String
point
)
{
int
firstIdx
=
point
.
indexOf
(
'&'
);
int
lastIdx
=
point
.
lastIndexOf
(
'&'
);
return
!
point
.
isEmpty
()
&&
firstIdx
!=
-
1
&&
firstIdx
==
lastIdx
&&
!
point
.
startsWith
(
"."
)
&&
!
point
.
endsWith
(
"."
);
}
// 辅助方法:坐标拆分
private
String
[]
splitPoint
(
String
point
)
{
return
point
.
split
(
"&"
,
2
);
// 限制分割次数
}
// 辅助方法:数据清洗
private
String
cleanPointString
(
String
raw
)
{
return
Optional
.
ofNullable
(
raw
)
.
map
(
s
->
s
.
replace
(
"["
,
""
)
.
replace
(
"]"
,
""
)
.
replace
(
"\""
,
""
)
.
trim
())
.
orElse
(
""
);
}
// 定时器清理滞留事件
@Override
public
void
onTimer
(
long
timestamp
,
OnTimerContext
ctx
,
Collector
<
PointRecord
>
out
)
{
try
{
pendingEventLog
.
remove
(
timestamp
-
60000
);
logger
.
info
(
"Cleaned expired events at {}"
,
timestamp
);
}
catch
(
Exception
e
)
{
logger
.
error
(
"Timer error: {}"
,
e
.
getMessage
(),
e
);
}
}
}
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorAngleProcessor.java
View file @
19f3245f
...
...
@@ -8,6 +8,8 @@ import org.apache.flink.api.java.tuple.Tuple2;
import
org.apache.flink.configuration.Configuration
;
import
org.apache.flink.streaming.api.functions.KeyedProcessFunction
;
import
org.apache.flink.util.Collector
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
com.flink.achieve.doris.VectorAngleCalculationAchi.PointRecord
;
import
com.flink.achieve.doris.VectorAngleCalculationAchi.ResultRecord
;
...
...
@@ -23,6 +25,7 @@ public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Lo
*
*/
private
static
final
long
serialVersionUID
=
1L
;
private
static
final
Logger
logger
=
LoggerFactory
.
getLogger
(
VectorAngleProcessor
.
class
);
private
transient
ValueState
<
VectorState
>
vectorState
;
...
...
@@ -39,6 +42,7 @@ public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Lo
public
void
processElement
(
PointRecord
record
,
KeyedProcessFunction
<
Tuple2
<
String
,
Long
>,
PointRecord
,
ResultRecord
>.
Context
ctx
,
Collector
<
ResultRecord
>
out
)
throws
Exception
{
logger
.
info
(
"processElement >>>start!"
);
VectorState
state
=
vectorState
.
value
();
double
vectorX
,
vectorY
,
vectorM
,
pointV
;
// 处理第一条记录
...
...
@@ -66,8 +70,8 @@ public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Lo
// 计算弧度和角度
double
radianV
=
Math
.
acos
(
cosV
);
double
angleV
=
radianV
*
180
/
Math
.
PI
;
// 构建结果记录(对应最终SELECT)
ResultRecord
result
=
new
ResultRecord
(
// 构建结果记录(对应最终SELECT)
ResultRecord
result
=
new
ResultRecord
(
record
.
id
,
record
.
eventTime
,
record
.
rowNum
,
...
...
@@ -83,6 +87,24 @@ public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Lo
record
.
resolutionX
,
record
.
resolutionY
);
logger
.
info
(
"VectorAngleProcessor processElement >>>end! id:{},eventTime:{},rowNum:{},"
+
"positionX:{},positionY:{},vectorX:{},vectorY:{},vectorM:{},"
+
"pointV:{},cosV:{},angleV:{},radianV:{},resolutionX:{},resolutionY:{}"
,
record
.
id
,
record
.
eventTime
,
record
.
rowNum
,
record
.
positionX
,
record
.
positionY
,
vectorX
,
vectorY
,
vectorM
,
pointV
,
cosV
,
angleV
,
radianV
,
record
.
resolutionX
,
record
.
resolutionY
);
out
.
collect
(
result
);
// 更新状态(当前记录成为下一条的"前一条")
...
...
eagleEye-flink_kafka/src/main/java/com/flink/util/TimeConvertUtil.java
View file @
19f3245f
...
...
@@ -9,6 +9,7 @@ import java.time.format.DateTimeFormatter;
import
java.util.TimeZone
;
import
org.apache.flink.table.data.TimestampData
;
import
org.apache.kerby.util.SysUtil
;
/**
* @author wjs
...
...
@@ -55,4 +56,18 @@ public class TimeConvertUtil {
DateTimeFormatter
formatter
=
DateTimeFormatter
.
ofPattern
(
"yyyy-MM-dd HH:mm:ss"
);
return
dateTime
.
format
(
formatter
);
}
public
static
String
parseToStringSSS
(
Long
timestamp
)
{
// 步骤1:时间戳 → Instant对象
Instant
instant
=
Instant
.
ofEpochMilli
(
timestamp
);
// 步骤2:指定时区 → 转为LocalDateTime
LocalDateTime
dateTime
=
LocalDateTime
.
ofInstant
(
instant
,
ZoneId
.
systemDefault
());
// 步骤3:定义格式 → 生成字符串
DateTimeFormatter
formatter
=
DateTimeFormatter
.
ofPattern
(
"yyyy-MM-dd HH:mm:ss.SSS"
);
return
dateTime
.
format
(
formatter
);
}
public
static
void
main
(
String
[]
args
)
{
String
aa
=
parseToStringSSS
(
1750739369000L
);
System
.
out
.
println
(
aa
);
}
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/Properties.java
View file @
19f3245f
...
...
@@ -21,6 +21,12 @@ public class Properties implements Serializable{
    private String r5;
    // Point-string list consumed via Properties::getR6 — presumably "x&y"
    // coordinate strings; verify against PointRecordJoinProcessor.
    private List<String> r6;
    // NOTE(review): semantics of the fields below inferred from names — confirm.
    private String data;
    private Long startTime;        // assumed epoch millis — TODO confirm units
    private String timeDifference;
    private Long endTime;          // assumed epoch millis — TODO confirm units
    private String userId;
public
String
getR1
()
{
return
r1
;
...
...
@@ -58,5 +64,35 @@ public class Properties implements Serializable{
    /** Sets the point-string list. */
    public void setR6(List<String> r6) {
        this.r6 = r6;
    }

    /** @return the raw data payload */
    public String getData() {
        return data;
    }

    /** Sets the raw data payload. */
    public void setData(String data) {
        this.data = data;
    }

    /** @return the start time (assumed epoch millis — TODO confirm) */
    public Long getStartTime() {
        return startTime;
    }

    /** Sets the start time. */
    public void setStartTime(Long startTime) {
        this.startTime = startTime;
    }

    /** @return the time difference (string-typed; format not shown here) */
    public String getTimeDifference() {
        return timeDifference;
    }

    /** Sets the time difference. */
    public void setTimeDifference(String timeDifference) {
        this.timeDifference = timeDifference;
    }

    /** @return the end time (assumed epoch millis — TODO confirm) */
    public Long getEndTime() {
        return endTime;
    }

    /** Sets the end time. */
    public void setEndTime(Long endTime) {
        this.endTime = endTime;
    }

    /** @return the user identifier */
    public String getUserId() {
        return userId;
    }

    /** Sets the user identifier. */
    public void setUserId(String userId) {
        this.userId = userId;
    }
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment