Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
eagleEye
/
eagleEye-flink_kafka
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
0
Merge Requests
0
Pipelines
Wiki
Snippets
Settings
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
25c910d7
authored
Jun 23, 2025
by
魏建枢
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
事件采集作业
parent
18db721c
Hide whitespace changes
Inline
Side-by-side
Showing
12 changed files
with
1060 additions
and
0 deletions
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/EventLogAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/VectorAngleCalculationAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/enums/JobTypeEnum.java
eagleEye-flink_kafka/src/main/java/com/flink/factory/JobProcessorFactory.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorAngleProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/impl/EventLogProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/impl/VectorAngleCalculationProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/util/TimeConvertUtil.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/CollectLogToJsonSource.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/EventList.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/EventLogToJsonSource.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/Properties.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/EventLogAchi.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
achieve
.
doris
;
import
java.io.Serializable
;
import
java.time.LocalDateTime
;
import
java.time.format.DateTimeFormatter
;
import
java.util.Objects
;
import
org.apache.commons.lang3.StringUtils
;
import
org.apache.doris.flink.sink.DorisSink
;
import
org.apache.flink.api.common.io.ParseException
;
import
org.apache.flink.streaming.api.datastream.DataStreamSource
;
import
org.apache.flink.table.api.DataTypes
;
import
org.apache.flink.table.data.GenericRowData
;
import
org.apache.flink.table.data.RowData
;
import
org.apache.flink.table.data.StringData
;
import
org.apache.flink.table.data.TimestampData
;
import
org.apache.flink.table.types.DataType
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
com.alibaba.fastjson.JSONObject
;
import
com.alibaba.fastjson.TypeReference
;
import
com.flink.common.DorisConnector
;
import
com.flink.common.SourceCommonBase
;
import
com.flink.config.TableConfig
;
import
com.flink.processor.function.UserPropertiesProcessor
;
import
com.flink.util.TimeConvertUtil
;
import
com.flink.vo.OdsEventLog
;
import
com.flink.vo.UserProperties
;
/**
* @author wjs
* @version 创建时间:2025-6-20 23:40:33
* 类说明
*/
public
class
EventLogAchi
extends
SourceCommonBase
implements
Serializable
{
/**
*
*/
private
static
final
long
serialVersionUID
=
1L
;
private
static
final
Logger
logger
=
LoggerFactory
.
getLogger
(
EventLogAchi
.
class
);
@Override
public
void
parseSourceKafkaJson
(
DataStreamSource
<
String
>
dataStreamSource
)
throws
ParseException
,
Exception
{
// =================配置入库字段=========================================
// 事件明细表结构
TableConfig
tableConfig
=
new
TableConfig
(
new
String
[]{
"id"
,
"dt"
,
"send_time"
,
"create_time"
,
"strategy_group_id"
,
"app_key"
,
"app_type"
,
"app_channel"
,
"zone_code"
,
"zone_name"
,
"zone_type"
,
"sdk_version"
,
"user_agent"
,
"device_id"
,
"uid"
,
"strategy_version"
,
"event_list"
,
"route_ip"
,
"cid"
,
"phone"
,
"nick"
,
"unique_id"
,
"__DORIS_DELETE_SIGN__"
},
new
DataType
[]{
DataTypes
.
STRING
(),
DataTypes
.
DATE
(),
DataTypes
.
TIMESTAMP
(
3
),
DataTypes
.
TIMESTAMP
(
3
),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
INT
()
},
"bi.event_log"
);
//=================流式处理=========================================
DorisSink
<
RowData
>
dorisSink
=
DorisConnector
.
sinkDoris
(
tableConfig
.
getFields
(),
tableConfig
.
getTypes
(),
tableConfig
.
getTableName
());
//=================数据处理流水线=========================================
dataStreamSource
.
map
(
value
->{
try
{
// 解析 Kafka 数据
OdsEventLog
event
=
JSONObject
.
parseObject
(
value
,
new
TypeReference
<
OdsEventLog
>()
{});
String
createTime
=
event
.
getCreate_time
();
String
routeIp
=
event
.
getRoute_ip
();
String
appKey
=
event
.
getApp_key
().
trim
();
String
appType
=
event
.
getApp_type
();
String
sendTime
=
event
.
getSend_time
();
if
(
StringUtils
.
isEmpty
(
appKey
)
||
StringUtils
.
equals
(
appKey
,
"C7jias27jias2"
))
{
appKey
=
"8ooOvXJo276"
;
}
UserProperties
userProperties
=
UserPropertiesProcessor
.
userPropertiesToJson
(
event
.
getUser_properties
());
// 转换为RowData
GenericRowData
row
=
new
GenericRowData
(
tableConfig
.
getFields
().
length
);
row
.
setField
(
0
,
StringData
.
fromString
(
event
.
getId
()));
row
.
setField
(
1
,
TimeConvertUtil
.
convertToSqlDate
(
createTime
.
substring
(
0
,
10
)));
row
.
setField
(
2
,
TimestampData
.
fromLocalDateTime
(
LocalDateTime
.
parse
(
sendTime
,
DateTimeFormatter
.
ofPattern
(
"yyyy-MM-dd HH:mm:ss"
))));
row
.
setField
(
3
,
TimestampData
.
fromLocalDateTime
(
LocalDateTime
.
parse
(
createTime
,
DateTimeFormatter
.
ofPattern
(
"yyyy-MM-dd HH:mm:ss.SSS"
))));
row
.
setField
(
4
,
StringData
.
fromString
(
event
.
getStrategy_group_id
()));
row
.
setField
(
5
,
StringData
.
fromString
(
event
.
getApp_key
()));
row
.
setField
(
6
,
StringData
.
fromString
(
event
.
getApp_type
()));
row
.
setField
(
7
,
StringData
.
fromString
(
event
.
getApp_channel
()));
row
.
setField
(
8
,
StringData
.
fromString
(
event
.
getZone_code
()));
row
.
setField
(
9
,
StringData
.
fromString
(
event
.
getZone_name
()));
row
.
setField
(
10
,
StringData
.
fromString
(
event
.
getZone_type
()));
row
.
setField
(
11
,
StringData
.
fromString
(
event
.
getSdk_version
()));
row
.
setField
(
12
,
StringData
.
fromString
(
event
.
getUser_agent
()));
row
.
setField
(
13
,
StringData
.
fromString
(
event
.
getDevice_id
()));
row
.
setField
(
14
,
StringData
.
fromString
(
event
.
getUid
()));
row
.
setField
(
15
,
StringData
.
fromString
(
event
.
getStrategy_version
()));
row
.
setField
(
16
,
StringData
.
fromString
(
event
.
getEvent_list
()));
row
.
setField
(
17
,
StringData
.
fromString
(
event
.
getRoute_ip
()));
row
.
setField
(
18
,
StringData
.
fromString
(
userProperties
==
null
?
null
:
userProperties
.
getCid
()));
row
.
setField
(
19
,
StringData
.
fromString
(
userProperties
==
null
?
null
:
userProperties
.
getPhone
()));
row
.
setField
(
20
,
StringData
.
fromString
(
userProperties
==
null
?
null
:
userProperties
.
getNick
()));
row
.
setField
(
21
,
StringData
.
fromString
(
event
.
getUnique_id
()));
row
.
setField
(
22
,
0
);
return
(
RowData
)
row
;
}
catch
(
Exception
e
)
{
System
.
err
.
println
(
"解析失败: "
+
e
.
toString
());
return
null
;
}
})
.
filter
(
Objects:
:
nonNull
)
// .print()
.
sinkTo
(
dorisSink
)
.
name
(
"Doris-CollectLog"
);
}
@Override
public
void
sendToSinkKafka
(
DataStreamSource
<
String
>
mStream
)
{
// TODO Auto-generated method stub
}
}
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/VectorAngleCalculationAchi.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
achieve
.
doris
;
import
java.io.Serializable
;
import
java.time.Duration
;
import
java.util.Collections
;
import
java.util.HashMap
;
import
java.util.List
;
import
java.util.Map
;
import
java.util.Objects
;
import
java.util.Optional
;
import
org.apache.commons.collections.CollectionUtils
;
import
org.apache.commons.lang3.StringUtils
;
import
org.apache.flink.api.common.eventtime.WatermarkStrategy
;
import
org.apache.flink.api.common.functions.FlatMapFunction
;
import
org.apache.flink.api.common.functions.RichCoGroupFunction
;
import
org.apache.flink.api.common.io.ParseException
;
import
org.apache.flink.api.common.state.ListState
;
import
org.apache.flink.api.common.state.ListStateDescriptor
;
import
org.apache.flink.api.java.functions.KeySelector
;
import
org.apache.flink.api.java.tuple.Tuple2
;
import
org.apache.flink.configuration.Configuration
;
import
org.apache.flink.streaming.api.datastream.DataStream
;
import
org.apache.flink.streaming.api.datastream.DataStreamSource
;
import
org.apache.flink.streaming.api.datastream.KeyedStream
;
import
org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
;
import
org.apache.flink.util.Collector
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
com.alibaba.fastjson.JSONObject
;
import
com.alibaba.fastjson.TypeReference
;
import
com.flink.common.MultipleSourceCommonBase
;
import
com.flink.enums.AppTypeEnum
;
import
com.flink.enums.TopicTypeEnum
;
import
com.flink.processor.function.UserPropertiesProcessor
;
import
com.flink.processor.function.VectorAngleProcessor
;
import
com.flink.util.TimeConvertUtil
;
import
com.flink.vo.CollectLog
;
import
com.flink.vo.CollectLogToJsonSource
;
import
com.flink.vo.EventList
;
import
com.flink.vo.EventLogToJsonSource
;
import
com.flink.vo.KafkaDataSource
;
import
com.flink.vo.OdsEventLog
;
import
com.flink.vo.Properties
;
import
com.flink.vo.UserProperties
;
import
com.flink.vo.android.deviceInfo.AndroidA1
;
import
com.flink.vo.ios.IosDeviceInfo
;
/**
* @author wjs
* @version 创建时间:2025-6-5 10:49:50
* 类说明
*/
public
class
VectorAngleCalculationAchi
extends
MultipleSourceCommonBase
implements
Serializable
{
/**
*
*/
private
static
final
long
serialVersionUID
=
1L
;
private
static
final
Logger
logger
=
LoggerFactory
.
getLogger
(
VectorAngleCalculationAchi
.
class
);
/**
 * Builds the vector-angle pipeline from the two Kafka topics.
 *
 * Steps: parse both topics into typed records -> assign event-time watermarks
 * (5s bounded out-of-orderness) -> key both streams by {@code JoinKey}
 * (uniqueId, deviceId, cid, phone) -> coGroup inside 5-minute tumbling
 * event-time windows, exploding each event's draw-point list ("x&y" strings)
 * into {@code PointRecord}s -> re-key by (event id, event time) -> run
 * {@code VectorAngleProcessor} -> print the results.
 *
 * @param dataSourceList Kafka sources registered for this job; must contain
 *                       the ods_event_log and ods_new_collect_log topics
 * @throws ParseException declared by the base contract
 * @throws Exception      from Flink pipeline construction
 */
@Override
public void parseSourceKafkaJson(List<KafkaDataSource> dataSourceList) throws ParseException, Exception {
    DataStreamSource<String> collectLogStreamSource = null;
    DataStreamSource<String> eventLogStreamSource = null;
    // Pick the two expected topics out of the configured source list.
    if (CollectionUtils.isNotEmpty(dataSourceList)) {
        for (KafkaDataSource kafkaDataSource : dataSourceList) {
            if (StringUtils.equals(kafkaDataSource.getTopic(), TopicTypeEnum.ODS_EVENT_LOG.getTopic())) {
                eventLogStreamSource = kafkaDataSource.getDataStreamSource();
            }
            if (StringUtils.equals(kafkaDataSource.getTopic(), TopicTypeEnum.ODS_NEW_COLLECT_LOG.getTopic())) {
                collectLogStreamSource = kafkaDataSource.getDataStreamSource();
            }
        }
    } else {
        // No sources configured — nothing to build.
        return;
    }
    // ---- event stream: raw JSON -> EventLogToJsonSource ------------------------------
    DataStream<EventLogToJsonSource> eventDataStream = eventLogStreamSource.flatMap(
            new FlatMapFunction<String, EventLogToJsonSource>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void flatMap(String value, Collector<EventLogToJsonSource> out) throws Exception {
            try {
                // Parse one Kafka message; drop silently when it is not valid JSON.
                OdsEventLog odsEventLog = JSONObject.parseObject(value, new TypeReference<OdsEventLog>() {
                });
                if (null == odsEventLog) {
                    return;
                }
                String id = odsEventLog.getId();
                String uniqueId = odsEventLog.getUnique_id();
                String deviceId = odsEventLog.getDevice_id();
                String event_list = odsEventLog.getEvent_list();
                String createTime = odsEventLog.getCreate_time();
                UserProperties userProperties =
                        UserPropertiesProcessor.userPropertiesToJson(odsEventLog.getUser_properties());
                // No event list -> nothing to explode downstream.
                if (StringUtils.isEmpty(odsEventLog.getEvent_list())) {
                    return;
                }
                List<EventList> eventList = JSONObject.parseObject(event_list,
                        new TypeReference<List<EventList>>() {
                });
                // NOTE(review): userProperties is dereferenced without a null check here;
                // a null result is caught by the catch below and the record is dropped.
                EventLogToJsonSource eventLogToJsonSource = new EventLogToJsonSource(
                        id,
                        uniqueId,
                        deviceId,
                        userProperties.getCid(),
                        userProperties.getPhone(),
                        userProperties.getNick(),
                        eventList,
                        TimeConvertUtil.convertToTimestamp(createTime)
                );
                // Freshly constructed, so this check can never be false; kept as-is.
                if (eventLogToJsonSource != null)
                    out.collect(eventLogToJsonSource);
            } catch (Exception e) {
                logger.error("Error parsing ods_event_log 处理 Kafka 消息出错 | data:{} | error:{}",
                        value, e.getMessage());
            }
        }
    }).assignTimestampsAndWatermarks(
            // Event time comes from the record's createTime; tolerate 5s of disorder.
            WatermarkStrategy.<EventLogToJsonSource>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                    .withTimestampAssigner((event, ts) -> event.getCreateTime()))
            // Keep only records that carry a complete join key and a non-empty event list.
            .filter(event -> event != null
                    && StringUtils.isNotEmpty(event.getUniqueId())
                    && StringUtils.isNotEmpty(event.getDeviceId())
                    && StringUtils.isNotEmpty(event.getCid())
                    && StringUtils.isNotEmpty(event.getPhone())
                    && CollectionUtils.isNotEmpty(event.getEventList()))
            .keyBy(EventLogToJsonSource::getJoinKey);
    // ---- device-info stream: raw JSON -> CollectLogToJsonSource ----------------------
    DataStream<CollectLogToJsonSource> collectDataStream = collectLogStreamSource.flatMap(
            new FlatMapFunction<String, CollectLogToJsonSource>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void flatMap(String value, Collector<CollectLogToJsonSource> out) throws Exception {
            try {
                // Parse one Kafka message; drop silently when it is not valid JSON.
                CollectLog log = JSONObject.parseObject(value, new TypeReference<CollectLog>() {
                });
                if (null == log) {
                    return;
                }
                String deviceId = log.getDeviceId();
                String appType = log.getAppType();
                String device_info = log.getDeviceInfo();
                String uniqueId = log.getUniqueId();
                String createTime = log.getCreateTime();
                // Resolution lives inside device_info; without it this record is useless.
                if (StringUtils.isEmpty(device_info)) {
                    return;
                }
                // Resolution string ("width*height" judging by the splits below) is read
                // from field c3 of the platform-specific device-info payload.
                String resolution = null;
                if (StringUtils.equals(appType, AppTypeEnum.ANDROID.getCode())) {
                    AndroidA1 a1 = JSONObject.parseObject(device_info, new TypeReference<AndroidA1>() {
                    });
                    resolution = a1.getC3();
                } else if (StringUtils.equals(appType, AppTypeEnum.IOS.getCode())) {
                    IosDeviceInfo a1 = JSONObject.parseObject(device_info, new TypeReference<IosDeviceInfo>() {
                    });
                    resolution = a1.getC3();
                }
                UserProperties userProperties =
                        UserPropertiesProcessor.userPropertiesToJson(log.getUserProperties());
                // NOTE(review): resolution stays null for unknown app types and
                // userProperties may be null; both NPEs are swallowed by the catch
                // below, dropping the record.
                CollectLogToJsonSource collectLogToJsonSource = new CollectLogToJsonSource(
                        deviceId,
                        uniqueId,
                        userProperties.getCid(),
                        userProperties.getPhone(),
                        userProperties.getNick(),
                        Integer.valueOf(resolution.split("\\*")[0]),
                        Integer.valueOf(resolution.split("\\*")[1]),
                        TimeConvertUtil.convertToTimestamp(createTime)
                );
                // Freshly constructed, so this check can never be false; kept as-is.
                if (collectLogToJsonSource != null)
                    out.collect(collectLogToJsonSource);
            } catch (Exception e) {
                logger.error("Error parsing ods_new_collect_log 处理 Kafka 消息出错 | data:{} | error:{}",
                        value, e.getMessage());
            }
        }
    })
            .assignTimestampsAndWatermarks(
                    // Event time comes from the collect record's createTime; 5s disorder budget.
                    WatermarkStrategy.<CollectLogToJsonSource>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                            .withTimestampAssigner((collectLog, ts) -> collectLog.getCollectTime()))
            // Keep only records carrying a complete join key.
            .filter(collectLog -> collectLog != null
                    && StringUtils.isNotEmpty(collectLog.getUniqueId())
                    && StringUtils.isNotEmpty(collectLog.getDeviceId())
                    && StringUtils.isNotEmpty(collectLog.getPhone())
                    && StringUtils.isNotEmpty(collectLog.getCid()))
            .keyBy(CollectLogToJsonSource::getJoinKey);
    // ---- step 1: join the two streams and explode each event's draw_point list ------
    DataStream<PointRecord> pointRecordStream = eventDataStream.coGroup(collectDataStream)
            .where(EventLogToJsonSource::getJoinKey)
            .equalTo(CollectLogToJsonSource::getJoinKey)
            .window(TumblingEventTimeWindows.of(Duration.ofMinutes(5)))
            .apply(new RichCoGroupFunction<EventLogToJsonSource, CollectLogToJsonSource, PointRecord>() {

        private static final long serialVersionUID = 1L;

        // Collect-side records buffered per key; populated and cleared each window.
        private transient ListState<CollectLogToJsonSource> collectState;

        @Override
        public void open(Configuration parameters) {
            // Initialise the keyed state holding the collect-side records.
            ListStateDescriptor<CollectLogToJsonSource> stateDescriptor = new ListStateDescriptor<>(
                    "collect-state",
                    CollectLogToJsonSource.class
            );
            collectState = getRuntimeContext().getListState(stateDescriptor);
        }

        @Override
        public void coGroup(Iterable<EventLogToJsonSource> events,
                Iterable<CollectLogToJsonSource> collects,
                Collector<PointRecord> out) throws Exception {
            // Buffer the collect-side of this window into state.
            collects.forEach(collect -> {
                try {
                    collectState.add(collect);
                } catch (Exception e) {
                    logger.error("coGroup Error parsing CollectLogToJsonSource 处理 Kafka 消息出错 | data:{} | error:{}",
                            collect, e.getMessage());
                }
            });
            // Local lookup map for O(1) matching per event. Note: entries sharing a
            // JoinKey overwrite each other — the last collect record wins.
            Map<JoinKey, CollectLogToJsonSource> collectMap = new HashMap<>();
            collectState.get().forEach(collect -> collectMap.put(collect.getJoinKey(), collect));
            for (EventLogToJsonSource event : events) {
                CollectLogToJsonSource matchedCollect = collectMap.get(event.getJoinKey());
                // Inner-join semantics: events without a matching collect record are dropped.
                if (matchedCollect == null)
                    continue;
                for (EventList eventLog : event.getEventList()) {
                    // Draw points live at eventLog.r8.r6; either level may be absent.
                    List<String> pointList = Optional.ofNullable(eventLog.getR8())
                            .map(Properties::getR6)
                            .orElse(Collections.emptyList());
                    for (int i = 0; i < pointList.size(); i++) {
                        String pointStr = pointList.get(i);
                        // Each point is encoded as "x&y".
                        String[] xy = pointStr.split("&", 2);
                        // Format check.
                        if (xy.length != 2) {
                            logger.error("Invalid point format:{},in event ID:{} ", pointStr, event.getId());
                            continue;
                        }
                        // Coordinate parsing (tolerates malformed numbers).
                        try {
                            PointRecord pointRecord = new PointRecord(
                                    event.getId(),
                                    eventLog.getR9(),
                                    i,
                                    Double.parseDouble(xy[0].trim()),
                                    Double.parseDouble(xy[1].trim()),
                                    matchedCollect.getResolution_x(),
                                    matchedCollect.getResolution_y()
                            );
                            out.collect(pointRecord);
                        } catch (NumberFormatException e) {
                            logger.error("Coordinate parsing error:{},in event ID:{},e:{} ",
                                    pointStr, event.getId(), e.getMessage());
                        }
                    }
                }
            }
            // Clear this window's buffered collect records.
            collectState.clear();
        }
    });
    // ---- step 2: re-key the exploded points by (event id, event time) ---------------
    KeyedStream<PointRecord, Tuple2<String, Long>> keyedStream = pointRecordStream.keyBy(
            new KeySelector<PointRecord, Tuple2<String, Long>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public Tuple2<String, Long> getKey(PointRecord r) {
            return Tuple2.of(r.id, r.eventTime);
        }
    });
    // ---- step 3: compute vectors and angles per key ---------------------------------
    DataStream<ResultRecord> resultStream = keyedStream.process(new VectorAngleProcessor())
            .name("vector-angle-calculation");
    resultStream.print("<<<<<<<<<<<<<<<<计算向量和角度");
}
// Final result POJO: one draw point plus the vector/angle metrics computed by
// VectorAngleProcessor. Fields are intentionally public — Flink treats this as a
// POJO and VectorAngleProcessor reads/writes them directly.
public static class ResultRecord {
    public String id;          // source event id
    public long eventTime;     // event timestamp carried from PointRecord (EventList.r9)
    public int rowNum;         // index of the point within its draw-point list
    public double positionX;   // point x coordinate
    public double positionY;   // point y coordinate
    public double vectorX;     // displacement x from the previous point
    public double vectorY;     // displacement y from the previous point
    public double vectorM;     // magnitude of the displacement vector
    public double pointV;      // dot product with the previous vector
    public double cosV;        // cosine of the turn angle, clamped to [-1, 1]
    public double angleV;      // turn angle in degrees
    public double radianV;     // turn angle in radians
    public int resolutionX;    // screen width from the matched collect record
    public int resolutionY;    // screen height from the matched collect record
    // public String dt; // date column (disabled in the original)

    public ResultRecord(String id, long eventTime, int rowNum, double posX, double posY,
            double vecX, double vecY, double vecM, double pointV, double cosV,
            double angleV, double radianV, int resX, int resY) {
        this.id = id;
        this.eventTime = eventTime;
        this.rowNum = rowNum;
        this.positionX = posX;
        this.positionY = posY;
        this.vectorX = vecX;
        this.vectorY = vecY;
        this.vectorM = vecM;
        this.pointV = pointV;
        this.cosV = cosV;
        this.angleV = angleV;
        this.radianV = radianV;
        this.resolutionX = resX;
        this.resolutionY = resY;
        // this.dt = convertToDateString(eventTime);
    }

    // private String convertToDateString(long timestamp) {
    //     return LocalDate.ofInstant(new Date(timestamp).toInstant(), ZoneId.systemDefault()).toString();
    // }

    /** Debug-friendly rendering of all metrics (used by resultStream.print()). */
    @Override
    public String toString() {
        return String.format(
                "id=%s, event_time=%d, row_num=%d, x=%.2f, y=%.2f, "
                        + "vec_x=%.2f, vec_y=%.2f, vec_m=%.4f, point_v=%.2f, "
                        + "cos=%.4f, angle=%.2f°, radian=%.4f, res=%dx%d",
                id, eventTime, rowNum, positionX, positionY, vectorX, vectorY,
                vectorM, pointV, cosV, angleV, radianV, resolutionX, resolutionY);
    }
}
/**
 * Composite key (uniqueId, deviceId, cid, phone) used to key and co-group the
 * event and collect streams. equals/hashCode must cover exactly these four
 * fields — Flink hashes and compares keys through them.
 */
public static class JoinKey implements Serializable {

    private static final long serialVersionUID = 1L;

    private String uniqueId;
    private String deviceId;
    private String cid;
    private String phone;

    /** Builds a key from the four identifying attributes of a record. */
    public JoinKey(String uniqueId, String deviceId, String cid, String phone) {
        this.phone = phone;
        this.cid = cid;
        this.deviceId = deviceId;
        this.uniqueId = uniqueId;
    }

    public String getUniqueId() {
        return uniqueId;
    }

    public void setUniqueId(String value) {
        this.uniqueId = value;
    }

    public String getDeviceId() {
        return deviceId;
    }

    public void setDeviceId(String value) {
        this.deviceId = value;
    }

    public String getCid() {
        return cid;
    }

    public void setCid(String value) {
        this.cid = value;
    }

    public String getPhone() {
        return phone;
    }

    public void setPhone(String value) {
        this.phone = value;
    }

    /** Hash over all four key components, consistent with equals. */
    @Override
    public int hashCode() {
        return Objects.hash(uniqueId, deviceId, cid, phone);
    }

    /** Null-safe, exact-class equality over all four key components. */
    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null) {
            return false;
        }
        if (getClass() != other.getClass()) {
            return false;
        }
        JoinKey rhs = (JoinKey) other;
        return Objects.equals(uniqueId, rhs.uniqueId)
                && Objects.equals(deviceId, rhs.deviceId)
                && Objects.equals(cid, rhs.cid)
                && Objects.equals(phone, rhs.phone);
    }
}
// 2. 关联结果类型
public
class
JoinedResult
{
private
String
id
;
private
String
eventTime
;
private
int
resolution_x
;
private
int
resolution_y
;
private
List
<
String
>
draw_point
;
public
JoinedResult
(
String
id
,
String
eventTime
,
int
resolution_x
,
int
resolution_y
,
List
<
String
>
draw_point
)
{
this
.
id
=
id
;
this
.
eventTime
=
eventTime
;
this
.
resolution_x
=
resolution_x
;
this
.
resolution_y
=
resolution_y
;
this
.
draw_point
=
draw_point
;
}
public
String
getId
()
{
return
id
;
}
public
void
setId
(
String
id
)
{
this
.
id
=
id
;
}
public
String
getEventTime
()
{
return
eventTime
;
}
public
void
setEventTime
(
String
eventTime
)
{
this
.
eventTime
=
eventTime
;
}
public
int
getResolution_x
()
{
return
resolution_x
;
}
public
void
setResolution_x
(
int
resolution_x
)
{
this
.
resolution_x
=
resolution_x
;
}
public
int
getResolution_y
()
{
return
resolution_y
;
}
public
void
setResolution_y
(
int
resolution_y
)
{
this
.
resolution_y
=
resolution_y
;
}
public
List
<
String
>
getDraw_point
()
{
return
draw_point
;
}
public
void
setDraw_point
(
List
<
String
>
draw_point
)
{
this
.
draw_point
=
draw_point
;
}
}
// Point-level record (output of the coGroup "explode" step, t2 in the original SQL):
// one draw point of one event, tagged with the device's screen resolution.
// Fields are public on purpose — Flink POJO access and VectorAngleProcessor read them directly.
public static class PointRecord {
    public String id;        // source event id
    public long eventTime;   // event timestamp taken from EventList.r9
    public int rowNum;       // index of the point within the draw-point list
    public double positionX; // x half of the "x&y" point string
    public double positionY; // y half of the "x&y" point string
    public int resolutionX;  // screen width from the matched collect record
    public int resolutionY;  // screen height from the matched collect record

    public PointRecord(String id, long eventTime, int rowNum, double posX, double posY,
            int resX, int resY) {
        this.id = id;
        this.eventTime = eventTime;
        this.rowNum = rowNum;
        this.positionX = posX;
        this.positionY = posY;
        this.resolutionX = resX;
        this.resolutionY = resY;
    }
}
}
\ No newline at end of file
eagleEye-flink_kafka/src/main/java/com/flink/enums/JobTypeEnum.java
View file @
25c910d7
...
...
@@ -28,6 +28,7 @@ public enum JobTypeEnum {
SIMI_FRIENDS
(
"JOB_09"
,
"SIMI好友作业"
),
SIMI_GROUPS
(
"JOB_10"
,
"SIMI群组作业"
),
VECTOR_ANGLE_CALCULATION
(
"JOB_11"
,
"矢量角度计算作业"
),
EVENT_LOG
(
"JOB_12"
,
"事件采集作业"
),
;
...
...
eagleEye-flink_kafka/src/main/java/com/flink/factory/JobProcessorFactory.java
View file @
25c910d7
...
...
@@ -6,6 +6,7 @@ import com.flink.processor.impl.CollectLogProcessor;
import
com.flink.processor.impl.DeviceIdLatestProcessor
;
import
com.flink.processor.impl.EventIpConvertProcessor
;
import
com.flink.processor.impl.EventIpLatestProcessor
;
import
com.flink.processor.impl.EventLogProcessor
;
import
com.flink.processor.impl.RealBalanceProcessor
;
import
com.flink.processor.impl.RealKycProcessor
;
import
com.flink.processor.impl.RealTransactionProcessor
;
...
...
@@ -45,6 +46,8 @@ public class JobProcessorFactory {
return
new
SimiGroupstProcessor
();
case
VECTOR_ANGLE_CALCULATION:
return
new
VectorAngleCalculationProcessor
();
case
EVENT_LOG:
return
new
EventLogProcessor
();
default
:
throw
new
IllegalArgumentException
(
"未知的Job类型: "
+
jobType
);
}
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorAngleProcessor.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
processor
.
function
;
import
org.apache.flink.api.common.state.ValueState
;
import
org.apache.flink.api.common.state.ValueStateDescriptor
;
import
org.apache.flink.api.common.typeinfo.TypeHint
;
import
org.apache.flink.api.common.typeinfo.TypeInformation
;
import
org.apache.flink.api.java.tuple.Tuple2
;
import
org.apache.flink.configuration.Configuration
;
import
org.apache.flink.streaming.api.functions.KeyedProcessFunction
;
import
org.apache.flink.util.Collector
;
import
com.flink.achieve.doris.VectorAngleCalculationAchi.PointRecord
;
import
com.flink.achieve.doris.VectorAngleCalculationAchi.ResultRecord
;
/**
 * Computes, per (event id, event time) key, the movement vector between
 * consecutive draw points and the turn angle between the current and previous
 * vector (a port of a SQL window calculation, per the inline references to
 * t3 / temp111 / point_v).
 *
 * Keyed state holds the previous point and previous vector; every incoming
 * {@code PointRecord} emits exactly one {@code ResultRecord} and then becomes
 * the new "previous".
 *
 * @author wjs
 * @version created 2025-6-20 16:06:54
 */
public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Long>, PointRecord, ResultRecord> {

    private static final long serialVersionUID = 1L;

    // Previous point/vector per key; transient because Flink re-initialises it in open().
    private transient ValueState<VectorState> vectorState;

    @Override
    public void open(Configuration parameters) {
        ValueStateDescriptor<VectorState> descriptor = new ValueStateDescriptor<>(
                "vectorState",
                TypeInformation.of(new TypeHint<VectorState>() {})
        );
        vectorState = getRuntimeContext().getState(descriptor);
    }

    /**
     * Emits the vector/angle metrics for one draw point and updates the keyed state.
     *
     * @param record incoming draw point (processed in arrival order within the key)
     * @param ctx    keyed context (unused)
     * @param out    collector receiving exactly one ResultRecord per input
     * @throws Exception from state access
     */
    @Override
    public void processElement(PointRecord record,
            KeyedProcessFunction<Tuple2<String, Long>, PointRecord, ResultRecord>.Context ctx,
            Collector<ResultRecord> out) throws Exception {
        VectorState state = vectorState.value();
        double vectorX, vectorY, vectorM, pointV;
        // First record of the key: no previous point yet.
        if (state == null) {
            vectorX = 0;
            vectorY = 0;
            vectorM = 0;
            pointV = 0; // first row's dot product is 0
        } else {
            // Displacement from the previous point (corresponds to t3 in the SQL).
            vectorX = record.positionX - state.prevPositionX;
            vectorY = record.positionY - state.prevPositionY;
            // Vector magnitude (corresponds to temp111).
            vectorM = Math.sqrt(vectorX * vectorX + vectorY * vectorY);
            // Dot product with the previous vector (corresponds to point_v).
            pointV = vectorX * state.prevVectorX + vectorY * state.prevVectorY;
        }
        // Previous vector's magnitude, used in the cosine denominator.
        double prevVectorM = (state != null) ? state.prevVectorM : vectorM;
        // Division-by-zero guard: zero-length vectors default the cosine to 1.0 (angle 0).
        double denominator = vectorM * prevVectorM;
        double cosV = (denominator == 0) ? 1.0 : pointV / denominator;
        // Clamp into [-1, 1] so acos never sees a floating-point artefact.
        cosV = Math.max(-1.0, Math.min(1.0, cosV));
        // Turn angle in radians and degrees.
        double radianV = Math.acos(cosV);
        double angleV = radianV * 180 / Math.PI;
        // Assemble the output row (corresponds to the final SELECT).
        ResultRecord result = new ResultRecord(
                record.id,
                record.eventTime,
                record.rowNum,
                record.positionX,
                record.positionY,
                vectorX,
                vectorY,
                vectorM,
                pointV,
                cosV,
                angleV,
                radianV,
                record.resolutionX,
                record.resolutionY
        );
        out.collect(result);
        // The current point/vector becomes the "previous" for the next element.
        vectorState.update(new VectorState(record.positionX, record.positionY, vectorX, vectorY, vectorM));
    }

    // State carrier: previous point coordinates plus previous vector and its magnitude.
    public static class VectorState {
        public double prevPositionX;
        public double prevPositionY;
        public double prevVectorX;
        public double prevVectorY;
        public double prevVectorM;

        public VectorState(double x, double y, double vx, double vy, double vm) {
            prevPositionX = x;
            prevPositionY = y;
            prevVectorX = vx;
            prevVectorY = vy;
            prevVectorM = vm;
        }
    }
}
eagleEye-flink_kafka/src/main/java/com/flink/processor/impl/EventLogProcessor.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
processor
.
impl
;
import
com.flink.achieve.doris.EventLogAchi
;
import
com.flink.enums.JobTypeEnum
;
import
com.flink.enums.TopicTypeEnum
;
import
com.flink.processor.JobProcessor
;
/**
 * Job entry point for the event-collection job (JOB_12): delegates to
 * {@code EventLogAchi} with the ods_event_log topic.
 *
 * @author wjs
 * @version created 2025-6-20 23:39:38
 */
public class EventLogProcessor implements JobProcessor {

    /** Builds and starts the event-log pipeline. */
    @Override
    public void process() throws Exception {
        EventLogAchi achieve = new EventLogAchi();
        achieve.handleDataStreamSource(JobTypeEnum.EVENT_LOG, TopicTypeEnum.ODS_EVENT_LOG);
    }
}
eagleEye-flink_kafka/src/main/java/com/flink/processor/impl/VectorAngleCalculationProcessor.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
processor
.
impl
;
import
java.util.Arrays
;
import
java.util.List
;
import
java.util.stream.Collectors
;
import
com.flink.achieve.doris.VectorAngleCalculationAchi
;
import
com.flink.enums.JobTypeEnum
;
import
com.flink.enums.TopicTypeEnum
;
import
com.flink.processor.JobProcessor
;
import
com.flink.vo.KafkaTopic
;
/**
* @author wjs
* @version 创建时间:2025-6-18 11:07:02
* 类说明
*/
public
class
VectorAngleCalculationProcessor
implements
JobProcessor
{
@Override
public
void
process
()
throws
Exception
{
new
VectorAngleCalculationAchi
().
handleDataStreamSource
(
createTopicList
(),
JobTypeEnum
.
VECTOR_ANGLE_CALCULATION
);
}
private
static
List
<
KafkaTopic
>
createTopicList
()
{
return
Arrays
.
stream
(
new
TopicTypeEnum
[]{
TopicTypeEnum
.
ODS_EVENT_LOG
,
TopicTypeEnum
.
ODS_NEW_COLLECT_LOG
}).
map
(
TopicTypeEnum:
:
createKafkaTopic
)
.
collect
(
Collectors
.
toList
());
}
}
eagleEye-flink_kafka/src/main/java/com/flink/util/TimeConvertUtil.java
View file @
25c910d7
package
com
.
flink
.
util
;
import
java.text.SimpleDateFormat
;
import
java.time.Instant
;
import
java.time.LocalDate
;
import
java.time.LocalDateTime
;
import
java.time.ZoneId
;
...
...
@@ -44,4 +45,14 @@ public class TimeConvertUtil {
/**
 * Formats a Flink {@code TimestampData} using the class-level FORMATTER
 * (declared outside this view — see where FORMATTER is defined for the pattern).
 */
public static String format(TimestampData timestamp) {
    return FORMATTER.format(timestamp.toLocalDateTime());
}
/**
 * Renders an epoch-millisecond timestamp as "yyyy-MM-dd HH:mm:ss" in the JVM's
 * default time zone.
 *
 * @param timestamp epoch milliseconds; must be non-null (auto-unboxing throws
 *                  NullPointerException otherwise)
 * @return the formatted local date-time string
 */
public static String parseToString(Long timestamp) {
    // millis -> Instant -> zone-local LocalDateTime -> formatted string
    return LocalDateTime
            .ofInstant(Instant.ofEpochMilli(timestamp), ZoneId.systemDefault())
            .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
}
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/CollectLogToJsonSource.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
vo
;
import
java.io.Serializable
;
import
com.flink.achieve.doris.VectorAngleCalculationAchi.JoinKey
;
import
lombok.Data
;
import
lombok.ToString
;
/**
 * Flattened view of one device collect-log record, carrying the screen
 * resolution and the attributes that form the join key with the event stream
 * (see VectorAngleCalculationAchi).
 *
 * @author wjs
 * @version created 2025-6-19 11:09:31
 */
@Data
@ToString
public class CollectLogToJsonSource implements Serializable {

    private static final long serialVersionUID = 1L;

    private String deviceId;
    private String uniqueId;
    private String cid;
    private String phone;
    private String nick;
    private int resolution_x;
    private int resolution_y;
    private Long collectTime;
    // Cached join key — transient so it never travels with the serialized record.
    private transient JoinKey joinKey;

    /** Lazily builds (and caches) the composite join key for this record. */
    public JoinKey getJoinKey() {
        if (joinKey != null) {
            return joinKey;
        }
        joinKey = new JoinKey(uniqueId, deviceId, cid, phone);
        return joinKey;
    }

    public CollectLogToJsonSource(String deviceId, String uniqueId, String cid, String phone,
            String nick, int resolution_x, int resolution_y, Long collectTime) {
        this.collectTime = collectTime;
        this.resolution_y = resolution_y;
        this.resolution_x = resolution_x;
        this.nick = nick;
        this.phone = phone;
        this.cid = cid;
        this.uniqueId = uniqueId;
        this.deviceId = deviceId;
    }
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/EventList.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
vo
;
import
java.io.Serializable
;
import
lombok.Data
;
import
lombok.ToString
;
/**
 * One entry of an event's event_list JSON array.
 *
 * @author wjs
 * @version created 2025-6-18 17:35:56
 */
@Data
@ToString
public class EventList implements Serializable {

    private static final long serialVersionUID = 1L;

    // r7: semantics not visible in this file — presumably an event code; confirm
    // against the SDK payload spec.
    private String r7;
    // r8: nested properties payload; r8.r6 carries the draw-point list of "x&y"
    // strings consumed by the coGroup step.
    private Properties r8;
    // r9: event timestamp used as PointRecord.eventTime (units not shown here —
    // presumably epoch millis; confirm).
    private Long r9;
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/EventLogToJsonSource.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
vo
;
import
java.io.Serializable
;
import
java.util.List
;
import
com.flink.achieve.doris.VectorAngleCalculationAchi.JoinKey
;
import
lombok.Data
;
import
lombok.ToString
;
/**
 * Flattened view of one event-log record, carrying its parsed event list and
 * the attributes that form the join key with the collect-log stream
 * (see VectorAngleCalculationAchi).
 *
 * @author wjs
 * @version created 2025-6-19 11:09:03
 */
@Data
@ToString
public class EventLogToJsonSource implements Serializable {

    private static final long serialVersionUID = 1L;

    private String id;
    private String uniqueId;
    private String deviceId;
    private String cid;
    private String phone;
    private String nick;
    private List<EventList> eventList;
    private Long createTime;
    // Cached join key — transient so it never travels with the serialized record.
    private transient JoinKey joinKey;

    /** Lazily builds (and caches) the composite join key for this record. */
    public JoinKey getJoinKey() {
        if (joinKey != null) {
            return joinKey;
        }
        joinKey = new JoinKey(uniqueId, deviceId, cid, phone);
        return joinKey;
    }

    public EventLogToJsonSource(String id, String uniqueId, String deviceId, String cid,
            String phone, String nick, List<EventList> eventList, Long createTime) {
        this.createTime = createTime;
        this.eventList = eventList;
        this.nick = nick;
        this.phone = phone;
        this.cid = cid;
        this.deviceId = deviceId;
        this.uniqueId = uniqueId;
        this.id = id;
    }
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/Properties.java
0 → 100644
View file @
25c910d7
package
com
.
flink
.
vo
;
import
java.io.Serializable
;
import
java.util.List
;
/**
 * Nested properties payload of an event (EventList.r8). Field names mirror the
 * wire format (r1..r6).
 *
 * @author wjs
 * @version created 2025-2-26 17:00:37
 */
public class Properties implements Serializable {

    private static final long serialVersionUID = 1L;

    // r1..r5: semantics not visible in this file — names mirror the JSON wire
    // format; confirm meanings against the SDK payload spec.
    private String r1;
    private String r2;
    private String r3;
    private String r4;
    private String r5;
    // r6: draw-point list — "x&y" coordinate strings, exploded by the coGroup
    // step in VectorAngleCalculationAchi.
    private List<String> r6;

    public String getR1() {
        return r1;
    }

    public void setR1(String r1) {
        this.r1 = r1;
    }

    public String getR2() {
        return r2;
    }

    public void setR2(String r2) {
        this.r2 = r2;
    }

    public String getR3() {
        return r3;
    }

    public void setR3(String r3) {
        this.r3 = r3;
    }

    public String getR4() {
        return r4;
    }

    public void setR4(String r4) {
        this.r4 = r4;
    }

    public String getR5() {
        return r5;
    }

    public void setR5(String r5) {
        this.r5 = r5;
    }

    public List<String> getR6() {
        return r6;
    }

    public void setR6(List<String> r6) {
        this.r6 = r6;
    }
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment