Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
eagleEye
/
eagleEye-flink_kafka
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
0
Merge Requests
0
Pipelines
Wiki
Snippets
Settings
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
44507015
authored
Aug 14, 2025
by
魏建枢
Browse files
Options
_('Browse Files')
Download
Email Patches
Plain Diff
日活,事件曝光,质押代码提交
parent
2ac7a1b0
Expand all
Show whitespace changes
Inline
Side-by-side
Showing
24 changed files
with
690 additions
and
49 deletions
eagleEye-flink_kafka/src/main/java/com/flink/achieve/base/EventLogAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/base/UserInvitationAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/EventIpConvertAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/RegistrationCheckAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/SimiFriendsAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/UserDailyActivityAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/VectorAngleCalculationAchi.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/JoinDeviceWithRegistrationProcessFunction.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/KeyPointSelector.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/PointRecordJoinProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorAngleProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorDifferenceProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorSimilarityProcessor.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/WindowResultFunction.java
eagleEye-flink_kafka/src/main/java/com/flink/processor/impl/OkHttpService.java
eagleEye-flink_kafka/src/main/java/com/flink/util/ip2region/SearcherUtil.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/PcProperties.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/Properties.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/RealStaking.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/BulidDailyParams.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/DailyActivityCombinedLog.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/DailyActivityDeviceInfo.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/EnrichedLog.java → eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/DailyActivityEnrichedLog.java
eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/DailyActivityEventInfo.java
eagleEye-flink_kafka/src/main/java/com/flink/achieve/base/EventLogAchi.java
View file @
44507015
...
@@ -77,16 +77,26 @@ public class EventLogAchi implements Serializable {
...
@@ -77,16 +77,26 @@ public class EventLogAchi implements Serializable {
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
TIMESTAMP
(
3
),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
TIMESTAMP
(
3
),
DataTypes
.
STRING
(),
DataTypes
.
TIMESTAMP
(
3
),
DataTypes
.
STRING
(),
DataTypes
.
INT
()
};
DataTypes
.
TIMESTAMP
(
3
),
DataTypes
.
STRING
(),
DataTypes
.
INT
()
};
// 事件曝光表配置
private
static
final
String
[]
EVENT_EXPOSURE_FIELDS
=
{
"cid"
,
"phone"
,
"exposure_type"
,
"time"
,
"event_type"
,
"article_id"
,
"nick"
,
"create_time"
,
"send_time"
,
DORIS_DELETE_SIGN
};
private
static
final
DataType
[]
EVENT_EXPOSURE_TYPES
=
{
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
TIMESTAMP
(
3
),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
INT
()
};
public
static
void
eventLog
(
DataStreamSource
<
String
>
dataStreamSource
)
{
public
static
void
eventLog
(
DataStreamSource
<
String
>
dataStreamSource
)
{
// 初始化表配置
// 初始化表配置
TableConfig
eventConfig
=
new
TableConfig
(
EVENT_FIELDS
,
EVENT_TYPES
,
"bi.event_log"
);
TableConfig
eventConfig
=
new
TableConfig
(
EVENT_FIELDS
,
EVENT_TYPES
,
"bi.event_log"
);
TableConfig
eventErrorConfig
=
new
TableConfig
(
EVENT_ERROR_FIELDS
,
EVENT_ERROR_TYPES
,
"bi.event_log_error"
);
TableConfig
eventErrorConfig
=
new
TableConfig
(
EVENT_ERROR_FIELDS
,
EVENT_ERROR_TYPES
,
"bi.event_log_error"
);
TableConfig
eventExposureConfig
=
new
TableConfig
(
EVENT_EXPOSURE_FIELDS
,
EVENT_EXPOSURE_TYPES
,
"ai.event_exposure"
);
// 创建Doris Sink
// 创建Doris Sink
DorisSink
<
RowData
>
dorisEventSink
=
DorisConnector
.
sinkDoris
(
eventConfig
.
getFields
(),
eventConfig
.
getTypes
(),
DorisSink
<
RowData
>
dorisEventSink
=
DorisConnector
.
sinkDoris
(
eventConfig
.
getFields
(),
eventConfig
.
getTypes
(),
eventConfig
.
getTableName
());
eventConfig
.
getTableName
());
DorisSink
<
RowData
>
dorisEventErrorSink
=
DorisConnector
.
sinkDoris
(
eventErrorConfig
.
getFields
(),
DorisSink
<
RowData
>
dorisEventErrorSink
=
DorisConnector
.
sinkDoris
(
eventErrorConfig
.
getFields
(),
eventErrorConfig
.
getTypes
(),
eventErrorConfig
.
getTableName
());
eventErrorConfig
.
getTypes
(),
eventErrorConfig
.
getTableName
());
DorisSink
<
RowData
>
dorisEventExposureSink
=
DorisConnector
.
sinkDoris
(
eventExposureConfig
.
getFields
(),
eventExposureConfig
.
getTypes
(),
eventExposureConfig
.
getTableName
());
// 处理设备信息采集日志数据
// 处理设备信息采集日志数据
processDataStream
(
dataStreamSource
,
"eventLog"
,
eventConfig
,
dorisEventSink
,
processDataStream
(
dataStreamSource
,
"eventLog"
,
eventConfig
,
dorisEventSink
,
...
@@ -97,6 +107,11 @@ public class EventLogAchi implements Serializable {
...
@@ -97,6 +107,11 @@ public class EventLogAchi implements Serializable {
return
null
;
return
null
;
},
EventLogAchi:
:
mapToEventErrorRow
},
EventLogAchi:
:
mapToEventErrorRow
);
);
batchExposureProcessDataStream
(
dataStreamSource
,
"eventLog"
,
eventExposureConfig
,
dorisEventExposureSink
,
(
item
,
fieldCount
)
->
{
return
null
;
},
EventLogAchi:
:
mapToEventExposureRow
);
}
}
private
static
void
processDataStream
(
DataStreamSource
<
String
>
dataStream
,
String
flumeType
,
private
static
void
processDataStream
(
DataStreamSource
<
String
>
dataStream
,
String
flumeType
,
...
@@ -117,6 +132,14 @@ public class EventLogAchi implements Serializable {
...
@@ -117,6 +132,14 @@ public class EventLogAchi implements Serializable {
processedStream
.
sinkTo
(
dorisSink
).
name
(
"Doris-"
+
flumeType
);
processedStream
.
sinkTo
(
dorisSink
).
name
(
"Doris-"
+
flumeType
);
}
}
private
static
void
batchExposureProcessDataStream
(
DataStreamSource
<
String
>
dataStream
,
String
flumeType
,
TableConfig
tableConfig
,
DorisSink
<
RowData
>
dorisSink
,
RowMapper
mapper
,
EventExposureMapper
exposureMapper
)
{
SingleOutputStreamOperator
<
RowData
>
processedStream
=
dataStream
.
flatMap
(
new
ElementExposureProcessor
(
flumeType
,
exposureMapper
,
tableConfig
.
getFields
().
length
))
.
returns
(
TypeInformation
.
of
(
RowData
.
class
)).
filter
(
Objects:
:
nonNull
);
processedStream
.
sinkTo
(
dorisSink
).
name
(
"Doris-"
+
flumeType
);
}
/**
/**
* 使用map算子的内部处理类
* 使用map算子的内部处理类
*/
*/
...
@@ -223,6 +246,74 @@ public class EventLogAchi implements Serializable {
...
@@ -223,6 +246,74 @@ public class EventLogAchi implements Serializable {
}
}
}
}
private
static
class
ElementExposureProcessor
implements
FlatMapFunction
<
String
,
RowData
>,
Serializable
{
/**
*
*/
private
static
final
long
serialVersionUID
=
1L
;
private
final
String
flumeType
;
private
final
EventExposureMapper
exposureMapper
;
private
final
int
fieldCount
;
public
ElementExposureProcessor
(
String
flumeType
,
EventExposureMapper
exposureMapper
,
int
fieldCount
)
{
this
.
flumeType
=
flumeType
;
this
.
exposureMapper
=
exposureMapper
;
this
.
fieldCount
=
fieldCount
;
}
@Override
public
void
flatMap
(
String
value
,
Collector
<
RowData
>
out
)
throws
Exception
{
try
{
JSONObject
jsonObj
=
JSON
.
parseObject
(
value
);
if
(!
flumeType
.
equals
(
jsonObj
.
getString
(
FLUME_TYPE_FIELD
)))
{
return
;
}
processKafkaMessage
(
value
,
out
);
}
catch
(
Exception
e
)
{
logger
.
error
(
"UserInvitationAchi 处理 {} 数据出错 | rawData:{} | error:{}"
,
flumeType
,
value
,
e
.
getMessage
(),
e
);
}
}
private
void
processKafkaMessage
(
String
value
,
Collector
<
RowData
>
out
)
{
try
{
OdsEventLog
event
=
JSON
.
parseObject
(
value
,
OdsEventLog
.
class
);
if
(
event
==
null
)
return
;
List
<
EventList
>
eventList
=
parseEventListSafely
(
event
.
getEvent_list
());
if
(
CollectionUtils
.
isEmpty
(
eventList
))
return
;
int
dt
=
TimeConvertUtil
.
convertToSqlDate
(
event
.
getCreate_time
().
substring
(
0
,
10
));
UserProperties
userProps
=
UserPropertiesProcessor
.
userPropertiesToJson
(
event
.
getUser_properties
());
for
(
EventList
eventInfo
:
eventList
)
{
if
(
isTargetEventType
(
eventInfo
.
getR7
()))
{
out
.
collect
(
exposureMapper
.
map
(
event
,
eventInfo
,
userProps
,
dt
,
fieldCount
));
}
}
}
catch
(
Exception
e
)
{
logger
.
error
(
"Kafka消息处理失败 | data:{} | error:{}"
,
value
,
e
.
getMessage
());
}
}
private
static
List
<
EventList
>
parseEventListSafely
(
String
jsonStr
)
{
try
{
return
JSON
.
parseArray
(
jsonStr
,
EventList
.
class
);
}
catch
(
Exception
e
)
{
logger
.
warn
(
"事件列表解析失败: {}"
,
jsonStr
);
return
Collections
.
emptyList
();
}
}
// 事件类型过滤
private
static
boolean
isTargetEventType
(
String
eventType
)
{
return
CompareUtils
.
stringExists
(
eventType
,
"enter_act"
,
"exit_act"
,
"show_act"
);
}
}
private
static
RowData
mapToEventRow
(
Object
item
,
int
fieldCount
)
{
private
static
RowData
mapToEventRow
(
Object
item
,
int
fieldCount
)
{
String
value
=
(
String
)
item
;
String
value
=
(
String
)
item
;
OdsEventLog
event
=
JSONObject
.
parseObject
(
value
,
new
TypeReference
<
OdsEventLog
>()
{
OdsEventLog
event
=
JSONObject
.
parseObject
(
value
,
new
TypeReference
<
OdsEventLog
>()
{
...
@@ -295,6 +386,25 @@ public class EventLogAchi implements Serializable {
...
@@ -295,6 +386,25 @@ public class EventLogAchi implements Serializable {
return
row
;
return
row
;
}
}
private
static
RowData
mapToEventExposureRow
(
OdsEventLog
event
,
EventList
eventInfo
,
UserProperties
userProps
,
int
dt
,
int
fieldCount
)
{
GenericRowData
row
=
new
GenericRowData
(
fieldCount
);
row
.
setField
(
0
,
StringData
.
fromString
(
userProps
==
null
?
null
:
userProps
.
getCid
()));
row
.
setField
(
1
,
StringData
.
fromString
(
userProps
==
null
?
null
:
userProps
.
getPhone
()));
row
.
setField
(
2
,
StringData
.
fromString
(
eventInfo
.
getR7
()));
row
.
setField
(
3
,
TimestampData
.
fromLocalDateTime
(
LocalDateTime
.
parse
(
TimeConvertUtil
.
parseToStringSSS
(
eventInfo
.
getR9
()),
DateTimeFormatter
.
ofPattern
(
"yyyy-MM-dd HH:mm:ss.SSS"
))));
Properties
r8
=
eventInfo
.
getR8
();
row
.
setField
(
4
,
StringData
.
fromString
(
r8
.
getType
()));
row
.
setField
(
5
,
StringData
.
fromString
(
r8
.
getId
()));
row
.
setField
(
6
,
StringData
.
fromString
(
userProps
==
null
?
null
:
userProps
.
getNick
()));
row
.
setField
(
7
,
StringData
.
fromString
(
event
.
getCreate_time
()));
row
.
setField
(
8
,
StringData
.
fromString
(
event
.
getSend_time
()));
row
.
setField
(
9
,
DELETE_SIGN_VALUE
);
return
row
;
}
/**
/**
* 行数据映射接口
* 行数据映射接口
*
*
...
@@ -309,4 +419,9 @@ public class EventLogAchi implements Serializable {
...
@@ -309,4 +419,9 @@ public class EventLogAchi implements Serializable {
private
interface
EventErrorMapper
extends
Serializable
{
private
interface
EventErrorMapper
extends
Serializable
{
RowData
map
(
OdsEventLog
event
,
EventList
eventInfo
,
UserProperties
userProps
,
int
dt
,
int
fieldCount
);
RowData
map
(
OdsEventLog
event
,
EventList
eventInfo
,
UserProperties
userProps
,
int
dt
,
int
fieldCount
);
}
}
@FunctionalInterface
private
interface
EventExposureMapper
extends
Serializable
{
RowData
map
(
OdsEventLog
event
,
EventList
eventInfo
,
UserProperties
userProps
,
int
dt
,
int
fieldCount
);
}
}
}
eagleEye-flink_kafka/src/main/java/com/flink/achieve/base/UserInvitationAchi.java
View file @
44507015
...
@@ -26,6 +26,7 @@ import com.flink.config.TableConfig;
...
@@ -26,6 +26,7 @@ import com.flink.config.TableConfig;
import
com.flink.vo.RealBalance
;
import
com.flink.vo.RealBalance
;
import
com.flink.vo.RealKyc
;
import
com.flink.vo.RealKyc
;
import
com.flink.vo.RealLead
;
import
com.flink.vo.RealLead
;
import
com.flink.vo.RealStaking
;
import
com.flink.vo.RealTransaction
;
import
com.flink.vo.RealTransaction
;
import
com.flink.vo.RealUsers
;
import
com.flink.vo.RealUsers
;
...
@@ -87,6 +88,14 @@ public class UserInvitationAchi implements Serializable {
...
@@ -87,6 +88,14 @@ public class UserInvitationAchi implements Serializable {
DataTypes
.
STRING
(),
DataTypes
.
BIGINT
(),
DataTypes
.
BIGINT
(),
DataTypes
.
BIGINT
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
BIGINT
(),
DataTypes
.
BIGINT
(),
DataTypes
.
BIGINT
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
INT
()
};
DataTypes
.
STRING
(),
DataTypes
.
INT
()
};
//质押表配置
private
static
final
String
[]
REAL_STAKING_FIELDS
=
{
"tx_index"
,
"tx_hash"
,
"block_height"
,
"block_timestamp"
,
"from_account_id"
,
"to_account_id"
,
"is_relayer"
,
"amount"
,
"symbol"
,
"post_time"
,
DORIS_DELETE_SIGN
};
private
static
final
DataType
[]
REAL_STAKING_TYPES
=
{
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
STRING
(),
DataTypes
.
INT
()
};
public
static
void
userInvitation
(
DataStreamSource
<
String
>
dataStreamSource
)
{
public
static
void
userInvitation
(
DataStreamSource
<
String
>
dataStreamSource
)
{
// 初始化表配置
// 初始化表配置
TableConfig
usersConfig
=
new
TableConfig
(
USERS_FIELDS
,
USERS_TYPES
,
"bi.real_users"
);
TableConfig
usersConfig
=
new
TableConfig
(
USERS_FIELDS
,
USERS_TYPES
,
"bi.real_users"
);
...
@@ -94,6 +103,7 @@ public class UserInvitationAchi implements Serializable {
...
@@ -94,6 +103,7 @@ public class UserInvitationAchi implements Serializable {
TableConfig
keyConfig
=
new
TableConfig
(
KYC_FIELDS
,
KYC_TYPES
,
"bi.real_kyc"
);
TableConfig
keyConfig
=
new
TableConfig
(
KYC_FIELDS
,
KYC_TYPES
,
"bi.real_kyc"
);
TableConfig
balanceConfig
=
new
TableConfig
(
BALANCE_FIELDS
,
BALANCE_TYPES
,
"bi.real_balance"
);
TableConfig
balanceConfig
=
new
TableConfig
(
BALANCE_FIELDS
,
BALANCE_TYPES
,
"bi.real_balance"
);
TableConfig
leadConfig
=
new
TableConfig
(
LEAD_FIELDS
,
LEAD_TYPES
,
"bi.real_lead_switch"
);
TableConfig
leadConfig
=
new
TableConfig
(
LEAD_FIELDS
,
LEAD_TYPES
,
"bi.real_lead_switch"
);
TableConfig
realStakingConfig
=
new
TableConfig
(
REAL_STAKING_FIELDS
,
REAL_STAKING_TYPES
,
"bi.real_staking"
);
// 创建Doris Sink
// 创建Doris Sink
DorisSink
<
RowData
>
dorisUsersSink
=
DorisConnector
.
sinkDoris
(
usersConfig
.
getFields
(),
usersConfig
.
getTypes
(),
DorisSink
<
RowData
>
dorisUsersSink
=
DorisConnector
.
sinkDoris
(
usersConfig
.
getFields
(),
usersConfig
.
getTypes
(),
...
@@ -106,6 +116,8 @@ public class UserInvitationAchi implements Serializable {
...
@@ -106,6 +116,8 @@ public class UserInvitationAchi implements Serializable {
balanceConfig
.
getTypes
(),
balanceConfig
.
getTableName
());
balanceConfig
.
getTypes
(),
balanceConfig
.
getTableName
());
DorisSink
<
RowData
>
dorisLeadSink
=
DorisConnector
.
sinkDoris
(
leadConfig
.
getFields
(),
leadConfig
.
getTypes
(),
DorisSink
<
RowData
>
dorisLeadSink
=
DorisConnector
.
sinkDoris
(
leadConfig
.
getFields
(),
leadConfig
.
getTypes
(),
leadConfig
.
getTableName
());
leadConfig
.
getTableName
());
DorisSink
<
RowData
>
dorisRealStakingSink
=
DorisConnector
.
sinkDoris
(
realStakingConfig
.
getFields
(),
realStakingConfig
.
getTypes
(),
realStakingConfig
.
getTableName
());
// 处理用户数据
// 处理用户数据
// processDataStream(dataStreamSource, "realUsers", usersConfig, dorisUsersSink,(RowMapper<RealUsers>) UserInvitationAchi::mapToUsersRow);
// processDataStream(dataStreamSource, "realUsers", usersConfig, dorisUsersSink,(RowMapper<RealUsers>) UserInvitationAchi::mapToUsersRow);
...
@@ -119,6 +131,8 @@ public class UserInvitationAchi implements Serializable {
...
@@ -119,6 +131,8 @@ public class UserInvitationAchi implements Serializable {
processDataStream
(
dataStreamSource
,
"realBalance"
,
balanceConfig
,
dorisBalanceSink
,(
RowMapper
<
RealBalance
>)
UserInvitationAchi:
:
mapToBalanceRow
);
processDataStream
(
dataStreamSource
,
"realBalance"
,
balanceConfig
,
dorisBalanceSink
,(
RowMapper
<
RealBalance
>)
UserInvitationAchi:
:
mapToBalanceRow
);
//处理真实上级数据
//处理真实上级数据
processDataStream
(
dataStreamSource
,
"realLead"
,
leadConfig
,
dorisLeadSink
,(
RowMapper
<
RealLead
>)
UserInvitationAchi:
:
mapToLeadRow
);
processDataStream
(
dataStreamSource
,
"realLead"
,
leadConfig
,
dorisLeadSink
,(
RowMapper
<
RealLead
>)
UserInvitationAchi:
:
mapToLeadRow
);
//处理质押数据
processDataStream
(
dataStreamSource
,
"realStaking"
,
realStakingConfig
,
dorisRealStakingSink
,(
RowMapper
<
RealStaking
>)
UserInvitationAchi:
:
mapToStakingRow
);
}
}
/**
/**
...
@@ -183,13 +197,12 @@ public class UserInvitationAchi implements Serializable {
...
@@ -183,13 +197,12 @@ public class UserInvitationAchi implements Serializable {
case
"realKyc"
:
return
RealKyc
.
class
;
case
"realKyc"
:
return
RealKyc
.
class
;
case
"realBalance"
:
return
RealBalance
.
class
;
case
"realBalance"
:
return
RealBalance
.
class
;
case
"realLead"
:
return
RealLead
.
class
;
case
"realLead"
:
return
RealLead
.
class
;
case
"realStaking"
:
return
RealStaking
.
class
;
default
:
throw
new
IllegalArgumentException
(
"未知类型: "
+
type
);
default
:
throw
new
IllegalArgumentException
(
"未知类型: "
+
type
);
}
}
}
}
}
}
// 用户数据映射
// 用户数据映射
private
static
RowData
mapToUsersRow
(
Object
item
,
int
fieldCount
)
{
private
static
RowData
mapToUsersRow
(
Object
item
,
int
fieldCount
)
{
RealUsers
user
=
(
RealUsers
)
item
;
// 显式类型转换
RealUsers
user
=
(
RealUsers
)
item
;
// 显式类型转换
...
@@ -281,6 +294,23 @@ public class UserInvitationAchi implements Serializable {
...
@@ -281,6 +294,23 @@ public class UserInvitationAchi implements Serializable {
row
.
setField
(
9
,
DELETE_SIGN_VALUE
);
row
.
setField
(
9
,
DELETE_SIGN_VALUE
);
return
row
;
return
row
;
}
}
// 质押 数据映射
private
static
RowData
mapToStakingRow
(
Object
item
,
int
fieldCount
)
{
RealStaking
staking
=
(
RealStaking
)
item
;
GenericRowData
row
=
new
GenericRowData
(
fieldCount
);
row
.
setField
(
0
,
StringData
.
fromString
(
staking
.
getTx_index
()));
row
.
setField
(
1
,
StringData
.
fromString
(
staking
.
getTx_hash
()));
row
.
setField
(
2
,
StringData
.
fromString
(
staking
.
getBlock_height
()));
row
.
setField
(
3
,
StringData
.
fromString
(
staking
.
getBlock_timestamp
()));
row
.
setField
(
4
,
StringData
.
fromString
(
staking
.
getFrom_account_id
()));
row
.
setField
(
5
,
StringData
.
fromString
(
staking
.
getTo_account_id
()));
row
.
setField
(
6
,
StringData
.
fromString
(
staking
.
getIs_relayer
()));
row
.
setField
(
7
,
StringData
.
fromString
(
staking
.
getAmount
()));
row
.
setField
(
8
,
StringData
.
fromString
(
staking
.
getSymbol
()));
row
.
setField
(
9
,
StringData
.
fromString
(
staking
.
getPost_time
()));
row
.
setField
(
10
,
DELETE_SIGN_VALUE
);
return
row
;
}
/**
/**
* 行数据映射接口
* 行数据映射接口
...
...
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/EventIpConvertAchi.java
View file @
44507015
...
@@ -103,7 +103,6 @@ public class EventIpConvertAchi extends SourceCommonBase implements Serializable
...
@@ -103,7 +103,6 @@ public class EventIpConvertAchi extends SourceCommonBase implements Serializable
}
}
public
static
JSONObject
handleData
(
String
record
)
throws
ParseException
,
Exception
{
public
static
JSONObject
handleData
(
String
record
)
throws
ParseException
,
Exception
{
logger
.
info
(
"EventIpConvertAchi record:{}"
,
record
);
// TODO 数据的 ETL 处理
// TODO 数据的 ETL 处理
OdsEventLog
odsEventLog
=
JSONObject
.
parseObject
(
record
,
new
TypeReference
<
OdsEventLog
>(){});
OdsEventLog
odsEventLog
=
JSONObject
.
parseObject
(
record
,
new
TypeReference
<
OdsEventLog
>(){});
String
id
=
odsEventLog
.
getId
();
String
id
=
odsEventLog
.
getId
();
...
@@ -111,7 +110,6 @@ public class EventIpConvertAchi extends SourceCommonBase implements Serializable
...
@@ -111,7 +110,6 @@ public class EventIpConvertAchi extends SourceCommonBase implements Serializable
String
deviceId
=
odsEventLog
.
getDevice_id
();
String
deviceId
=
odsEventLog
.
getDevice_id
();
String
routeIp
=
odsEventLog
.
getRoute_ip
();
String
routeIp
=
odsEventLog
.
getRoute_ip
();
String
userProperties
=
odsEventLog
.
getUser_properties
();
String
userProperties
=
odsEventLog
.
getUser_properties
();
logger
.
info
(
"组装数据 body:{}"
,
odsEventLog
.
toString
());
String
cid
=
null
;
String
cid
=
null
;
String
phone
=
null
;
String
phone
=
null
;
String
nick
=
null
;
String
nick
=
null
;
...
@@ -156,7 +154,6 @@ public class EventIpConvertAchi extends SourceCommonBase implements Serializable
...
@@ -156,7 +154,6 @@ public class EventIpConvertAchi extends SourceCommonBase implements Serializable
if
(
StringUtils
.
isEmpty
(
ip_name
))
{
if
(
StringUtils
.
isEmpty
(
ip_name
))
{
return
null
;
return
null
;
}
}
logger
.
info
(
"组装数据开始"
);
JSONObject
jsonObj
=
new
JSONObject
();
JSONObject
jsonObj
=
new
JSONObject
();
jsonObj
.
put
(
"id"
,
id
);
jsonObj
.
put
(
"id"
,
id
);
jsonObj
.
put
(
"ips"
,
ip_name
);
jsonObj
.
put
(
"ips"
,
ip_name
);
...
...
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/RegistrationCheckAchi.java
View file @
44507015
...
@@ -97,8 +97,8 @@ public class RegistrationCheckAchi extends MultipleSourceCommonBase implements S
...
@@ -97,8 +97,8 @@ public class RegistrationCheckAchi extends MultipleSourceCommonBase implements S
simiUserInfo
.
setPhone_number
(
userRegistrationReqDto
.
getPhoneNumber
());
simiUserInfo
.
setPhone_number
(
userRegistrationReqDto
.
getPhoneNumber
());
simiUserInfo
.
setUpdateTime
(
TimeConvertUtil
.
convertToTimestampSSS
(
userRegistrationReqDto
.
getTime
()));
simiUserInfo
.
setUpdateTime
(
TimeConvertUtil
.
convertToTimestampSSS
(
userRegistrationReqDto
.
getTime
()));
logger
.
info
(
">>>>>>>>>>registerDataStream cid:{},Country_code:{},Phone_number:{},UpdateTime:{}"
,
userRegistrationReqDto
.
getCid
()
//
logger.info(">>>>>>>>>>registerDataStream cid:{},Country_code:{},Phone_number:{},UpdateTime:{}", userRegistrationReqDto.getCid()
,
userRegistrationReqDto
.
getCountryCode
(),
userRegistrationReqDto
.
getPhoneNumber
(),
TimeConvertUtil
.
convertToTimestampSSS
(
userRegistrationReqDto
.
getTime
()));
//
,userRegistrationReqDto.getCountryCode(),userRegistrationReqDto.getPhoneNumber(),TimeConvertUtil.convertToTimestampSSS(userRegistrationReqDto.getTime()));
out
.
collect
(
simiUserInfo
);
out
.
collect
(
simiUserInfo
);
}
catch
(
Exception
e
)
{
}
catch
(
Exception
e
)
{
logger
.
error
(
"Error parsing simi_user_list 处理 Kafka 消息出错 | data:{} | error:{}"
,
value
,
e
.
getMessage
());
logger
.
error
(
"Error parsing simi_user_list 处理 Kafka 消息出错 | data:{} | error:{}"
,
value
,
e
.
getMessage
());
...
@@ -123,7 +123,7 @@ public class RegistrationCheckAchi extends MultipleSourceCommonBase implements S
...
@@ -123,7 +123,7 @@ public class RegistrationCheckAchi extends MultipleSourceCommonBase implements S
//解析 Kafka 数据
//解析 Kafka 数据
DeviceId
device
=
DeviceIdLatestAchi
.
handleData
(
value
);
DeviceId
device
=
DeviceIdLatestAchi
.
handleData
(
value
);
if
(
device
!=
null
)
{
if
(
device
!=
null
)
{
logger
.
info
(
">>>>>>>>>>mergedDeviceStream cid:{},CollectTime:{},CreateTime:{}"
,
device
.
getCid
(),
device
.
getCollectTime
(),
device
.
getCreateTime
());
//
logger.info(">>>>>>>>>>mergedDeviceStream cid:{},CollectTime:{},CreateTime:{}", device.getCid(),device.getCollectTime(),device.getCreateTime());
out
.
collect
(
device
);
out
.
collect
(
device
);
}
}
}
catch
(
Exception
e
)
{
}
catch
(
Exception
e
)
{
...
@@ -141,7 +141,7 @@ public class RegistrationCheckAchi extends MultipleSourceCommonBase implements S
...
@@ -141,7 +141,7 @@ public class RegistrationCheckAchi extends MultipleSourceCommonBase implements S
//解析 Kafka 数据
//解析 Kafka 数据
DeviceId
device
=
DeviceIdLatestAchi
.
handlePcData
(
value
);
DeviceId
device
=
DeviceIdLatestAchi
.
handlePcData
(
value
);
if
(
device
!=
null
)
{
if
(
device
!=
null
)
{
logger
.
info
(
">>>>>>>>>>mergedDeviceStreamPc cid:{},CollectTime:{},CreateTime:{}"
,
device
.
getCid
(),
device
.
getCollectTime
(),
device
.
getCreateTime
());
//
logger.info(">>>>>>>>>>mergedDeviceStreamPc cid:{},CollectTime:{},CreateTime:{}", device.getCid(),device.getCollectTime(),device.getCreateTime());
out
.
collect
(
device
);
out
.
collect
(
device
);
}
}
}
catch
(
Exception
e
)
{
}
catch
(
Exception
e
)
{
...
@@ -168,7 +168,7 @@ public class RegistrationCheckAchi extends MultipleSourceCommonBase implements S
...
@@ -168,7 +168,7 @@ public class RegistrationCheckAchi extends MultipleSourceCommonBase implements S
public
void
processElement
(
SimiUserInfo
user
,
DeviceId
device
,
Context
ctx
,
public
void
processElement
(
SimiUserInfo
user
,
DeviceId
device
,
Context
ctx
,
Collector
<
Tuple3
<
String
,
String
,
Long
>>
out
)
{
Collector
<
Tuple3
<
String
,
String
,
Long
>>
out
)
{
// 输出: (设备ID, 用户ID, 注册时间)
// 输出: (设备ID, 用户ID, 注册时间)
logger
.
info
(
">>>>>>>>>>deviceUserStream deviceId:{},cid:{},UpdateTime:{}"
,
device
.
getDeviceId
(),
user
.
getCid
(),
user
.
getUpdateTime
());
//
logger.info(">>>>>>>>>>deviceUserStream deviceId:{},cid:{},UpdateTime:{}", device.getDeviceId(), user.getCid(), user.getUpdateTime());
// 确保cid不为空且相等(实际上keyBy已经保证,此处为冗余校验)
// 确保cid不为空且相等(实际上keyBy已经保证,此处为冗余校验)
if
(
user
.
getCid
().
equals
(
device
.
getCid
()))
{
if
(
user
.
getCid
().
equals
(
device
.
getCid
()))
{
out
.
collect
(
Tuple3
.
of
(
device
.
getDeviceId
(),
user
.
getCid
(),
user
.
getUpdateTime
()));
out
.
collect
(
Tuple3
.
of
(
device
.
getDeviceId
(),
user
.
getCid
(),
user
.
getUpdateTime
()));
...
...
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/SimiFriendsAchi.java
View file @
44507015
...
@@ -583,7 +583,7 @@ public class SimiFriendsAchi extends MultipleSourceCommonBase implements Seriali
...
@@ -583,7 +583,7 @@ public class SimiFriendsAchi extends MultipleSourceCommonBase implements Seriali
.
batchSize
(
3
)
// 每批2个参数
.
batchSize
(
3
)
// 每批2个参数
.
build
();
.
build
();
List
<
Map
<
String
,
Object
>>
params
=
DynamicSqlBuilder
.
queryDoris
(
queries
);
List
<
Map
<
String
,
Object
>>
params
=
DynamicSqlBuilder
.
queryDoris
(
queries
);
logger
.
info
(
">>>>>>>>>>>params:{}"
,
params
);
//
logger.info(">>>>>>>>>>>params:{}",params);
return
params
;
return
params
;
}
}
...
...
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/UserDailyActivityAchi.java
View file @
44507015
This diff is collapsed.
Click to expand it.
eagleEye-flink_kafka/src/main/java/com/flink/achieve/doris/VectorAngleCalculationAchi.java
View file @
44507015
...
@@ -247,7 +247,7 @@ public class VectorAngleCalculationAchi extends MultipleSourceCommonBase impleme
...
@@ -247,7 +247,7 @@ public class VectorAngleCalculationAchi extends MultipleSourceCommonBase impleme
@Override
@Override
public
Tuple2
<
String
,
Long
>
getKey
(
PointRecord
r
)
{
public
Tuple2
<
String
,
Long
>
getKey
(
PointRecord
r
)
{
logger
.
info
(
"KeyedStream >> id:{},eventTime:{}"
,
r
.
id
,
r
.
eventTime
);
//
logger.info("KeyedStream >> id:{},eventTime:{}",r.id,r.eventTime);
return
Tuple2
.
of
(
r
.
id
,
r
.
eventTime
);
return
Tuple2
.
of
(
r
.
id
,
r
.
eventTime
);
}
}
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/JoinDeviceWithRegistrationProcessFunction.java
View file @
44507015
...
@@ -41,7 +41,7 @@ public class JoinDeviceWithRegistrationProcessFunction extends KeyedCoProcessFun
...
@@ -41,7 +41,7 @@ public class JoinDeviceWithRegistrationProcessFunction extends KeyedCoProcessFun
DeviceId
device
=
deviceState
.
value
();
DeviceId
device
=
deviceState
.
value
();
if
(
device
!=
null
)
{
if
(
device
!=
null
)
{
//输出三元组: (设备ID, 用户ID, 注册时间)
//输出三元组: (设备ID, 用户ID, 注册时间)
logger
.
info
(
"输出三元组 设备ID:{}, 用户ID:{}, 注册时间:{}"
,
device
.
getDeviceId
(),
user
.
getCid
(),
user
.
getUpdateTime
());
//
logger.info("输出三元组 设备ID:{}, 用户ID:{}, 注册时间:{}",device.getDeviceId(), user.getCid(), user.getUpdateTime());
out
.
collect
(
new
Tuple3
<>(
device
.
getDeviceId
(),
user
.
getCid
(),
user
.
getUpdateTime
()));
out
.
collect
(
new
Tuple3
<>(
device
.
getDeviceId
(),
user
.
getCid
(),
user
.
getUpdateTime
()));
}
}
}
}
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/KeyPointSelector.java
View file @
44507015
...
@@ -78,9 +78,9 @@ public class KeyPointSelector extends KeyedProcessFunction<Tuple2<String, Long>,
...
@@ -78,9 +78,9 @@ public class KeyPointSelector extends KeyedProcessFunction<Tuple2<String, Long>,
// 3. 生成坐标数组字符串
// 3. 生成坐标数组字符串
String
vectorArray
=
generateVectorArray
(
keyPoints
);
String
vectorArray
=
generateVectorArray
(
keyPoints
);
logger
.
info
(
">>>>>>>>KeyPointSelector id:{},eventTime:{},vectorArray:{}"
,
ctx
.
getCurrentKey
().
f0
,
//
logger.info(">>>>>>>>KeyPointSelector id:{},eventTime:{},vectorArray:{}",ctx.getCurrentKey().f0,
ctx
.
getCurrentKey
().
f1
,
//
ctx.getCurrentKey().f1,
vectorArray
);
//
vectorArray);
// 4. 输出结果
// 4. 输出结果
out
.
collect
(
new
ResultOutput
(
out
.
collect
(
new
ResultOutput
(
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/PointRecordJoinProcessor.java
View file @
44507015
...
@@ -154,7 +154,7 @@ public class PointRecordJoinProcessor extends CoProcessFunction<EventLogToJsonSo
...
@@ -154,7 +154,7 @@ public class PointRecordJoinProcessor extends CoProcessFunction<EventLogToJsonSo
public
void
onTimer
(
long
timestamp
,
OnTimerContext
ctx
,
Collector
<
PointRecord
>
out
)
{
public
void
onTimer
(
long
timestamp
,
OnTimerContext
ctx
,
Collector
<
PointRecord
>
out
)
{
try
{
try
{
pendingEventLog
.
remove
(
timestamp
-
60000
);
pendingEventLog
.
remove
(
timestamp
-
60000
);
logger
.
info
(
"Cleaned expired events at {}"
,
timestamp
);
//
logger.info("Cleaned expired events at {}", timestamp);
}
catch
(
Exception
e
)
{
}
catch
(
Exception
e
)
{
logger
.
error
(
"Timer error: {}"
,
e
.
getMessage
(),
e
);
logger
.
error
(
"Timer error: {}"
,
e
.
getMessage
(),
e
);
}
}
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorAngleProcessor.java
View file @
44507015
...
@@ -42,7 +42,7 @@ public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Lo
...
@@ -42,7 +42,7 @@ public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Lo
public
void
processElement
(
PointRecord
record
,
public
void
processElement
(
PointRecord
record
,
KeyedProcessFunction
<
Tuple2
<
String
,
Long
>,
PointRecord
,
ResultRecord
>.
Context
ctx
,
KeyedProcessFunction
<
Tuple2
<
String
,
Long
>,
PointRecord
,
ResultRecord
>.
Context
ctx
,
Collector
<
ResultRecord
>
out
)
throws
Exception
{
Collector
<
ResultRecord
>
out
)
throws
Exception
{
logger
.
info
(
"processElement >>>start!"
);
//
logger.info("processElement >>>start!");
VectorState
state
=
vectorState
.
value
();
VectorState
state
=
vectorState
.
value
();
double
vectorX
,
vectorY
,
vectorM
,
pointV
;
double
vectorX
,
vectorY
,
vectorM
,
pointV
;
// 处理第一条记录
// 处理第一条记录
...
@@ -87,24 +87,24 @@ public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Lo
...
@@ -87,24 +87,24 @@ public class VectorAngleProcessor extends KeyedProcessFunction<Tuple2<String, Lo
record
.
resolutionX
,
record
.
resolutionX
,
record
.
resolutionY
record
.
resolutionY
);
);
logger
.
info
(
"VectorAngleProcessor processElement >>>end! id:{},eventTime:{},rowNum:{},"
//
logger.info("VectorAngleProcessor processElement >>>end! id:{},eventTime:{},rowNum:{},"
+
"positionX:{},positionY:{},vectorX:{},vectorY:{},vectorM:{},"
//
+ "positionX:{},positionY:{},vectorX:{},vectorY:{},vectorM:{},"
+
"pointV:{},cosV:{},angleV:{},radianV:{},resolutionX:{},resolutionY:{}"
,
//
+ "pointV:{},cosV:{},angleV:{},radianV:{},resolutionX:{},resolutionY:{}",
record
.
id
,
//
record.id,
record
.
eventTime
,
//
record.eventTime,
record
.
rowNum
,
//
record.rowNum,
record
.
positionX
,
//
record.positionX,
record
.
positionY
,
//
record.positionY,
vectorX
,
//
vectorX,
vectorY
,
//
vectorY,
vectorM
,
//
vectorM,
pointV
,
//
pointV,
cosV
,
//
cosV,
angleV
,
//
angleV,
radianV
,
//
radianV,
record
.
resolutionX
,
//
record.resolutionX,
record
.
resolutionY
//
record.resolutionY
);
//
);
out
.
collect
(
result
);
out
.
collect
(
result
);
// 更新状态(当前记录成为下一条的"前一条")
// 更新状态(当前记录成为下一条的"前一条")
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorDifferenceProcessor.java
View file @
44507015
...
@@ -93,7 +93,7 @@ public class VectorDifferenceProcessor extends KeyedProcessFunction<Tuple2<Strin
...
@@ -93,7 +93,7 @@ public class VectorDifferenceProcessor extends KeyedProcessFunction<Tuple2<Strin
// 创建包含两个元素的数组[x, y]
// 创建包含两个元素的数组[x, y]
double
[]
coordinate
=
{
convertedX
,
convertedY
};
double
[]
coordinate
=
{
convertedX
,
convertedY
};
coordinateList
.
add
(
coordinate
);
coordinateList
.
add
(
coordinate
);
logger
.
info
(
">>>>>>>>准备输出要计算的数据: vectorDiffX:{},resolutionX:{},vectorDiffY:{},resolutionY:{}"
,
point
.
vectorDiffX
,
point
.
resolutionX
,
point
.
vectorDiffY
,
point
.
resolutionY
);
//
logger.info(">>>>>>>>准备输出要计算的数据: vectorDiffX:{},resolutionX:{},vectorDiffY:{},resolutionY:{}",point.vectorDiffX,point.resolutionX,point.vectorDiffY,point.resolutionY);
}
}
// 生成[[x1,y1],[x2,y2],...]格式的二维数组
// 生成[[x1,y1],[x2,y2],...]格式的二维数组
// 注意:实际输出时可以直接使用coordinateList.toString(),但格式需要微调
// 注意:实际输出时可以直接使用coordinateList.toString(),但格式需要微调
...
@@ -114,7 +114,7 @@ public class VectorDifferenceProcessor extends KeyedProcessFunction<Tuple2<Strin
...
@@ -114,7 +114,7 @@ public class VectorDifferenceProcessor extends KeyedProcessFunction<Tuple2<Strin
Long
eventTime
=
ctx
.
getCurrentKey
().
f1
;
Long
eventTime
=
ctx
.
getCurrentKey
().
f1
;
// String dt = String.format("%tF", eventTime); // yyyy-MM-dd格式
// String dt = String.format("%tF", eventTime); // yyyy-MM-dd格式
logger
.
info
(
">>>>>>>>准备输出 最终结果: id:{},eventTime:{},vectorArray:{}"
,
ctx
.
getCurrentKey
().
f0
,
eventTime
,
resultBuilder
.
toString
());
//
logger.info(">>>>>>>>准备输出 最终结果: id:{},eventTime:{},vectorArray:{}",ctx.getCurrentKey().f0,eventTime,resultBuilder.toString());
out
.
collect
(
new
ResultOutput
(
out
.
collect
(
new
ResultOutput
(
ctx
.
getCurrentKey
().
f0
,
// id
ctx
.
getCurrentKey
().
f0
,
// id
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/VectorSimilarityProcessor.java
View file @
44507015
...
@@ -90,7 +90,7 @@ public class VectorSimilarityProcessor extends KeyedProcessFunction<String, Vect
...
@@ -90,7 +90,7 @@ public class VectorSimilarityProcessor extends KeyedProcessFunction<String, Vect
// 3. 输出结果
// 3. 输出结果
for
(
SimilarityResult
result
:
results
)
{
for
(
SimilarityResult
result
:
results
)
{
logger
.
info
(
"VectorSimilarityProcessor 结果输入>>>>>>>>>>>>>> pairId:{},isSimilar:{},avgDistance:{} "
,
result
.
pairId
,
result
.
isSimilar
,
result
.
avgDistance
);
//
logger.info("VectorSimilarityProcessor 结果输入>>>>>>>>>>>>>> pairId:{},isSimilar:{},avgDistance:{} ",result.pairId,result.isSimilar,result.avgDistance);
out
.
collect
(
result
);
out
.
collect
(
result
);
}
}
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/function/WindowResultFunction.java
View file @
44507015
...
@@ -57,7 +57,7 @@ public class WindowResultFunction extends ProcessWindowFunction<Integer, DeviceR
...
@@ -57,7 +57,7 @@ public class WindowResultFunction extends ProcessWindowFunction<Integer, DeviceR
if
(!
state
.
contains
(
deviceId
))
{
if
(!
state
.
contains
(
deviceId
))
{
state
.
put
(
deviceId
,
true
);
state
.
put
(
deviceId
,
true
);
}
}
logger
.
info
(
"窗口窗口结果处理》》》 deviceId:{},windowEnd:{},count:{}"
,
deviceId
,
System
.
currentTimeMillis
(),
userCount
);
//
logger.info("窗口窗口结果处理》》》 deviceId:{},windowEnd:{},count:{}",deviceId,System.currentTimeMillis(),userCount);
//输出窗口结果(设备ID, 窗口结束时间, 用户数)
//输出窗口结果(设备ID, 窗口结束时间, 用户数)
out
.
collect
(
new
DeviceRegistrationResult
(
out
.
collect
(
new
DeviceRegistrationResult
(
deviceId
,
deviceId
,
...
...
eagleEye-flink_kafka/src/main/java/com/flink/processor/impl/OkHttpService.java
View file @
44507015
...
@@ -72,11 +72,11 @@ public class OkHttpService{
...
@@ -72,11 +72,11 @@ public class OkHttpService{
Response
response
=
client
.
newCall
(
request
).
execute
();
Response
response
=
client
.
newCall
(
request
).
execute
();
if
(
response
.
isSuccessful
())
{
if
(
response
.
isSuccessful
())
{
String
resultStr
=
response
.
body
().
string
();
String
resultStr
=
response
.
body
().
string
();
logger
.
info
(
"OkHttpService 响应内容 body:{},friendsType:{},cid:{}"
,
resultStr
,
cid
);
//
logger.info("OkHttpService 响应内容 body:{},friendsType:{},cid:{}",resultStr,cid);
SimiInterfaceBase
resBody
=
JSONObject
.
parseObject
(
resultStr
,
new
TypeReference
<
SimiInterfaceBase
>(){});
SimiInterfaceBase
resBody
=
JSONObject
.
parseObject
(
resultStr
,
new
TypeReference
<
SimiInterfaceBase
>(){});
result
=
resBody
.
getData
();
result
=
resBody
.
getData
();
}
else
{
}
else
{
logger
.
info
(
"OkHttpService error 响应内容friendsType:{},cid:{}"
,
cid
);
//
logger.info("OkHttpService error 响应内容friendsType:{},cid:{}",cid);
}
}
}
catch
(
Exception
e
)
{
}
catch
(
Exception
e
)
{
logger
.
error
(
"OkHttpService get e:{}"
,
e
.
toString
());
logger
.
error
(
"OkHttpService get e:{}"
,
e
.
toString
());
...
...
eagleEye-flink_kafka/src/main/java/com/flink/util/ip2region/SearcherUtil.java
View file @
44507015
...
@@ -89,7 +89,7 @@ public class SearcherUtil {
...
@@ -89,7 +89,7 @@ public class SearcherUtil {
/**
 * Ad-hoc smoke test: prints the result of splitting a comma-separated IP
 * string via {@code convertStringToList}.
 */
public static void main(String[] args) throws Exception {
    // getCityInfoByFile("1.9.241.214");
    String str = "192.168.1.1,111.193.48.161,61.48.41.5,124.65.242.25,221.222.117.237,27.221.85.218";
    System.out.println(convertStringToList(str));
}
}
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/PcProperties.java
View file @
44507015
...
@@ -18,7 +18,7 @@ public class PcProperties implements Serializable{
...
@@ -18,7 +18,7 @@ public class PcProperties implements Serializable{
*/
*/
private
static
final
long
serialVersionUID
=
1L
;
private
static
final
long
serialVersionUID
=
1L
;
private
Stri
ng
r9
;
private
Lo
ng
r9
;
private
String
r4
;
private
String
r4
;
private
String
r3
;
private
String
r3
;
}
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/Properties.java
View file @
44507015
...
@@ -26,6 +26,8 @@ public class Properties implements Serializable{
...
@@ -26,6 +26,8 @@ public class Properties implements Serializable{
private
String
timeDifference
;
private
String
timeDifference
;
private
Long
endTime
;
private
Long
endTime
;
private
String
userId
;
private
String
userId
;
private
String
id
;
private
String
type
;
public
String
getR1
()
{
public
String
getR1
()
{
...
@@ -94,5 +96,16 @@ public class Properties implements Serializable{
...
@@ -94,5 +96,16 @@ public class Properties implements Serializable{
public
void
setUserId
(
String
userId
)
{
public
void
setUserId
(
String
userId
)
{
this
.
userId
=
userId
;
this
.
userId
=
userId
;
}
}
/** Returns the id, or {@code null} if it was never set. */
public String getId() {
    return id;
}
/** Sets the id. */
public void setId(String id) {
    this.id = id;
}
/** Returns the type, or {@code null} if it was never set. */
public String getType() {
    return type;
}
/** Sets the type. */
public void setType(String type) {
    this.type = type;
}
}
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/RealStaking.java
0 → 100644
View file @
44507015
package
com
.
flink
.
vo
;
import
java.io.Serializable
;
import
lombok.Data
;
import
lombok.ToString
;
/**
 * Value object holding one staking transaction row.
 *
 * <p>All fields are kept as plain strings, presumably matching the upstream
 * payload verbatim — TODO confirm the feed schema. Getters/setters,
 * equals/hashCode and toString are generated by Lombok.
 *
 * @author wjs
 * @version created 2025-8-14 14:32:26
 */
@Data
@ToString
public class RealStaking implements Serializable {

    /** Serialization version. */
    private static final long serialVersionUID = 1L;

    /** Transaction index (name suggests position within its block — verify upstream). */
    private String tx_index;
    /** Transaction hash. */
    private String tx_hash;
    /** Block height. */
    private String block_height;
    /** Block timestamp. */
    private String block_timestamp;
    /** Sender account id. */
    private String from_account_id;
    /** Receiver account id. */
    private String to_account_id;
    /** NOTE(review): semantics unclear from here — presumably a relayer flag; confirm. */
    private String is_relayer;
    /** Amount, kept as a string (no numeric parsing here). */
    private String amount;
    /** Token symbol. */
    private String symbol;
    /** Post time of the record. */
    private String post_time;
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/BulidDailyParams.java
0 → 100644
View file @
44507015
package
com
.
flink
.
vo
.
userDailyActivity
;
import
java.util.List
;
import
org.apache.commons.collections.CollectionUtils
;
import
org.apache.commons.lang3.StringUtils
;
import
com.alibaba.fastjson.JSON
;
import
com.alibaba.fastjson.JSONObject
;
import
com.alibaba.fastjson.TypeReference
;
import
com.flink.processor.function.GenDeviceIdProcessor
;
import
com.flink.processor.function.UserPropertiesProcessor
;
import
com.flink.util.CompareUtils
;
import
com.flink.util.TimeConvertUtil
;
import
com.flink.util.ip2region.SearcherUtil
;
import
com.flink.vo.DeviceIdInfo
;
import
com.flink.vo.EventList
;
import
com.flink.vo.OdsCollectLog
;
import
com.flink.vo.OdsEventLog
;
import
com.flink.vo.PcCollectLog
;
import
com.flink.vo.PcDeviceInfo
;
import
com.flink.vo.PcEventInfo
;
import
com.flink.vo.PcOdsEventLog
;
import
com.flink.vo.PcProperties
;
import
com.flink.vo.UserProperties
;
/**
 * Builds daily-activity (日活) parameter objects from raw collect/event logs.
 *
 * <p>Static helpers that parse APP and PC log payloads into
 * {@code DailyActivityDeviceInfo} / {@code DailyActivityEventInfo}. Every
 * helper returns {@code null} when a payload lacks a field required
 * downstream, so callers must null-check results.
 *
 * @author wjs
 * @version created 2025-8-13 17:37:32
 */
public class BulidDailyParams {

    /**
     * Parses an APP collect-log JSON string into a device-info record.
     *
     * @param value raw JSON of an {@code OdsCollectLog}
     * @return the device info, or {@code null} if the log cannot be parsed or
     *         is missing deviceIdV1 / cid / phone
     * @throws Exception propagated from parsing and time conversion
     */
    public static DailyActivityDeviceInfo handleDeviceData(String value) throws Exception {
        OdsCollectLog log = JSON.parseObject(value, OdsCollectLog.class);
        if (log == null)
            return null;
        String deviceId = log.getDevice_id();
        String uniqueId = log.getUnique_id();
        String appType = log.getApp_type();
        String appKey = log.getApp_key();
        String other_info = log.getOther_info();
        String device_info = log.getDevice_info();
        String env_info = log.getEnv_info();
        String createTime = log.getCreate_time();
        DeviceIdInfo deviceIdInfo = GenDeviceIdProcessor.genDeviceId(appType, appKey, other_info, device_info,
                env_info);
        UserProperties userProperties = UserPropertiesProcessor.userPropertiesToJson(log.getUser_properties());
        // Drop records that cannot be attributed to a concrete device + user.
        if (null == deviceIdInfo) {
            return null;
        }
        if (StringUtils.isEmpty(deviceIdInfo.getDeviceIdV1())) {
            return null;
        }
        if (null == userProperties) {
            return null;
        }
        if (StringUtils.isEmpty(userProperties.getCid())) {
            return null;
        }
        if (StringUtils.isEmpty(userProperties.getPhone())) {
            return null;
        }
        // NOTE(review): createTime.substring(0, 10) assumes a "yyyy-MM-dd..." prefix
        // and a non-null createTime — confirm upstream always sets it.
        return new DailyActivityDeviceInfo(deviceId, deviceIdInfo.getDeviceIdV1(), appKey, uniqueId, appType,
                createTime.substring(0, 10), deviceIdInfo.getModel(), deviceIdInfo.getBrand(),
                deviceIdInfo.getOsRelease(), deviceIdInfo.getAppVersion(),
                TimeConvertUtil.convertToTimestamp(createTime), log.getZone_name(), log.getZone_type(),
                log.getZone_code(), getPlatformByAppKey(appKey), null);
    }

    /**
     * Parses a PC collect-log JSON string into a device-info record.
     *
     * <p>For PC the device id (field {@code i8}) doubles as deviceIdV1 and
     * uniqueId; deviceName is composed as {@code b2 + "-" + b3}.
     *
     * @param value raw JSON of a {@code PcCollectLog}
     * @return the device info, or {@code null} when parsing fails or cid/phone
     *         are missing
     * @throws Exception propagated from parsing and time conversion
     */
    public static DailyActivityDeviceInfo handlePcDeviceData(String value) throws Exception {
        PcCollectLog log = JSONObject.parseObject(value, new TypeReference<PcCollectLog>() {
        });
        if (null == log) {
            return null;
        }
        String appType = log.getApp_type();
        String appKey = log.getApp_key();
        String device_info = log.getDevice_info();
        String createTime = log.getCreate_time();
        if (StringUtils.isEmpty(device_info)) {
            return null;
        }
        PcDeviceInfo pcDeviceInfo = JSONObject.parseObject(device_info, new TypeReference<PcDeviceInfo>() {
        });
        if (null == pcDeviceInfo) {
            return null;
        }
        if (StringUtils.isEmpty(pcDeviceInfo.getCid())) {
            return null;
        }
        if (StringUtils.isEmpty(pcDeviceInfo.getPhone())) {
            return null;
        }
        String deviceId = pcDeviceInfo.getI8();
        String deviceName = pcDeviceInfo.getB2() + "-" + pcDeviceInfo.getB3();
        return new DailyActivityDeviceInfo(deviceId, deviceId, appKey, deviceId, appType,
                createTime.substring(0, 10), pcDeviceInfo.getB3(), pcDeviceInfo.getB2(), pcDeviceInfo.getB4(),
                log.getApp_version(), TimeConvertUtil.convertToTimestamp(createTime), log.getZone_name(),
                log.getZone_type(), log.getZone_code(), getPlatformByAppKey(appKey), deviceName);
    }

    /**
     * Builds an event-info record from an APP event log plus one event entry.
     *
     * <p>Normalizes a missing/legacy app key, extracts user identity fields
     * from the user-properties list, and resolves the first public IPv4
     * address from the route-ip list via ip2region.
     *
     * @param odsEventLog the parsed APP event log
     * @param event       the event whose {@code r9} supplies the event time
     * @return the event info, or {@code null} when no usable public IP exists
     * @throws Exception propagated from parsing and time conversion
     */
    public static DailyActivityEventInfo handleEventData(OdsEventLog odsEventLog, EventList event) throws Exception {
        String deviceId = odsEventLog.getDevice_id();
        String uniqueId = odsEventLog.getUnique_id();
        String appType = odsEventLog.getApp_type();
        String appKey = odsEventLog.getApp_key();
        String createTime = odsEventLog.getCreate_time();
        String routeIp = odsEventLog.getRoute_ip();
        String userProperties = odsEventLog.getUser_properties();
        // Legacy/missing app keys are normalized to the default key.
        if (StringUtils.isEmpty(appKey) || StringUtils.equals(appKey, "C7jias27jias2")) {
            appKey = "8ooOvXJo276";
        }
        String cid = null;
        String phone = null;
        String nick = null;
        if (StringUtils.isNotEmpty(userProperties)) {
            List<UserProperties> userPropertiesList = JSONObject.parseObject(userProperties,
                    new TypeReference<List<UserProperties>>() {
                    });
            if (userPropertiesList != null && userPropertiesList.size() > 0) {
                for (UserProperties user : userPropertiesList) {
                    // NOTE(review): else-if chain — an entry that carries several
                    // properties contributes only its first non-empty one; confirm
                    // each list entry holds a single property.
                    if (StringUtils.isNotEmpty(user.getCid())) {
                        cid = user.getCid();
                    } else if (StringUtils.isNotEmpty(user.getPhone())) {
                        phone = user.getPhone();
                    } else if (StringUtils.isNotEmpty(user.getId())) {
                        cid = user.getId();
                    } else if (StringUtils.isNotEmpty(user.getNick())) {
                        nick = user.getNick();
                    } else if (StringUtils.isNotEmpty(user.getEmail())) {
                        nick = user.getEmail();
                    }
                }
            }
        }
        List<String> ips = SearcherUtil.convertStringToList(routeIp);
        if (CollectionUtils.isEmpty(ips)) {
            return null;
        }
        // Keep the first non-IPv6 address whose region lookup is not an intranet marker.
        String ip_name = null;
        String area_name = null;
        for (String ip : ips) {
            if (!SearcherUtil.ipv6(ip)) {
                area_name = SearcherUtil.getCityInfoByFile(ip);
                if (!CompareUtils.stringExists(area_name, "0|0|0|内网IP|内网IP",
                        "0|0|0|内网IP|Finance-and-Promoting-Technology")) {
                    ip_name = ip;
                    break;
                } else {
                    ip_name = null;
                    area_name = null;
                }
            }
        }
        if (StringUtils.isEmpty(ip_name)) {
            return null;
        }
        return new DailyActivityEventInfo(deviceId, uniqueId, cid, phone, nick, ip_name, area_name,
                TimeConvertUtil.parseToStringSSS(event.getR9()), TimeConvertUtil.convertToTimestamp(createTime),
                appKey, appType, createTime, createTime.substring(0, 10), getPlatformByAppKey(appKey));
    }

    /**
     * Builds an event-info record from a PC event log.
     *
     * <p>Same IP-resolution logic as {@link #handleEventData}, with two extra
     * rejected lookup results ("Request timed out." and "*"). Device id comes
     * from {@code pcEventInfo.getI8()}, event time from
     * {@code pcProperties.getR9()}.
     *
     * @return the event info, or {@code null} when cid/phone/routeIp are
     *         missing or no usable public IP exists
     * @throws Exception propagated from parsing and time conversion
     */
    public static DailyActivityEventInfo handlePcEventData(PcOdsEventLog pcOdsEventLog, PcEventInfo pcEventInfo,
            PcProperties pcProperties) throws Exception {
        String appKey = pcOdsEventLog.getApp_key();
        String appType = pcOdsEventLog.getApp_type();
        String createTime = pcOdsEventLog.getCreate_time();
        String cid = pcEventInfo.getCid();
        String phone = pcEventInfo.getPhone();
        String nick = pcEventInfo.getNick();
        if (StringUtils.isEmpty(cid)) {
            return null;
        }
        if (StringUtils.isEmpty(phone)) {
            return null;
        }
        String routeIp = pcEventInfo.getS1();
        if (StringUtils.isEmpty(routeIp)) {
            return null;
        }
        List<String> ips = SearcherUtil.convertStringToList(routeIp);
        if (CollectionUtils.isEmpty(ips)) {
            return null;
        }
        String ip_name = null;
        String area_name = null;
        for (String ip : ips) {
            if (!SearcherUtil.ipv6(ip)) {
                area_name = SearcherUtil.getCityInfoByFile(ip);
                if (!CompareUtils.stringExists(area_name, "0|0|0|内网IP|内网IP",
                        "0|0|0|内网IP|Finance-and-Promoting-Technology", "Request timed out.", "*")) {
                    ip_name = ip;
                    break;
                } else {
                    ip_name = null;
                    area_name = null;
                }
            }
        }
        if (StringUtils.isEmpty(ip_name)) {
            return null;
        }
        return new DailyActivityEventInfo(pcEventInfo.getI8(), pcEventInfo.getI8(), cid, phone, nick, ip_name,
                area_name, TimeConvertUtil.parseToStringSSS(pcProperties.getR9()),
                TimeConvertUtil.convertToTimestamp(createTime), appKey, appType, createTime,
                createTime.substring(0, 10), getPlatformByAppKey(appKey));
    }

    /**
     * Maps an app key to its display platform name.
     *
     * <p>Fix: the original switched on {@code appKey} directly, which throws
     * {@link NullPointerException} for a null key (possible on the
     * {@code handleDeviceData} path, which does not normalize appKey). A null
     * key now falls back to the existing default "未知平台".
     *
     * @param appKey the app key, may be {@code null}
     * @return the platform display name, never {@code null}
     */
    private static String getPlatformByAppKey(String appKey) {
        if (appKey == null) {
            // switch on a null String would throw NPE; treat as unknown platform
            return "未知平台";
        }
        switch (appKey) {
        // WuLian platform
        case "8ooOvXJo276":
            return "无链安卓国内版";
        case "9JQ3A7GA420":
            return "无链IOS海外版";
        // SiMi platform
        case "ptyzTPaV207":
            return "私米安卓国内版";
        case "giHQ1YLp925":
            return "私米IOS国内版";
        case "lOxLJYzx658":
            return "私米安卓海外版";
        case "lcALJYzx932":
            return "私米IOS海外版";
        // PC domestic
        case "pc1KPjmh951":
            return "Win国内版";
        case "pcrIjvC5805":
            return "Linux国内版";
        case "pcUXtmMh356":
            return "MacIntel国内版";
        case "pcrPGB1z531":
            return "MacArm国内版";
        // PC overseas
        case "pcRIhwh1380":
            return "Win海外版";
        case "pcQmdNl0952":
            return "Linux海外版";
        case "pc1etTC6207":
            return "MacIntel海外版";
        case "pcd9Sa8T989":
            return "MacArm海外版";
        default:
            return "未知平台";
        }
    }
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/DailyActivityCombinedLog.java
0 → 100644
View file @
44507015
package
com
.
flink
.
vo
.
userDailyActivity
;
import
java.io.Serializable
;
import
lombok.Data
;
import
lombok.ToString
;
/**
 * Combined daily-activity record joining device and event attributes.
 *
 * <p>Getters/setters, equals/hashCode and toString come from Lombok.
 *
 * @author wjs
 * @version created 2025-8-1 16:17:44
 */
@Data
@ToString
public class DailyActivityCombinedLog implements Serializable {

    /** Serialization version. */
    private static final long serialVersionUID = 1L;

    // Device attributes
    private String deviceIdV1;
    private String appKey;
    private String appType;
    // Partition date, e.g. "yyyy-MM-dd" — TODO confirm format set by callers.
    private String dt;
    private String model;
    private String brand;
    private String osRelease;
    private String appVersion;
    // User identity attributes
    private String cid;
    private String phone;
    private String nick;
    // Resolved IP and its region lookup result
    private String ip;
    private String areaName;
    private long waterMarkTime;
    private String zoneName;
    private String zoneType;
    private String zoneCode;
    private long firstTime;
    private long latestTime;
    private String platform;
    private String deviceName;
    private String countryCode;

    /**
     * Full constructor.
     *
     * <p>NOTE(review): the parameter order near the tail is
     * {@code (..., latestTime, deviceName, platform, countryCode)}, which
     * differs from the field declaration order ({@code platform} before
     * {@code deviceName}) — assignments below are correct, but callers must
     * pass arguments in this exact order.
     */
    public DailyActivityCombinedLog(String deviceIdV1, String appKey, String appType, String dt, String model,
            String brand, String osRelease, String appVersion, String cid, String phone, String nick, String ip,
            String areaName, long waterMarkTime, String zoneName, String zoneType, String zoneCode, long firstTime,
            long latestTime, String deviceName, String platform, String countryCode) {
        super();
        this.deviceIdV1 = deviceIdV1;
        this.appKey = appKey;
        this.appType = appType;
        this.dt = dt;
        this.model = model;
        this.brand = brand;
        this.osRelease = osRelease;
        this.appVersion = appVersion;
        this.cid = cid;
        this.phone = phone;
        this.nick = nick;
        this.ip = ip;
        this.areaName = areaName;
        this.waterMarkTime = waterMarkTime;
        this.zoneName = zoneName;
        this.zoneType = zoneType;
        this.zoneCode = zoneCode;
        this.firstTime = firstTime;
        this.latestTime = latestTime;
        this.deviceName = deviceName;
        this.platform = platform;
        this.countryCode = countryCode;
    }
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/DailyActivityDeviceInfo.java
0 → 100644
View file @
44507015
package
com
.
flink
.
vo
.
userDailyActivity
;
import
java.io.Serializable
;
import
lombok.Data
;
import
lombok.ToString
;
/**
 * Daily-activity device information (日活设备信息).
 *
 * <p>Produced by {@code BulidDailyParams.handleDeviceData} /
 * {@code handlePcDeviceData}. Getters/setters, equals/hashCode and toString
 * come from Lombok.
 *
 * @author wjs
 * @version created 2025-8-13 17:19:11
 */
@Data
@ToString
public class DailyActivityDeviceInfo implements Serializable {

    /** Serialization version. */
    private static final long serialVersionUID = 1L;

    private String deviceId;
    // Derived device id; for PC logs this equals deviceId.
    private String deviceIdV1;
    private String appKey;
    private String uniqueId;
    private String appType;
    // Partition date taken from createTime.substring(0, 10) upstream.
    private String dt;
    private String model;
    private String brand;
    private String osRelease;
    private String appVersion;
    private String zoneName;
    private String zoneType;
    private String zoneCode;
    // Timestamp converted from the log's createTime.
    private Long waterMarkTime;
    // Display platform name resolved from appKey.
    private String platform;
    // Null for APP logs; "b2-b3" for PC logs.
    private String deviceName;

    /**
     * Full constructor. Note {@code waterMarkTime} precedes the zone fields
     * here although the fields are declared in the opposite order.
     */
    public DailyActivityDeviceInfo(String deviceId, String deviceIdV1, String appKey, String uniqueId,
            String appType, String dt, String model, String brand, String osRelease, String appVersion,
            Long waterMarkTime, String zoneName, String zoneType, String zoneCode, String platform,
            String deviceName) {
        this.deviceId = deviceId;
        this.deviceIdV1 = deviceIdV1;
        this.appKey = appKey;
        this.uniqueId = uniqueId;
        this.appType = appType;
        this.dt = dt;
        this.model = model;
        this.brand = brand;
        this.osRelease = osRelease;
        this.appVersion = appVersion;
        this.waterMarkTime = waterMarkTime;
        this.zoneName = zoneName;
        this.zoneType = zoneType;
        this.zoneCode = zoneCode;
        this.platform = platform;
        this.deviceName = deviceName;
    }
}
eagleEye-flink_kafka/src/main/java/com/flink/vo/EnrichedLog.java
→
eagleEye-flink_kafka/src/main/java/com/flink/vo/
userDailyActivity/DailyActivity
EnrichedLog.java
View file @
44507015
package
com
.
flink
.
vo
;
package
com
.
flink
.
vo
.
userDailyActivity
;
import
java.io.Serializable
;
import
java.io.Serializable
;
...
@@ -12,7 +12,7 @@ import lombok.ToString;
...
@@ -12,7 +12,7 @@ import lombok.ToString;
*/
*/
@Data
@Data
@ToString
(
callSuper
=
true
)
@ToString
(
callSuper
=
true
)
public
class
EnrichedLog
extends
CombinedLog
implements
Serializable
{
public
class
DailyActivityEnrichedLog
extends
DailyActivity
CombinedLog
implements
Serializable
{
/**
/**
*
*
...
@@ -27,7 +27,7 @@ public class EnrichedLog extends CombinedLog implements Serializable{
...
@@ -27,7 +27,7 @@ public class EnrichedLog extends CombinedLog implements Serializable{
* @param phoneName 品牌手机名称(可空)
* @param phoneName 品牌手机名称(可空)
* @param networkModel 入网型号(可空)
* @param networkModel 入网型号(可空)
*/
*/
public
EnrichedLog
(
CombinedLog
baseLog
,
String
phoneName
,
String
networkModel
)
{
public
DailyActivityEnrichedLog
(
DailyActivity
CombinedLog
baseLog
,
String
phoneName
,
String
networkModel
)
{
// 调用父类构造方法初始化基础字段
// 调用父类构造方法初始化基础字段
super
(
super
(
baseLog
.
getDeviceIdV1
(),
baseLog
.
getAppKey
(),
baseLog
.
getAppType
(),
baseLog
.
getDeviceIdV1
(),
baseLog
.
getAppKey
(),
baseLog
.
getAppType
(),
...
@@ -36,7 +36,7 @@ public class EnrichedLog extends CombinedLog implements Serializable{
...
@@ -36,7 +36,7 @@ public class EnrichedLog extends CombinedLog implements Serializable{
baseLog
.
getPhone
(),
baseLog
.
getNick
(),
baseLog
.
getIp
(),
baseLog
.
getPhone
(),
baseLog
.
getNick
(),
baseLog
.
getIp
(),
baseLog
.
getAreaName
(),
baseLog
.
getWaterMarkTime
(),
baseLog
.
getAreaName
(),
baseLog
.
getWaterMarkTime
(),
baseLog
.
getZoneName
(),
baseLog
.
getZoneType
(),
baseLog
.
getZoneCode
(),
baseLog
.
getZoneName
(),
baseLog
.
getZoneType
(),
baseLog
.
getZoneCode
(),
baseLog
.
getFirstTime
(),
baseLog
.
getLatestTime
()
baseLog
.
getFirstTime
(),
baseLog
.
getLatestTime
()
,
baseLog
.
getDeviceName
(),
baseLog
.
getPlatform
(),
baseLog
.
getCountryCode
()
);
);
this
.
phoneName
=
phoneName
;
this
.
phoneName
=
phoneName
;
this
.
networkModel
=
networkModel
;
this
.
networkModel
=
networkModel
;
...
...
eagleEye-flink_kafka/src/main/java/com/flink/vo/userDailyActivity/DailyActivityEventInfo.java
0 → 100644
View file @
44507015
package
com
.
flink
.
vo
.
userDailyActivity
;
import
java.io.Serializable
;
import
lombok.Data
;
import
lombok.ToString
;
/**
 * Daily-activity event information.
 *
 * <p>Produced by {@code BulidDailyParams.handleEventData} /
 * {@code handlePcEventData}. Getters/setters, equals/hashCode and toString
 * come from Lombok.
 *
 * @author wjs
 * @version created 2025-8-13 17:40:32
 */
@Data
@ToString
public class DailyActivityEventInfo implements Serializable {

    /** Serialization version. */
    private static final long serialVersionUID = 1L;

    private String deviceId;
    private String uniqueId;
    // User identity (cid may fall back to the user-properties id upstream).
    private String cid;
    private String phone;
    private String nick;
    // First public IP resolved from the route-ip list, and its region lookup.
    private String ip;
    private String areaName;
    private String eventTime;
    // Timestamp converted from createTime.
    private Long waterMarkTime;
    private String appKey;
    private String appType;
    private String createTime;
    // Partition date: createTime.substring(0, 10) upstream.
    private String dt;
    // Display platform name resolved from appKey.
    private String platform;

    /** Full constructor; parameters follow the field declaration order. */
    public DailyActivityEventInfo(String deviceId, String uniqueId, String cid, String phone, String nick,
            String ip, String areaName, String eventTime, Long waterMarkTime, String appKey, String appType,
            String createTime, String dt, String platform) {
        this.deviceId = deviceId;
        this.uniqueId = uniqueId;
        this.cid = cid;
        this.phone = phone;
        this.nick = nick;
        this.ip = ip;
        this.areaName = areaName;
        this.eventTime = eventTime;
        this.waterMarkTime = waterMarkTime;
        this.appKey = appKey;
        this.appType = appType;
        this.createTime = createTime;
        this.dt = dt;
        this.platform = platform;
    }
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment