summaryrefslogtreecommitdiff
path: root/src/main/java/com/zdjizhi/etl/DnsRelationProcessFunction.java
diff options
context:
space:
mode:
authorzhanghongqing <[email protected]>2022-07-13 16:46:58 +0800
committerzhanghongqing <[email protected]>2022-07-13 16:46:58 +0800
commit: 95eefbd8b791f91f2b38e335dd77ce2816d81a1c (patch)
tree: 8995c46179f7d4950cad905416f53329833c3a46 /src/main/java/com/zdjizhi/etl/DnsRelationProcessFunction.java
parent: 06042db9b11bf3a17eaec455b3daf5b31de679d7 (diff)
优化代码:去除无使用的类
Diffstat (limited to 'src/main/java/com/zdjizhi/etl/DnsRelationProcessFunction.java')
-rw-r--r--src/main/java/com/zdjizhi/etl/DnsRelationProcessFunction.java58
1 file changed, 0 insertions, 58 deletions
diff --git a/src/main/java/com/zdjizhi/etl/DnsRelationProcessFunction.java b/src/main/java/com/zdjizhi/etl/DnsRelationProcessFunction.java
deleted file mode 100644
index 04e45f8..0000000
--- a/src/main/java/com/zdjizhi/etl/DnsRelationProcessFunction.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package com.zdjizhi.etl;
-
-import cn.hutool.core.convert.Convert;
-import cn.hutool.core.date.DateUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import org.apache.flink.api.java.tuple.Tuple3;
-import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
-import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
-import org.apache.flink.util.Collector;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import static com.zdjizhi.common.FlowWriteConfig.LOG_AGGREGATE_DURATION;
-
-
-/**
- * @author 94976
- */
-public class DnsRelationProcessFunction extends ProcessWindowFunction<Map<String, Object>, Map<String, Object>, Tuple3<String, String, String>, TimeWindow> {
-
- private static final Log logger = LogFactory.get();
-
- /**
- * 拆分dns_record
- * 聚合统计
- * 五种:a/aaaa/cname/mx/ns
- *
- * @param elements
- * @return
- */
- @Override
- public void process(Tuple3<String, String, String> keys, Context context, Iterable<Map<String, Object>> elements, Collector<Map<String, Object>> out) {
-
- try {
- long sessions = 0L;
- long startTime = DateUtil.currentSeconds();
- long endTime = DateUtil.currentSeconds();
- for (Map<String, Object> log : elements) {
- sessions++;
- long logStartTime = Convert.toLong(log.get("start_time"));
- startTime = logStartTime < startTime ? logStartTime : startTime;
- endTime = logStartTime > endTime ? logStartTime : endTime;
- }
- Map<String, Object> newDns = new LinkedHashMap<>();
- newDns.put("start_time", startTime);
- newDns.put("end_time", endTime + LOG_AGGREGATE_DURATION);
- newDns.put("record_type", keys.f0);
- newDns.put("qname", keys.f1);
- newDns.put("record", keys.f2);
- newDns.put("sessions", sessions);
- out.collect(newDns);
- } catch (Exception e) {
- logger.error("dns relation 日志聚合失败: {}", e);
- }
- }
-}