path: root/src/main/java/com/zdjizhi/etl/DnsGraphProcessFunction.java
package com.zdjizhi.etl;

import cn.hutool.core.convert.Convert;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.util.LinkedHashMap;
import java.util.Map;


/**
 * Deduplicates DNS records within a window: for each (record_type, qname, record) key,
 * a single record is emitted whose last_found_time is the latest start_time observed.
 */
public class DnsGraphProcessFunction extends ProcessWindowFunction<Map<String, Object>, Map<String, Object>, Tuple3<String, String, String>, TimeWindow> {

    private static final Log logger = LogFactory.get();

    @Override
    public void process(Tuple3<String, String, String> keys, Context context, Iterable<Map<String, Object>> elements, Collector<Map<String, Object>> out) {

        try {
            // Keep only the newest start_time seen for this key within the window.
            long tmpTime = 0L;
            for (Map<String, Object> log : elements) {
                Long startTime = Convert.toLong(log.get("start_time"));
                // Guard against records without a parsable start_time.
                if (startTime != null && startTime > tmpTime) {
                    tmpTime = startTime;
                }
            }
            Map<String, Object> newLog = new LinkedHashMap<>();
            newLog.put("record_type", keys.f0);
            newLog.put("qname", keys.f1);
            newLog.put("record", keys.f2);
            newLog.put("last_found_time", tmpTime);
            out.collect(newLog);
            logger.debug("Intermediate aggregation result: {}", newLog);
        } catch (Exception e) {
            logger.error(e, "Failed to build intermediate aggregation result");
        }
    }

}
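
// Wiring sketch: one plausible way to attach this window function in a Flink job. The stream
// name (dnsStream), key selector, window size, and sink below are illustrative assumptions,
// not taken from this repository.
//
// dnsStream
//         .keyBy(log -> Tuple3.of(
//                         Convert.toStr(log.get("record_type")),
//                         Convert.toStr(log.get("qname")),
//                         Convert.toStr(log.get("record"))),
//                 Types.TUPLE(Types.STRING, Types.STRING, Types.STRING))
//         .window(TumblingProcessingTimeWindows.of(Time.minutes(5)))
//         .process(new DnsGraphProcessFunction())
//         .addSink(...);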