summaryrefslogtreecommitdiff
path: root/src/main
diff options
context:
space:
mode:
Diffstat (limited to 'src/main')
-rw-r--r--src/main/java/cn/ac/iie/bean/CommonLog.java400
-rw-r--r--src/main/java/cn/ac/iie/bean/ConfigCompile.java173
-rw-r--r--src/main/java/cn/ac/iie/bean/PzTable.java33
-rw-r--r--src/main/java/cn/ac/iie/bean/ntc/NTC_CONN_RECORD_LOG.java142
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipCount/VoipCoding.java31
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipCount/VoipIpLocation.java52
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipCount/VoipIpType.java31
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipCount/VoipMethod.java31
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipCount/VoipResStat.java40
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipCount/VoipServer.java31
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipCount/VoipServiceDomain.java40
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipCount/VoipUa.java31
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipFromToLog/RouteRelationLog.java34
-rw-r--r--src/main/java/cn/ac/iie/bean/voipSipOrigin/SipOriginALL.java762
-rw-r--r--src/main/java/cn/ac/iie/bolt/FromSpoutBufferBoltDC.java40
-rw-r--r--src/main/java/cn/ac/iie/bolt/GetSipOriBoltDC.java1027
-rw-r--r--src/main/java/cn/ac/iie/bolt/SipInsertBoltDC.java136
-rw-r--r--src/main/java/cn/ac/iie/bolt/SipRealTimeCountBoltDC.java182
-rw-r--r--src/main/java/cn/ac/iie/bolt/SipRealTimeMergeBoltDC.java604
-rw-r--r--src/main/java/cn/ac/iie/common/CommonService.java68
-rw-r--r--src/main/java/cn/ac/iie/common/DataCenterLoad.java93
-rw-r--r--src/main/java/cn/ac/iie/common/HashTableConfig.java354
-rw-r--r--src/main/java/cn/ac/iie/common/HttpManager.java218
-rw-r--r--src/main/java/cn/ac/iie/common/RealtimeCountConfig.java99
-rw-r--r--src/main/java/cn/ac/iie/dao/DataBaseBusiness.java1604
-rw-r--r--src/main/java/cn/ac/iie/dao/DataBaseLoad.java166
-rw-r--r--src/main/java/cn/ac/iie/dao/DataBasePzBusiness.java122
-rw-r--r--src/main/java/cn/ac/iie/dao/DbConnect.java102
-rw-r--r--src/main/java/cn/ac/iie/dao/JdbcConnectionManager.java392
-rw-r--r--src/main/java/cn/ac/iie/dao/JdbcPzConnectionManager.java392
-rw-r--r--src/main/java/cn/ac/iie/dao/KafkaDB.java81
-rw-r--r--src/main/java/cn/ac/iie/spout/CustomizedKafkaSpout.java66
-rw-r--r--src/main/java/cn/ac/iie/spout/sip/SIP_ORIGIN_ALL_KafkaSpout.java79
-rw-r--r--src/main/java/cn/ac/iie/topology/LogRealtimeCountTopology.java94
-rw-r--r--src/main/java/cn/ac/iie/topology/StormRunner.java32
-rw-r--r--src/main/java/cn/ac/iie/utils/CSVAlarm.java77
-rw-r--r--src/main/java/cn/ac/iie/utils/HiveDao/HdfsDataLoad_Avro.java240
-rw-r--r--src/main/java/cn/ac/iie/utils/HiveDao/HiveDataSourceUtil.java187
-rw-r--r--src/main/java/cn/ac/iie/utils/IPIPLibrary/Ipip.java189
-rw-r--r--src/main/java/cn/ac/iie/utils/RealtimeCountConfigurations.java67
-rw-r--r--src/main/java/cn/ac/iie/utils/TupleUtils.java13
-rw-r--r--src/main/java/cn/ac/iie/utils/dao/ClickHouseUtils.java47
-rw-r--r--src/main/java/cn/ac/iie/utils/getjson/GetJsonToKafkaUtils.java835
-rw-r--r--src/main/java/cn/ac/iie/utils/getjson/GetStrToClickHouseUtils.java585
-rw-r--r--src/main/java/cn/ac/iie/utils/redis/RedisPollUtils.java61
-rw-r--r--src/main/java/cn/ac/iie/utils/redis/RedisUrlPollUtils.java61
-rw-r--r--src/main/java/log4j.properties16
-rw-r--r--src/main/resources/core-site.xml56
-rw-r--r--src/main/resources/hdfs-site.xml116
-rw-r--r--src/main/resources/mapred-site.xml33
-rw-r--r--src/main/resources/yarn-site.xml61
51 files changed, 10426 insertions, 0 deletions
diff --git a/src/main/java/cn/ac/iie/bean/CommonLog.java b/src/main/java/cn/ac/iie/bean/CommonLog.java
new file mode 100644
index 0000000..ead6e8c
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/CommonLog.java
@@ -0,0 +1,400 @@
+package cn.ac.iie.bean;
+
+/**
+ * 公共字段类
+ *
+ * @author Administrator
+ */
+public class CommonLog {
+
+ private int cfg_id;//建表,需要,即配置ID,但数据中没有
+
+ private int found_time;//建表,必需
+ private int recv_time;//建表,必需
+
+ private String trans_proto;//建表,必需,即PROTOCOL
+
+ private String d_ip;//建表,必需,即SERVER_IP,服务端IP
+ private String s_ip;//建表,必需,即CLIENT_IP,客户端IP
+ private int d_port;//建表,必需,即SERVER_PORT,服务端端口
+ private int s_port;//建表,必需,即CLIENT_PORT,客户端端口
+
+ private int entrance_id;//建表,必需
+ private String cap_ip;//建表,必需
+
+ private String server_locate;//建表,方法获取
+ private String client_locate;//建表,方法获取
+
+ //以下为无法接收到的字段或者可以直接置空的字段(即非必要字段)
+ private long over_id;//建表
+ private String nest_protocol;//建表
+ private String nest_server_ip;//建表
+ private String nest_client_ip;//建表
+ private int nest_server_port;//建表
+ private int nest_client_port;//建表
+ private int service;//建表,已有
+ private String scene_file;//建表
+ private String injected_pkt_file;//建表
+ private String nest_addr_list;//建表
+ private int action;//建表
+
+ private String user_region;//20190507,需要建表,获取reset标记字段
+ private int stream_dir;//20190507,需要建表,获取reset标记字段
+ private String addr_list;//20190507,需要建表,获取reset标记字段
+
+ //表中没有,即未加入建表的,但数据中有;//20190507旧表新增字段
+ private int addr_type;//未建表,目前数据中ipv6特有;//20190507,需要建表,获取reset标记字段
+
+
+ //user_region内含字段
+ private String killed;//20190507新增,killed=0表示未rst;killed=1表示rst过了;
+ private String thread;//20190507新增
+ private String index;//20190507新增
+ private String hash;//20190507新增
+
+
+// private int device_id;
+// private int link_id;
+// private int encap_type;
+// private int direction;
+// private String inner_smac;
+// private String inner_dmac;
+
+// private String s_asn;
+// private String d_asn;
+// private String s_subscribe_id;
+// private String d_subscribe_id;
+
+
+ public int getCfg_id() {
+ return cfg_id;
+ }
+
+ public void setCfg_id(int cfg_id) {
+ this.cfg_id = cfg_id;
+ }
+
+ public int getFound_time() {
+ return found_time;
+ }
+
+ public void setFound_time(int found_time) {
+ this.found_time = found_time;
+ }
+
+ public int getRecv_time() {
+ return recv_time;
+ }
+
+ public void setRecv_time(int recv_time) {
+ this.recv_time = recv_time;
+ }
+
+ public String getTrans_proto() {
+ return trans_proto;
+ }
+
+ public void setTrans_proto(String trans_proto) {
+ this.trans_proto = trans_proto;
+ }
+
+ public String getD_ip() {
+ return d_ip;
+ }
+
+ public void setD_ip(String d_ip) {
+ this.d_ip = d_ip;
+ }
+
+ public String getS_ip() {
+ return s_ip;
+ }
+
+ public void setS_ip(String s_ip) {
+ this.s_ip = s_ip;
+ }
+
+ public int getD_port() {
+ return d_port;
+ }
+
+ public void setD_port(int d_port) {
+ this.d_port = d_port;
+ }
+
+ public int getS_port() {
+ return s_port;
+ }
+
+ public void setS_port(int s_port) {
+ this.s_port = s_port;
+ }
+
+ public int getEntrance_id() {
+ return entrance_id;
+ }
+
+ public void setEntrance_id(int entrance_id) {
+ this.entrance_id = entrance_id;
+ }
+
+ public String getCap_ip() {
+ return cap_ip;
+ }
+
+ public void setCap_ip(String cap_ip) {
+ this.cap_ip = cap_ip;
+ }
+
+ public String getServer_locate() {
+ return server_locate;
+ }
+
+ public void setServer_locate(String server_locate) {
+ this.server_locate = server_locate;
+ }
+
+ public String getClient_locate() {
+ return client_locate;
+ }
+
+ public void setClient_locate(String client_locate) {
+ this.client_locate = client_locate;
+ }
+
+ public long getOver_id() {
+ return over_id;
+ }
+
+ public void setOver_id(long over_id) {
+ this.over_id = over_id;
+ }
+
+ public String getNest_protocol() {
+ return nest_protocol;
+ }
+
+ public void setNest_protocol(String nest_protocol) {
+ this.nest_protocol = nest_protocol;
+ }
+
+ public String getNest_server_ip() {
+ return nest_server_ip;
+ }
+
+ public void setNest_server_ip(String nest_server_ip) {
+ this.nest_server_ip = nest_server_ip;
+ }
+
+ public String getNest_client_ip() {
+ return nest_client_ip;
+ }
+
+ public void setNest_client_ip(String nest_client_ip) {
+ this.nest_client_ip = nest_client_ip;
+ }
+
+ public int getNest_server_port() {
+ return nest_server_port;
+ }
+
+ public void setNest_server_port(int nest_server_port) {
+ this.nest_server_port = nest_server_port;
+ }
+
+ public int getNest_client_port() {
+ return nest_client_port;
+ }
+
+ public void setNest_client_port(int nest_client_port) {
+ this.nest_client_port = nest_client_port;
+ }
+
+ public int getService() {
+ return service;
+ }
+
+ public void setService(int service) {
+ this.service = service;
+ }
+
+ public String getScene_file() {
+ return scene_file;
+ }
+
+ public void setScene_file(String scene_file) {
+ this.scene_file = scene_file;
+ }
+
+ public String getInjected_pkt_file() {
+ return injected_pkt_file;
+ }
+
+ public void setInjected_pkt_file(String injected_pkt_file) {
+ this.injected_pkt_file = injected_pkt_file;
+ }
+
+ public String getNest_addr_list() {
+ return nest_addr_list;
+ }
+
+ public void setNest_addr_list(String nest_addr_list) {
+ this.nest_addr_list = nest_addr_list;
+ }
+
+ public int getAction() {
+ return action;
+ }
+
+ public void setAction(int action) {
+ this.action = action;
+ }
+
+ public int getAddr_type() {
+ return addr_type;
+ }
+
+ public void setAddr_type(int addr_type) {
+ this.addr_type = addr_type;
+ }
+
+ public int getStream_dir() {
+ return stream_dir;
+ }
+
+ public void setStream_dir(int stream_dir) {
+ this.stream_dir = stream_dir;
+ }
+
+ public String getAddr_list() {
+ return addr_list;
+ }
+
+ public void setAddr_list(String addr_list) {
+ this.addr_list = addr_list;
+ }
+
+ public String getUser_region() {
+ return user_region;
+ }
+
+ public void setUser_region(String user_region) {
+ this.user_region = user_region;
+ }
+
+ public String getKilled() {
+ return killed;
+ }
+
+ public void setKilled(String killed) {
+ this.killed = killed;
+ }
+
+ public String getThread() {
+ return thread;
+ }
+
+ public void setThread(String thread) {
+ this.thread = thread;
+ }
+
+ public String getIndex() {
+ return index;
+ }
+
+ public void setIndex(String index) {
+ this.index = index;
+ }
+
+ public String getHash() {
+ return hash;
+ }
+
+ public void setHash(String hash) {
+ this.hash = hash;
+ }
+
+ @Override
+ public String toString() {
+ return cfg_id + "#" +
+ found_time + "#" +
+ recv_time + "#" +
+ trans_proto + "#" +
+ d_ip + "#" +
+ s_ip + "#" +
+ d_port + "#" +
+ s_port + "#" +
+ entrance_id + "#" +
+ cap_ip + "#" +
+ server_locate + "#" +
+ client_locate + "#" +
+ over_id + "#" +
+ nest_protocol + "#" +
+ nest_server_ip + "#" +
+ nest_client_ip + "#" +
+ nest_server_port + "#" +
+ nest_client_port + "#" +
+ service + "#" +
+ scene_file + "#" +
+ injected_pkt_file + "#" +
+ nest_addr_list + "#" +
+ action + "##" +
+ addr_type + "##" +
+ stream_dir + "##" +
+ addr_list + "##" +
+ user_region
+// device_id + "#" +
+// link_id + "#" +
+// encap_type + "#" +
+// direction + "#" +
+// inner_smac + "#" +
+// inner_dmac + "#" +
+// s_asn + "#" +
+// d_asn + "#" +
+// s_subscribe_id + "#" +
+// d_subscribe_id + "#" +
+ ;
+ }
+
+ //获取字段打印
+ public String field() {
+ return cfg_id + "#" +
+ found_time + "#" +
+ recv_time + "#" +
+ trans_proto + "#" +
+ d_ip + "#" +
+ s_ip + "#" +
+ d_port + "#" +
+ s_port + "#" +
+ entrance_id + "#" +
+ cap_ip + "#" +
+ server_locate + "#" +
+ client_locate + "#" +
+ over_id + "#" +
+ nest_protocol + "#" +
+ nest_server_ip + "#" +
+ nest_client_ip + "#" +
+ nest_server_port + "#" +
+ nest_client_port + "#" +
+ service + "#" +
+ scene_file + "#" +
+ injected_pkt_file + "#" +
+ nest_addr_list + "#" +
+ action + "##" +
+ addr_type + "##" +
+ stream_dir + "##" +
+ addr_list + "##" +
+ user_region
+// device_id + "#" +
+// link_id + "#" +
+// encap_type + "#" +
+// direction + "#" +
+// inner_smac + "#" +
+// inner_dmac + "#" +
+// s_asn + "#" +
+// d_asn + "#" +
+// s_subscribe_id + "#" +
+// d_subscribe_id + "#" +
+ ;
+ }
+}
diff --git a/src/main/java/cn/ac/iie/bean/ConfigCompile.java b/src/main/java/cn/ac/iie/bean/ConfigCompile.java
new file mode 100644
index 0000000..80d3a87
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/ConfigCompile.java
@@ -0,0 +1,173 @@
+package cn.ac.iie.bean;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+
+import cn.ac.iie.common.RealtimeCountConfig;
+
+public class ConfigCompile implements Serializable{
+
+ private static final long serialVersionUID = 2492616231566889401L;
+ private String COMPILE_ID;
+ private String CONT_TYPE;
+ private String ATTR_TYPE;
+ private String CONT_LABEL;
+ private String TASK_ID;
+ private String GUARANTEE_ID;
+ private String AFFAIR_ID;
+ private String TOPIC_ID;
+ private String IS_VALID;
+ private String SERVICE;
+ private String LWHH;
+ private ArrayList<String> TAG;
+
+
+ public ConfigCompile(String[] config){
+ this.COMPILE_ID = config[0];
+ this.CONT_TYPE = config[1];
+ this.ATTR_TYPE = config[2];
+ this.CONT_LABEL = config[3];
+ this.TASK_ID = config[4];
+ this.GUARANTEE_ID = config[5];
+ this.AFFAIR_ID = config[6];
+ this.TOPIC_ID = config[7];
+ this.IS_VALID = config[8];
+ String[] tagList = config[3].split(",");
+ this.SERVICE = makeService(tagList);
+ this.LWHH = makeLwhh(tagList);
+ this.TAG = makeTag(tagList);
+ }
+
+ private String makeService(String[] tagList){
+ for(int i=0; i<tagList.length; i++){
+ int aTag = Integer.parseInt(tagList[i].trim());
+ if(aTag>0 && aTag<=10000){
+ return tagList[i].trim();
+ }
+ }
+ return RealtimeCountConfig.EMPTY_OPTION_CHARACTER;
+ }
+
+ private String makeLwhh(String[] tagList){
+ for(int i=0; i<tagList.length; i++){
+ int aTag = Integer.parseInt(tagList[i].trim());
+ if(aTag>=1000000){
+ return tagList[i].trim();
+ }
+ }
+ return RealtimeCountConfig.EMPTY_OPTION_CHARACTER;
+ }
+
+ private ArrayList<String> makeTag(String[] tagList){
+ ArrayList<String> al = new ArrayList<String>();
+ for(int i=0; i<tagList.length; i++){
+ int aTag = Integer.parseInt(tagList[i].trim());
+ if(aTag>10000 && aTag<1000000){
+ al.add(tagList[i].trim());
+ }
+ }
+ return al;
+ }
+
+ public String getCOMPILE_ID() {
+ return COMPILE_ID;
+ }
+
+ public void setCOMPILE_ID(String cOMPILE_ID) {
+ COMPILE_ID = cOMPILE_ID;
+ }
+
+ public String getCONT_TYPE() {
+ return CONT_TYPE;
+ }
+
+ public void setCONT_TYPE(String cONT_TYPE) {
+ CONT_TYPE = cONT_TYPE;
+ }
+
+ public String getATTR_TYPE() {
+ return ATTR_TYPE;
+ }
+
+ public void setATTR_TYPE(String aTTR_TYPE) {
+ ATTR_TYPE = aTTR_TYPE;
+ }
+
+ public String getCONT_LABEL() {
+ return CONT_LABEL;
+ }
+
+ public void setCONT_LABEL(String cONT_LABEL) {
+ CONT_LABEL = cONT_LABEL;
+ }
+
+ public String getTASK_ID() {
+ return TASK_ID;
+ }
+
+ public void setTASK_ID(String tASK_ID) {
+ TASK_ID = tASK_ID;
+ }
+
+ public String getGUARANTEE_ID() {
+ return GUARANTEE_ID;
+ }
+
+ public void setGUARANTEE_ID(String gUARANTEE_ID) {
+ GUARANTEE_ID = gUARANTEE_ID;
+ }
+
+ public String getAFFAIR_ID() {
+ return AFFAIR_ID;
+ }
+
+ public void setAFFAIR_ID(String aFFAIR_ID) {
+ AFFAIR_ID = aFFAIR_ID;
+ }
+
+ public String getTOPIC_ID() {
+ return TOPIC_ID;
+ }
+
+ public void setTOPIC_ID(String tOPIC_ID) {
+ TOPIC_ID = tOPIC_ID;
+ }
+
+
+ public String getIS_VALID() {
+ return IS_VALID;
+ }
+
+
+ public void setIS_VALID(String iS_VALID) {
+ IS_VALID = iS_VALID;
+ }
+
+
+ public String getSERVICE() {
+ return SERVICE;
+ }
+
+
+ public void setSERVICE(String sERVICE) {
+ SERVICE = sERVICE;
+ }
+
+ public String getLWHH() {
+ return LWHH;
+ }
+
+ public void setLWHH(String lWHH) {
+ LWHH = lWHH;
+ }
+
+ public ArrayList<String> getTAG() {
+ return TAG;
+ }
+
+ public void setTAG(ArrayList<String> tAG) {
+ TAG = tAG;
+ }
+
+
+}
diff --git a/src/main/java/cn/ac/iie/bean/PzTable.java b/src/main/java/cn/ac/iie/bean/PzTable.java
new file mode 100644
index 0000000..abfaef5
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/PzTable.java
@@ -0,0 +1,33 @@
+package cn.ac.iie.bean;
+import java.util.HashMap;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import cn.ac.iie.dao.DataBasePzBusiness;
+import org.apache.log4j.Logger;
+
+public class PzTable {
+ public static HashMap<String, String> pzMap = new HashMap<String, String>();
+ public static Lock trylock = new ReentrantLock();
+ private static final Logger logger = Logger.getLogger(PzTable.class);
+ public static long lastUpdateTime = 0L;
+ public static long seq = 0L;
+
+ public static void updatePzMap(){
+ if (trylock.tryLock()) {
+ try {
+ long timeNow = System.currentTimeMillis();
+ if(((timeNow-lastUpdateTime)/1000) > 240){
+ //update pzMap and lastUpdateTime
+ new DataBasePzBusiness().getPzToMap(seq);
+ lastUpdateTime = timeNow;
+ logger.info("pzMap.size: "+pzMap.size());
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ trylock.unlock();
+ }
+ }
+ }
+}
diff --git a/src/main/java/cn/ac/iie/bean/ntc/NTC_CONN_RECORD_LOG.java b/src/main/java/cn/ac/iie/bean/ntc/NTC_CONN_RECORD_LOG.java
new file mode 100644
index 0000000..61d731d
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/ntc/NTC_CONN_RECORD_LOG.java
@@ -0,0 +1,142 @@
+package cn.ac.iie.bean.ntc;
+
+import cn.ac.iie.bean.CommonLog;
+
+public class NTC_CONN_RECORD_LOG extends CommonLog {
+
+ private String app_label;
+ private int proto_id;
+ private int app_id;
+ private int os_id;
+ private int bs_id;
+ private int web_id;
+ private int behav_id;
+ private long c2s_pkt_num;
+ private long s2c_pkt_num;
+ private long c2s_byte_num;
+ private long s2c_byte_num;
+
+ private int create_time;
+ private int lastmtime;
+
+// private int c2s_pkt_num;
+// private int s2c_pkt_num;
+// private int c2s_byte_num;
+// private int s2c_byte_num;
+
+ public String getApp_label() {
+ return app_label;
+ }
+
+ public void setApp_label(String app_label) {
+ this.app_label = app_label;
+ }
+
+ public int getProto_id() {
+ return proto_id;
+ }
+
+ public void setProto_id(int proto_id) {
+ this.proto_id = proto_id;
+ }
+
+ public int getApp_id() {
+ return app_id;
+ }
+
+ public void setApp_id(int app_id) {
+ this.app_id = app_id;
+ }
+
+ public int getOs_id() {
+ return os_id;
+ }
+
+ public void setOs_id(int os_id) {
+ this.os_id = os_id;
+ }
+
+ public int getBs_id() {
+ return bs_id;
+ }
+
+ public void setBs_id(int bs_id) {
+ this.bs_id = bs_id;
+ }
+
+ public int getWeb_id() {
+ return web_id;
+ }
+
+ public void setWeb_id(int web_id) {
+ this.web_id = web_id;
+ }
+
+ public int getBehav_id() {
+ return behav_id;
+ }
+
+ public void setBehav_id(int behav_id) {
+ this.behav_id = behav_id;
+ }
+
+ public long getC2s_pkt_num() {
+ return c2s_pkt_num;
+ }
+
+ public void setC2s_pkt_num(long c2s_pkt_num) {
+ this.c2s_pkt_num = c2s_pkt_num;
+ }
+
+ public long getS2c_pkt_num() {
+ return s2c_pkt_num;
+ }
+
+ public void setS2c_pkt_num(long s2c_pkt_num) {
+ this.s2c_pkt_num = s2c_pkt_num;
+ }
+
+ public long getC2s_byte_num() {
+ return c2s_byte_num;
+ }
+
+ public void setC2s_byte_num(long c2s_byte_num) {
+ this.c2s_byte_num = c2s_byte_num;
+ }
+
+ public long getS2c_byte_num() {
+ return s2c_byte_num;
+ }
+
+ public void setS2c_byte_num(long s2c_byte_num) {
+ this.s2c_byte_num = s2c_byte_num;
+ }
+
+ public int getCreate_time() {
+ return create_time;
+ }
+
+ public void setCreate_time(int create_time) {
+ this.create_time = create_time;
+ }
+
+ public int getLastmtime() {
+ return lastmtime;
+ }
+
+ public void setLastmtime(int lastmtime) {
+ this.lastmtime = lastmtime;
+ }
+
+ @Override
+ public String toString() {
+ return field() +
+ app_label + "\t" +
+ c2s_pkt_num + "\t" +
+ s2c_pkt_num + "\t" +
+ c2s_byte_num + "\t" +
+ s2c_byte_num + "\t" +
+ create_time + "\t" +
+ lastmtime + "\n";
+ }
+}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipCount/VoipCoding.java b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipCoding.java
new file mode 100644
index 0000000..a54120c
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipCoding.java
@@ -0,0 +1,31 @@
+package cn.ac.iie.bean.voipSipCount;
+
/**
 * Per-interval occurrence counter for a SIP media codec, used by the
 * VoIP statistics pipeline.
 */
public class VoipCoding {
    /** Codec name. */
    private String coding;
    /** Occurrences within the interval. */
    private long count;
    /** Label of the aggregation interval. */
    private String interval_time;

    public String getCoding() {
        return coding;
    }

    public long getCount() {
        return count;
    }

    public String getInterval_time() {
        return interval_time;
    }

    public void setCoding(String coding) {
        this.coding = coding;
    }

    public void setCount(long count) {
        this.count = count;
    }

    public void setInterval_time(String interval_time) {
        this.interval_time = interval_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipCount/VoipIpLocation.java b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipIpLocation.java
new file mode 100644
index 0000000..128af68
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipIpLocation.java
@@ -0,0 +1,52 @@
+package cn.ac.iie.bean.voipSipCount;
+
/**
 * Per-interval counter of SIP traffic grouped by geographic IP location.
 */
public class VoipIpLocation {
    /** Country name of the IP. */
    private String country;
    /** Region/province of the IP. */
    private String region;
    /** Country code of the IP. */
    private String nationCode;
    /** Occurrences within the interval. */
    private long count;
    /** Label of the aggregation interval. */
    private String interval_time;

    public String getCountry() {
        return country;
    }

    public String getRegion() {
        return region;
    }

    public String getNationCode() {
        return nationCode;
    }

    public long getCount() {
        return count;
    }

    public String getInterval_time() {
        return interval_time;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    public void setRegion(String region) {
        this.region = region;
    }

    public void setNationCode(String nationCode) {
        this.nationCode = nationCode;
    }

    public void setCount(long count) {
        this.count = count;
    }

    public void setInterval_time(String interval_time) {
        this.interval_time = interval_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipCount/VoipIpType.java b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipIpType.java
new file mode 100644
index 0000000..d3f946b
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipIpType.java
@@ -0,0 +1,31 @@
+package cn.ac.iie.bean.voipSipCount;
+
/**
 * Per-interval counter of SIP traffic grouped by IP type.
 */
public class VoipIpType {
    /** IP type label. */
    private String type;
    /** Occurrences within the interval. */
    private long count;
    /** Label of the aggregation interval. */
    private String interval_time;

    public String getType() {
        return type;
    }

    public long getCount() {
        return count;
    }

    public String getInterval_time() {
        return interval_time;
    }

    public void setType(String type) {
        this.type = type;
    }

    public void setCount(long count) {
        this.count = count;
    }

    public void setInterval_time(String interval_time) {
        this.interval_time = interval_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipCount/VoipMethod.java b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipMethod.java
new file mode 100644
index 0000000..7cbefd3
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipMethod.java
@@ -0,0 +1,31 @@
+package cn.ac.iie.bean.voipSipCount;
+
/**
 * Per-interval counter of SIP requests grouped by method (INVITE, BYE, ...).
 */
public class VoipMethod {
    /** SIP method name. */
    private String method;
    /** Occurrences within the interval. */
    private long count;
    /** Label of the aggregation interval. */
    private String interval_time;

    public String getMethod() {
        return method;
    }

    public long getCount() {
        return count;
    }

    public String getInterval_time() {
        return interval_time;
    }

    public void setMethod(String method) {
        this.method = method;
    }

    public void setCount(long count) {
        this.count = count;
    }

    public void setInterval_time(String interval_time) {
        this.interval_time = interval_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipCount/VoipResStat.java b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipResStat.java
new file mode 100644
index 0000000..9611272
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipResStat.java
@@ -0,0 +1,40 @@
+package cn.ac.iie.bean.voipSipCount;
+
/**
 * Per-interval counter of SIP responses grouped by status code and the
 * CSeq method they answer.
 */
public class VoipResStat {
    /** SIP response status. */
    private String res_stat;
    /** CSeq (method) the response belongs to. */
    private String cseq;
    /** Occurrences within the interval. */
    private long count;
    /** Label of the aggregation interval. */
    private String interval_time;

    public String getRes_stat() {
        return res_stat;
    }

    public String getCseq() {
        return cseq;
    }

    public long getCount() {
        return count;
    }

    public String getInterval_time() {
        return interval_time;
    }

    public void setRes_stat(String res_stat) {
        this.res_stat = res_stat;
    }

    public void setCseq(String cseq) {
        this.cseq = cseq;
    }

    public void setCount(long count) {
        this.count = count;
    }

    public void setInterval_time(String interval_time) {
        this.interval_time = interval_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipCount/VoipServer.java b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipServer.java
new file mode 100644
index 0000000..fe4dd96
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipServer.java
@@ -0,0 +1,31 @@
+package cn.ac.iie.bean.voipSipCount;
+
/**
 * Per-interval counter of SIP traffic grouped by server.
 */
public class VoipServer {
    /** Server identifier. */
    private String server;
    /** Occurrences within the interval. */
    private long count;
    /** Label of the aggregation interval. */
    private String interval_time;

    public String getServer() {
        return server;
    }

    public long getCount() {
        return count;
    }

    public String getInterval_time() {
        return interval_time;
    }

    public void setServer(String server) {
        this.server = server;
    }

    public void setCount(long count) {
        this.count = count;
    }

    public void setInterval_time(String interval_time) {
        this.interval_time = interval_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipCount/VoipServiceDomain.java b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipServiceDomain.java
new file mode 100644
index 0000000..5d675c2
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipServiceDomain.java
@@ -0,0 +1,40 @@
+package cn.ac.iie.bean.voipSipCount;
+
/**
 * Per-interval counter of SIP traffic grouped by service domain and type.
 */
public class VoipServiceDomain {
    /** Service domain. */
    private String service;
    /** Domain type label. */
    private String type;
    // NOTE(review): boxed Long (nullable) unlike the sibling beans'
    // primitive long — confirm whether null counts are intentional.
    private Long count;
    /** Label of the aggregation interval. */
    private String interval_time;

    public String getService() {
        return service;
    }

    public String getType() {
        return type;
    }

    public Long getCount() {
        return count;
    }

    public String getInterval_time() {
        return interval_time;
    }

    public void setService(String service) {
        this.service = service;
    }

    public void setType(String type) {
        this.type = type;
    }

    public void setCount(Long count) {
        this.count = count;
    }

    public void setInterval_time(String interval_time) {
        this.interval_time = interval_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipCount/VoipUa.java b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipUa.java
new file mode 100644
index 0000000..a57bb67
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipCount/VoipUa.java
@@ -0,0 +1,31 @@
+package cn.ac.iie.bean.voipSipCount;
+
/**
 * Per-interval counter of SIP traffic grouped by user agent.
 */
public class VoipUa {
    /** User-agent string. */
    private String ua;
    /** Occurrences within the interval. */
    private long count;
    /** Label of the aggregation interval. */
    private String interval_time;

    public String getUa() {
        return ua;
    }

    public long getCount() {
        return count;
    }

    public String getInterval_time() {
        return interval_time;
    }

    public void setUa(String ua) {
        this.ua = ua;
    }

    public void setCount(long count) {
        this.count = count;
    }

    public void setInterval_time(String interval_time) {
        this.interval_time = interval_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipFromToLog/RouteRelationLog.java b/src/main/java/cn/ac/iie/bean/voipSipFromToLog/RouteRelationLog.java
new file mode 100644
index 0000000..9b46ae0
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipFromToLog/RouteRelationLog.java
@@ -0,0 +1,34 @@
+package cn.ac.iie.bean.voipSipFromToLog;
+
/**
 * VoIP routing-relation record: one observed (from-domain, to-domain)
 * pair at a given timestamp.
 */
public class RouteRelationLog {
    /** Observation time (epoch). */
    private long timestamp;
    /** Domain of the From header. */
    private String from_domain;
    /** Domain of the To header. */
    private String to_domain;

    public long getTimestamp() {
        return timestamp;
    }

    public String getFrom_domain() {
        return from_domain;
    }

    public String getTo_domain() {
        return to_domain;
    }

    public void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }

    public void setFrom_domain(String from_domain) {
        this.from_domain = from_domain;
    }

    public void setTo_domain(String to_domain) {
        this.to_domain = to_domain;
    }
}
diff --git a/src/main/java/cn/ac/iie/bean/voipSipOrigin/SipOriginALL.java b/src/main/java/cn/ac/iie/bean/voipSipOrigin/SipOriginALL.java
new file mode 100644
index 0000000..dd0ba42
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bean/voipSipOrigin/SipOriginALL.java
@@ -0,0 +1,762 @@
+package cn.ac.iie.bean.voipSipOrigin;
+
+import com.alibaba.fastjson.annotation.JSONField;
+
+/**
+ * SIP的原始日志扩展表
+ */
/**
 * Extended raw SIP log bean (original comment: "SIP的原始日志扩展表",
 * i.e. extended table of raw SIP logs).
 * <p>
 * One instance is a single parsed SIP record deserialized from JSON with
 * fastjson; each {@code @JSONField} binds a property to its JSON key.
 * JSON keys containing a hyphen ("Call-ID", "User-Agent", "Max-Forwards",
 * "Req_Record-Route", "Req_Content-Type", "Res_Record-Route",
 * "Res_Content-Type") cannot be expressed as Java identifiers and are
 * therefore filled in afterwards by the consuming bolt
 * (GetSipOriBoltDC#completeAbnorKeyForSipOriLog).
 * <p>
 * Fields marked "added" below do not come off the wire: they are derived
 * during processing (IP geolocation, parsed From/To/Contact parts,
 * *_Json string copies of the String[] fields for database insertion,
 * and the stat_time timestamp).
 * <p>
 * NOTE(review): setter bodies intentionally vary — fields whose name starts
 * with an upper-case letter are assigned without {@code this.} because the
 * parameter is lower-camel-cased; do not "normalize" them blindly.
 */
public class SipOriginALL {

    @JSONField(name = "Call_ID")
    private String Call_ID;// wire key is "Call-ID" (hyphen); populated by the consumer
    @JSONField(name = "CLJ_IP")
    private String CLJ_IP;
    @JSONField(name = "Found_Time")
    private long Found_Time;
    @JSONField(name = "SRC_IP")
    private String SRC_IP;
    @JSONField(name = "SRC_LOCATION_NATION")
    private String SRC_LOCATION_NATION;
    @JSONField(name = "SRC_LOCATION_NATION_CODE")
    private String SRC_LOCATION_NATION_CODE;
    @JSONField(name = "SRC_LOCATION_REGION")
    private String SRC_LOCATION_REGION;
    @JSONField(name = "SRC_PORT")
    private int SRC_PORT;
    @JSONField(name = "DST_IP")
    private String DST_IP;
    @JSONField(name = "IP_TYPE")
    private String IP_TYPE;
    @JSONField(name = "DST_LOCATION_NATION")
    private String DST_LOCATION_NATION;
    @JSONField(name = "DST_LOCATION_NATION_CODE")
    private String DST_LOCATION_NATION_CODE;
    @JSONField(name = "DST_LOCATION_REGION")
    private String DST_LOCATION_REGION;
    @JSONField(name = "DST_PORT")
    private int DST_PORT;
    @JSONField(name = "Method")
    private String Method;
    @JSONField(name = "Request_URI")
    private String Request_URI;
    @JSONField(name = "User_name")
    private String User_name;
    @JSONField(name = "Service_domain")
    private String Service_domain;
    @JSONField(name = "Service_domain_valid")
    private String Service_domain_valid;
    @JSONField(name = "Res_stat")
    private String Res_stat;
    @JSONField(name = "Res_stat_format")
    private String Res_stat_format;
    @JSONField(name = "From")
    private String From;
    @JSONField(name = "From_Nickname")
    private String From_Nickname;
    @JSONField(name = "From_usr_name")
    private String From_usr_name;
    @JSONField(name = "From_ser_domain")
    private String From_ser_domain;

    @JSONField(name = "From_ser_domain_valid")
    private String From_ser_domain_valid;// added: flag for whether the domain was replaced

    @JSONField(name = "From_tag")
    private String From_tag;
    @JSONField(name = "To")
    private String To;
    @JSONField(name = "To_Nickname")
    private String To_Nickname;
    @JSONField(name = "To_usr_name")
    private String To_usr_name;
    @JSONField(name = "To_ser_domain")
    private String To_ser_domain;

    @JSONField(name = "To_ser_domain_valid")
    private String To_ser_domain_valid;// added: flag for whether the domain was replaced

    @JSONField(name = "To_tag")
    private String To_tag;
    @JSONField(name = "Cseq")
    private String Cseq;
    @JSONField(name = "Cseq_method")
    private String Cseq_method;
    @JSONField(name = "User_Agent")
    private String User_Agent;// wire key is "User-Agent" (hyphen); populated by the consumer
    @JSONField(name = "Device_type")
    private String Device_type;
    @JSONField(name = "Max_Forwards")
    private String Max_Forwards;// wire key is "Max-Forwards" (hyphen); populated by the consumer
    @JSONField(name = "Server")
    private String Server;
    @JSONField(name = "Server_type")
    private String Server_type;
    @JSONField(name = "Req_Via")
    private String[] Req_Via;

    @JSONField(name = "Req_Via_Json")
    private String Req_Via_Json;// added field: JSON copy of Req_Via for DB insertion

    @JSONField(name = "Req_Contact")
    private String Req_Contact;
    @JSONField(name = "Req_Contact_Nickname")
    private String Req_Contact_Nickname;
    @JSONField(name = "Req_Contact_usr_name")
    private String Req_Contact_usr_name;
    @JSONField(name = "Req_Contact_ser_domain")
    private String Req_Contact_ser_domain;

    @JSONField(name = "Req_ser_domain_valid")
    private String Req_ser_domain_valid;// added: flag for whether the domain was replaced

    @JSONField(name = "Req_Record_Route")
    private String[] Req_Record_Route;// wire key is "Req_Record-Route"; populated by the consumer

    @JSONField(name = "Req_Record_Route_Json")
    private String Req_Record_Route_Json;// added field: JSON copy for DB insertion

    @JSONField(name = "Req_Route")
    private String[] Req_Route;

    @JSONField(name = "Req_Route_Json")
    private String Req_Route_Json;// added field: JSON copy for DB insertion

    @JSONField(name = "Req_Expires")
    private String Req_Expires;
    @JSONField(name = "Req_Others")
    private String Req_Others;
    @JSONField(name = "Req_Content_Type")
    private String Req_Content_Type;// wire key is "Req_Content-Type"; populated by the consumer
    @JSONField(name = "Req_Content")
    private String Req_Content;
    @JSONField(name = "Res_Via")
    private String[] Res_Via;

    @JSONField(name = "Res_Via_Json")
    private String Res_Via_Json;// added field: JSON copy for DB insertion

    @JSONField(name = "Res_Contact")
    private String Res_Contact;
    @JSONField(name = "Res_Contact_Nickname")
    private String Res_Contact_Nickname;
    @JSONField(name = "Res_Contact_usr_name")
    private String Res_Contact_usr_name;
    @JSONField(name = "Res_Contact_ser_domain")
    private String Res_Contact_ser_domain;

    @JSONField(name = "Res_ser_domain_valid")
    private String Res_ser_domain_valid;// added: flag for whether the domain was replaced

    @JSONField(name = "Res_Record_Route")
    private String[] Res_Record_Route;// wire key is "Res_Record-Route"; populated by the consumer

    @JSONField(name = "Res_Record_Route_Json")
    private String Res_Record_Route_Json;// added field: JSON copy for DB insertion

    @JSONField(name = "Res_Route")
    private String[] Res_Route;

    @JSONField(name = "Res_Route_Json")
    private String Res_Route_Json;// added field: JSON copy for DB insertion

    @JSONField(name = "Res_Expires")
    private String Res_Expires;
    @JSONField(name = "Res_Others")
    private String Res_Others;
    @JSONField(name = "Res_Content_Type")
    private String Res_Content_Type;// wire key is "Res_Content-Type"; populated by the consumer
    @JSONField(name = "Res_Content")
    private String Res_Content;

    @JSONField(name = "Req_coding")
    private String Req_coding;
    @JSONField(name = "Res_coding")
    private String Res_coding;

    private long stat_time;// added time field, set during processing

    // ---- plain getters/setters below: no logic, kept exactly as generated ----

    public String getCall_ID() {
        return Call_ID;
    }

    public void setCall_ID(String call_ID) {
        Call_ID = call_ID;
    }

    public String getCLJ_IP() {
        return CLJ_IP;
    }

    public void setCLJ_IP(String CLJ_IP) {
        this.CLJ_IP = CLJ_IP;
    }

    public long getFound_Time() {
        return Found_Time;
    }

    public void setFound_Time(long found_Time) {
        Found_Time = found_Time;
    }

    public String getSRC_IP() {
        return SRC_IP;
    }

    public void setSRC_IP(String SRC_IP) {
        this.SRC_IP = SRC_IP;
    }

    public String getSRC_LOCATION_NATION() {
        return SRC_LOCATION_NATION;
    }

    public void setSRC_LOCATION_NATION(String SRC_LOCATION_NATION) {
        this.SRC_LOCATION_NATION = SRC_LOCATION_NATION;
    }

    public String getSRC_LOCATION_NATION_CODE() {
        return SRC_LOCATION_NATION_CODE;
    }

    public void setSRC_LOCATION_NATION_CODE(String SRC_LOCATION_NATION_CODE) {
        this.SRC_LOCATION_NATION_CODE = SRC_LOCATION_NATION_CODE;
    }

    public String getSRC_LOCATION_REGION() {
        return SRC_LOCATION_REGION;
    }

    public void setSRC_LOCATION_REGION(String SRC_LOCATION_REGION) {
        this.SRC_LOCATION_REGION = SRC_LOCATION_REGION;
    }

    public int getSRC_PORT() {
        return SRC_PORT;
    }

    public void setSRC_PORT(int SRC_PORT) {
        this.SRC_PORT = SRC_PORT;
    }

    public String getDST_IP() {
        return DST_IP;
    }

    public void setDST_IP(String DST_IP) {
        this.DST_IP = DST_IP;
    }

    public String getIP_TYPE() {
        return IP_TYPE;
    }

    public void setIP_TYPE(String IP_TYPE) {
        this.IP_TYPE = IP_TYPE;
    }

    public String getDST_LOCATION_NATION() {
        return DST_LOCATION_NATION;
    }

    public void setDST_LOCATION_NATION(String DST_LOCATION_NATION) {
        this.DST_LOCATION_NATION = DST_LOCATION_NATION;
    }

    public String getDST_LOCATION_NATION_CODE() {
        return DST_LOCATION_NATION_CODE;
    }

    public void setDST_LOCATION_NATION_CODE(String DST_LOCATION_NATION_CODE) {
        this.DST_LOCATION_NATION_CODE = DST_LOCATION_NATION_CODE;
    }

    public String getDST_LOCATION_REGION() {
        return DST_LOCATION_REGION;
    }

    public void setDST_LOCATION_REGION(String DST_LOCATION_REGION) {
        this.DST_LOCATION_REGION = DST_LOCATION_REGION;
    }

    public int getDST_PORT() {
        return DST_PORT;
    }

    public void setDST_PORT(int DST_PORT) {
        this.DST_PORT = DST_PORT;
    }

    public String getMethod() {
        return Method;
    }

    public void setMethod(String method) {
        Method = method;
    }

    public String getRequest_URI() {
        return Request_URI;
    }

    public void setRequest_URI(String request_URI) {
        Request_URI = request_URI;
    }

    public String getUser_name() {
        return User_name;
    }

    public void setUser_name(String user_name) {
        User_name = user_name;
    }

    public String getService_domain() {
        return Service_domain;
    }

    public void setService_domain(String service_domain) {
        Service_domain = service_domain;
    }

    public String getService_domain_valid() {
        return Service_domain_valid;
    }

    public void setService_domain_valid(String service_domain_valid) {
        Service_domain_valid = service_domain_valid;
    }

    public String getRes_stat() {
        return Res_stat;
    }

    public void setRes_stat(String res_stat) {
        Res_stat = res_stat;
    }

    public String getRes_stat_format() {
        return Res_stat_format;
    }

    public void setRes_stat_format(String res_stat_format) {
        Res_stat_format = res_stat_format;
    }

    public String getFrom() {
        return From;
    }

    public void setFrom(String from) {
        From = from;
    }

    public String getFrom_Nickname() {
        return From_Nickname;
    }

    public void setFrom_Nickname(String from_Nickname) {
        From_Nickname = from_Nickname;
    }

    public String getFrom_usr_name() {
        return From_usr_name;
    }

    public void setFrom_usr_name(String from_usr_name) {
        From_usr_name = from_usr_name;
    }

    public String getFrom_ser_domain() {
        return From_ser_domain;
    }

    public void setFrom_ser_domain(String from_ser_domain) {
        From_ser_domain = from_ser_domain;
    }

    public String getFrom_ser_domain_valid() {
        return From_ser_domain_valid;
    }

    public void setFrom_ser_domain_valid(String from_ser_domain_valid) {
        From_ser_domain_valid = from_ser_domain_valid;
    }

    public String getFrom_tag() {
        return From_tag;
    }

    public void setFrom_tag(String from_tag) {
        From_tag = from_tag;
    }

    public String getTo() {
        return To;
    }

    public void setTo(String to) {
        To = to;
    }

    public String getTo_Nickname() {
        return To_Nickname;
    }

    public void setTo_Nickname(String to_Nickname) {
        To_Nickname = to_Nickname;
    }

    public String getTo_usr_name() {
        return To_usr_name;
    }

    public void setTo_usr_name(String to_usr_name) {
        To_usr_name = to_usr_name;
    }

    public String getTo_ser_domain() {
        return To_ser_domain;
    }

    public void setTo_ser_domain(String to_ser_domain) {
        To_ser_domain = to_ser_domain;
    }

    public String getTo_ser_domain_valid() {
        return To_ser_domain_valid;
    }

    public void setTo_ser_domain_valid(String to_ser_domain_valid) {
        To_ser_domain_valid = to_ser_domain_valid;
    }

    public String getTo_tag() {
        return To_tag;
    }

    public void setTo_tag(String to_tag) {
        To_tag = to_tag;
    }

    public String getCseq() {
        return Cseq;
    }

    public void setCseq(String cseq) {
        Cseq = cseq;
    }

    public String getCseq_method() {
        return Cseq_method;
    }

    public void setCseq_method(String cseq_method) {
        Cseq_method = cseq_method;
    }

    public String getUser_Agent() {
        return User_Agent;
    }

    public void setUser_Agent(String user_Agent) {
        User_Agent = user_Agent;
    }

    public String getDevice_type() {
        return Device_type;
    }

    public void setDevice_type(String device_type) {
        Device_type = device_type;
    }

    public String getMax_Forwards() {
        return Max_Forwards;
    }

    public void setMax_Forwards(String max_Forwards) {
        Max_Forwards = max_Forwards;
    }

    public String getServer() {
        return Server;
    }

    public void setServer(String server) {
        Server = server;
    }

    public String getServer_type() {
        return Server_type;
    }

    public void setServer_type(String server_type) {
        Server_type = server_type;
    }

    public String[] getReq_Via() {
        return Req_Via;
    }

    public void setReq_Via(String[] req_Via) {
        Req_Via = req_Via;
    }

    public String getReq_Via_Json() {
        return Req_Via_Json;
    }

    public void setReq_Via_Json(String req_Via_Json) {
        Req_Via_Json = req_Via_Json;
    }

    public String getReq_Contact() {
        return Req_Contact;
    }

    public void setReq_Contact(String req_Contact) {
        Req_Contact = req_Contact;
    }

    public String getReq_Contact_Nickname() {
        return Req_Contact_Nickname;
    }

    public void setReq_Contact_Nickname(String req_Contact_Nickname) {
        Req_Contact_Nickname = req_Contact_Nickname;
    }

    public String getReq_Contact_usr_name() {
        return Req_Contact_usr_name;
    }

    public void setReq_Contact_usr_name(String req_Contact_usr_name) {
        Req_Contact_usr_name = req_Contact_usr_name;
    }

    public String getReq_Contact_ser_domain() {
        return Req_Contact_ser_domain;
    }

    public void setReq_Contact_ser_domain(String req_Contact_ser_domain) {
        Req_Contact_ser_domain = req_Contact_ser_domain;
    }

    public String getReq_ser_domain_valid() {
        return Req_ser_domain_valid;
    }

    public void setReq_ser_domain_valid(String req_ser_domain_valid) {
        Req_ser_domain_valid = req_ser_domain_valid;
    }

    public String[] getReq_Record_Route() {
        return Req_Record_Route;
    }

    public void setReq_Record_Route(String[] req_Record_Route) {
        Req_Record_Route = req_Record_Route;
    }

    public String getReq_Record_Route_Json() {
        return Req_Record_Route_Json;
    }

    public void setReq_Record_Route_Json(String req_Record_Route_Json) {
        Req_Record_Route_Json = req_Record_Route_Json;
    }

    public String[] getReq_Route() {
        return Req_Route;
    }

    public void setReq_Route(String[] req_Route) {
        Req_Route = req_Route;
    }

    public String getReq_Route_Json() {
        return Req_Route_Json;
    }

    public void setReq_Route_Json(String req_Route_Json) {
        Req_Route_Json = req_Route_Json;
    }

    public String getReq_Expires() {
        return Req_Expires;
    }

    public void setReq_Expires(String req_Expires) {
        Req_Expires = req_Expires;
    }

    public String getReq_Others() {
        return Req_Others;
    }

    public void setReq_Others(String req_Others) {
        Req_Others = req_Others;
    }

    public String getReq_Content_Type() {
        return Req_Content_Type;
    }

    public void setReq_Content_Type(String req_Content_Type) {
        Req_Content_Type = req_Content_Type;
    }

    public String getReq_Content() {
        return Req_Content;
    }

    public void setReq_Content(String req_Content) {
        Req_Content = req_Content;
    }

    public String[] getRes_Via() {
        return Res_Via;
    }

    public void setRes_Via(String[] res_Via) {
        Res_Via = res_Via;
    }

    public String getRes_Via_Json() {
        return Res_Via_Json;
    }

    public void setRes_Via_Json(String res_Via_Json) {
        Res_Via_Json = res_Via_Json;
    }

    public String getRes_Contact() {
        return Res_Contact;
    }

    public void setRes_Contact(String res_Contact) {
        Res_Contact = res_Contact;
    }

    public String getRes_Contact_Nickname() {
        return Res_Contact_Nickname;
    }

    public void setRes_Contact_Nickname(String res_Contact_Nickname) {
        Res_Contact_Nickname = res_Contact_Nickname;
    }

    public String getRes_Contact_usr_name() {
        return Res_Contact_usr_name;
    }

    public void setRes_Contact_usr_name(String res_Contact_usr_name) {
        Res_Contact_usr_name = res_Contact_usr_name;
    }

    public String getRes_Contact_ser_domain() {
        return Res_Contact_ser_domain;
    }

    public void setRes_Contact_ser_domain(String res_Contact_ser_domain) {
        Res_Contact_ser_domain = res_Contact_ser_domain;
    }

    public String getRes_ser_domain_valid() {
        return Res_ser_domain_valid;
    }

    public void setRes_ser_domain_valid(String res_ser_domain_valid) {
        Res_ser_domain_valid = res_ser_domain_valid;
    }

    public String[] getRes_Record_Route() {
        return Res_Record_Route;
    }

    public void setRes_Record_Route(String[] res_Record_Route) {
        Res_Record_Route = res_Record_Route;
    }

    public String getRes_Record_Route_Json() {
        return Res_Record_Route_Json;
    }

    public void setRes_Record_Route_Json(String res_Record_Route_Json) {
        Res_Record_Route_Json = res_Record_Route_Json;
    }

    public String[] getRes_Route() {
        return Res_Route;
    }

    public void setRes_Route(String[] res_Route) {
        Res_Route = res_Route;
    }

    public String getRes_Route_Json() {
        return Res_Route_Json;
    }

    public void setRes_Route_Json(String res_Route_Json) {
        Res_Route_Json = res_Route_Json;
    }

    public String getRes_Expires() {
        return Res_Expires;
    }

    public void setRes_Expires(String res_Expires) {
        Res_Expires = res_Expires;
    }

    public String getRes_Others() {
        return Res_Others;
    }

    public void setRes_Others(String res_Others) {
        Res_Others = res_Others;
    }

    public String getRes_Content_Type() {
        return Res_Content_Type;
    }

    public void setRes_Content_Type(String res_Content_Type) {
        Res_Content_Type = res_Content_Type;
    }

    public String getRes_Content() {
        return Res_Content;
    }

    public void setRes_Content(String res_Content) {
        Res_Content = res_Content;
    }

    public String getReq_coding() {
        return Req_coding;
    }

    public void setReq_coding(String req_coding) {
        Req_coding = req_coding;
    }

    public String getRes_coding() {
        return Res_coding;
    }

    public void setRes_coding(String res_coding) {
        Res_coding = res_coding;
    }

    public long getStat_time() {
        return stat_time;
    }

    public void setStat_time(long stat_time) {
        this.stat_time = stat_time;
    }
}
diff --git a/src/main/java/cn/ac/iie/bolt/FromSpoutBufferBoltDC.java b/src/main/java/cn/ac/iie/bolt/FromSpoutBufferBoltDC.java
new file mode 100644
index 0000000..4b82597
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bolt/FromSpoutBufferBoltDC.java
@@ -0,0 +1,40 @@
+package cn.ac.iie.bolt;
+
+
+import org.apache.log4j.Logger;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseBasicBolt;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+
+import java.util.Map;
+
+public class FromSpoutBufferBoltDC extends BaseBasicBolt {
+ private static final long serialVersionUID = -106783017834081712L;
+
+ private static Logger logger = Logger.getLogger(FromSpoutBufferBoltDC.class);
+
+ @Override
+ public void prepare(Map stormConf, TopologyContext context) {
+ }
+
+ @Override
+ public void execute(Tuple tuple, BasicOutputCollector collector) {
+ try {
+ collector.emit(new Values(tuple.getString(0)));
+ } catch (Exception e) {
+ logger.error("FromSpoutBufferBoltDC Get Log is error --->" + e);
+ e.printStackTrace();
+ }
+
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("json"));
+ }
+
+}
diff --git a/src/main/java/cn/ac/iie/bolt/GetSipOriBoltDC.java b/src/main/java/cn/ac/iie/bolt/GetSipOriBoltDC.java
new file mode 100644
index 0000000..7e7202b
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bolt/GetSipOriBoltDC.java
@@ -0,0 +1,1027 @@
+package cn.ac.iie.bolt;
+
+import cn.ac.iie.bean.voipSipFromToLog.RouteRelationLog;
+import cn.ac.iie.bean.voipSipOrigin.SipOriginALL;
+import cn.ac.iie.common.CommonService;
+import cn.ac.iie.common.HashTableConfig;
+import cn.ac.iie.common.RealtimeCountConfig;
+import cn.ac.iie.dao.DataBaseLoad;
+import cn.ac.iie.dao.KafkaDB;
+import cn.ac.iie.utils.IPIPLibrary.Ipip;
+import cn.ac.iie.utils.TupleUtils;
+import com.alibaba.fastjson.JSONObject;
+import com.zdjizhi.utils.StringUtil;
+import org.apache.log4j.Logger;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseBasicBolt;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class GetSipOriBoltDC extends BaseBasicBolt {
+
+ private static final long serialVersionUID = -5702741658721325473L;
+
+ private static Logger logger = Logger.getLogger(GetSipOriBoltDC.class);
+
+ private static Ipip ipIpLook = new Ipip();
+
+ private Long SipOriginAllCountSumMi = 0L;
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public void prepare(Map stormConf, TopologyContext context) {
+ RealtimeCountConfig.configShow();
+ //载入ipip库
+ ipIpLook.load(RealtimeCountConfig.IPIP_LIBRARY);
+ }
+
+ @Override
+ public void execute(Tuple tuple, BasicOutputCollector collector) {
+ if (TupleUtils.isTick(tuple)) {
+ logger.warn("Num of SipOriginALL Count(last one minutes): " + SipOriginAllCountSumMi);
+ SipOriginAllCountSumMi = 0L;
+ } else {
+ try {
+ String message = tuple.getString(0);
+ if (StringUtil.isNotBlank(message)) {
+ SipOriginALL sipOriginLogDisable = JSONObject.parseObject(message, SipOriginALL.class);
+ SipOriginALL sipOriginLog = completeAbnorKeyForSipOriLog(sipOriginLogDisable, message);
+ SipOriginAllCountSumMi++;
+ //填充voip日志并发送
+ completeSipOriginAllLogAndemit(sipOriginLog, collector);
+
+ //填充voip路由关系表并发送
+// completeRouteRelationLogAndemit(sipOriginLog, message, collector);
+ }
+ } catch (Exception e) {
+ if (RealtimeCountConfig.ALL_LOG_OUTPUT_CONTROLLER.equals("yes")) {
+ logger.error("GetSipOriBoltDC get " + RealtimeCountConfig.KAFKA_SIP_ORIGIN_TOPIC + " Data is error!!! --> {" + e + "} <--");
+ logger.error("GetSipOriBoltDC the " + RealtimeCountConfig.KAFKA_SIP_ORIGIN_TOPIC + " Data is --> {" + tuple.getString(0) + "} <--");
+ e.printStackTrace();
+ } else {
+ logger.error("GetSipOriBoltDC get " + RealtimeCountConfig.KAFKA_SIP_ORIGIN_TOPIC + " Data is error!!! --> {" + e + "} <--");
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ private SipOriginALL completeAbnorKeyForSipOriLog(SipOriginALL sipOriginLogDisable, String message) {
+ Map mapWithLedge = JSONObject.parseObject(message, Map.class);
+ sipOriginLogDisable.setCall_ID((String) (mapWithLedge.get("Call-ID")));
+ sipOriginLogDisable.setUser_Agent((String) (mapWithLedge.get("User-Agent")));
+ sipOriginLogDisable.setMax_Forwards((String) (mapWithLedge.get("Max-Forwards")));
+ sipOriginLogDisable.setReq_Record_Route((String[]) (mapWithLedge.get("Req_Record-Route")));
+ sipOriginLogDisable.setReq_Content_Type((String) (mapWithLedge.get("Req_Content-Type")));
+ sipOriginLogDisable.setRes_Record_Route((String[]) (mapWithLedge.get("Res_Record-Route")));
+ sipOriginLogDisable.setRes_Content_Type((String) (mapWithLedge.get("Res_Content-Type")));
+
+ //将字符串数组字段转化为json字段便于入库
+ sipOriginLogDisable.setReq_Via_Json(JSONObject.toJSONString(sipOriginLogDisable.getReq_Via()));
+ sipOriginLogDisable.setReq_Record_Route_Json(JSONObject.toJSONString(sipOriginLogDisable.getReq_Record_Route()));
+ sipOriginLogDisable.setReq_Route_Json(JSONObject.toJSONString(sipOriginLogDisable.getReq_Route()));
+ sipOriginLogDisable.setRes_Via_Json(JSONObject.toJSONString(sipOriginLogDisable.getRes_Via()));
+ sipOriginLogDisable.setRes_Record_Route_Json(JSONObject.toJSONString(sipOriginLogDisable.getRes_Record_Route()));
+ sipOriginLogDisable.setRes_Route_Json(JSONObject.toJSONString(sipOriginLogDisable.getRes_Route()));
+
+ return sipOriginLogDisable;
+ }
+
+ //填充SIP_ORIGIN_ALL日志并发送
+ private void completeSipOriginAllLogAndemit(SipOriginALL sipOriginLog, BasicOutputCollector collector) {
+ //获取SRC_IP相关定位
+ if (StringUtil.isNotBlank(sipOriginLog.getSRC_IP())
+ && sipOriginLog.getSRC_IP() != null
+ && sipOriginLog.getSRC_IP().length() != 0
+ && !(sipOriginLog.getSRC_IP().contains(":"))) {
+ String[] src_IpLocation = ipIpLook.find(sipOriginLog.getSRC_IP());
+ String src_Ip_nation = src_IpLocation[0];
+ String src_Ip_region = src_IpLocation[1];
+ sipOriginLog.setSRC_LOCATION_NATION(src_Ip_nation);
+ sipOriginLog.setSRC_LOCATION_REGION(src_Ip_region);
+
+ if (sipOriginLog.getSRC_LOCATION_REGION().equals("香港")
+ || sipOriginLog.getSRC_LOCATION_REGION().equals("台湾")
+ || sipOriginLog.getSRC_LOCATION_REGION().equals("澳门")) {
+ sipOriginLog.setSRC_LOCATION_NATION(sipOriginLog.getSRC_LOCATION_REGION());
+ }
+
+ //设置SRC_IP国家码
+ String src_Ip_nation_code = HashTableConfig.ISO_3166_1_ALPHA_2.get(sipOriginLog.getSRC_LOCATION_NATION());
+ if (StringUtil.isNotBlank(src_Ip_nation_code)
+ && src_Ip_nation_code != null
+ && src_Ip_nation_code.length() != 0) {
+ sipOriginLog.setSRC_LOCATION_NATION_CODE(src_Ip_nation_code);
+ }
+ }
+
+ //获取DST_IP相关定位
+ if (StringUtil.isNotBlank(sipOriginLog.getDST_IP())
+ && sipOriginLog.getDST_IP() != null
+ && sipOriginLog.getDST_IP().length() != 0
+ && !(sipOriginLog.getDST_IP().contains(":"))) {
+ String[] dst_IpLocation = ipIpLook.find(sipOriginLog.getDST_IP());
+ String dst_Ip_nation = dst_IpLocation[0];
+ String dst_Ip_region = dst_IpLocation[1];
+ sipOriginLog.setDST_LOCATION_NATION(dst_Ip_nation);
+ sipOriginLog.setDST_LOCATION_REGION(dst_Ip_region);
+
+ if (sipOriginLog.getDST_LOCATION_REGION().equals("香港")
+ || sipOriginLog.getDST_LOCATION_REGION().equals("台湾")
+ || sipOriginLog.getDST_LOCATION_REGION().equals("澳门")) {
+ sipOriginLog.setDST_LOCATION_NATION(sipOriginLog.getDST_LOCATION_REGION());
+ }
+
+ //设置DST_IP国家码
+ String dst_Ip_nation_code = HashTableConfig.ISO_3166_1_ALPHA_2.get(sipOriginLog.getDST_LOCATION_NATION());
+ if (StringUtil.isNotBlank(dst_Ip_nation_code)
+ && dst_Ip_nation_code != null
+ && dst_Ip_nation_code.length() != 0) {
+ sipOriginLog.setDST_LOCATION_NATION_CODE(dst_Ip_nation_code);
+ }
+ }
+
+ //设置IP_TYPE
+ if (sipOriginLog.getDST_IP().contains(":") && sipOriginLog.getDST_IP().split(":").length > 2) {
+ sipOriginLog.setIP_TYPE("6");
+ } else {
+ sipOriginLog.setIP_TYPE("4");
+ }
+
+ //处理Request_URI
+ if (StringUtil.isNotBlank(sipOriginLog.getRequest_URI())
+ && sipOriginLog.getRequest_URI() != null
+ && sipOriginLog.getRequest_URI().length() != 0) {
+
+ if (!(sipOriginLog.getRequest_URI().contains("sip:"))) {
+ if (sipOriginLog.getRequest_URI().contains("@")) {
+ String[] splitAlte = sipOriginLog.getRequest_URI().split("@");
+ String userName = splitAlte[0];
+ String ServiceDomain = splitAlte[1].split(";")[0];
+ sipOriginLog.setUser_name(userName);
+ sipOriginLog.setService_domain(ServiceDomain);
+
+ if (StringUtil.isNotBlank(sipOriginLog.getService_domain())
+ && sipOriginLog.getService_domain() != null
+ && sipOriginLog.getService_domain().length() != 0) {
+
+ if (sipOriginLog.getService_domain().contains(":")) {
+ if (sipOriginLog.getService_domain().split(":").length <= 2) {
+ if (isIp(sipOriginLog.getService_domain().split(":")[0])) {//0索引为ip或者url
+ String[] splitColon = sipOriginLog.getService_domain().split(":");
+ String service_domain_v4_ip = splitColon[0];
+ String service_domain_v4_port = splitColon[1];
+ boolean judgeInnerIP = isInnerIP(service_domain_v4_ip);
+ if (judgeInnerIP) {
+ String newServiceDomain = sipOriginLog.getDST_IP() + ":" + service_domain_v4_port;
+ sipOriginLog.setService_domain(newServiceDomain);
+ sipOriginLog.setService_domain_valid("1");
+ } else {
+ //外网ip
+ sipOriginLog.setService_domain_valid("0");
+ }
+ } else {
+ //url:端口号,暂时不做操作
+ }
+ } else {
+ //v6,暂时不做操作
+ }
+ } else {
+ boolean ipOrurl = isIp(sipOriginLog.getService_domain());
+ if (ipOrurl) {
+ String ipv4_serviceDomain = sipOriginLog.getService_domain();
+ boolean judgeInnerIP = isInnerIP(ipv4_serviceDomain);
+ if (judgeInnerIP) {
+ String newServiceDomain = sipOriginLog.getDST_IP();
+ sipOriginLog.setService_domain(newServiceDomain);
+ sipOriginLog.setService_domain_valid("1");
+ } else {
+ //外网ip
+ sipOriginLog.setService_domain_valid("0");
+ }
+ } else {
+ //url网址,暂不处理
+ }
+ }
+ }
+ } else if (sipOriginLog.getRequest_URI().contains(".") || sipOriginLog.getRequest_URI().contains(":")) {
+ String ServiceDomain = sipOriginLog.getRequest_URI();
+ sipOriginLog.setService_domain(ServiceDomain);
+
+ if (StringUtil.isNotBlank(sipOriginLog.getService_domain())
+ && sipOriginLog.getService_domain() != null
+ && sipOriginLog.getService_domain().length() != 0) {
+
+ if (sipOriginLog.getService_domain().contains(":")) {
+ if (sipOriginLog.getService_domain().split(":").length <= 2) {
+ if (isIp(sipOriginLog.getService_domain().split(":")[0])) {
+ String[] splitColon = sipOriginLog.getService_domain().split(":");
+ String service_domain_v4_ip = splitColon[0];
+ String service_domain_v4_port = splitColon[1];
+ boolean judgeInnerIP = isInnerIP(service_domain_v4_ip);
+ if (judgeInnerIP) {
+ String newServiceDomain = sipOriginLog.getDST_IP() + ":" + service_domain_v4_port;
+ sipOriginLog.setService_domain(newServiceDomain);
+ sipOriginLog.setService_domain_valid("1");
+ } else {
+ //外网ip
+ sipOriginLog.setService_domain_valid("0");
+ }
+ } else {
+ //url:端口号,暂时不做操作
+ }
+ } else {
+ //v6,暂时不做操作
+ }
+ } else {
+ boolean ipOrurl = isIp(sipOriginLog.getService_domain());
+ if (ipOrurl) {
+ String ipv4_serviceDomain = sipOriginLog.getService_domain();
+ boolean judgeInnerIP = isInnerIP(ipv4_serviceDomain);
+ if (judgeInnerIP) {
+ String newServiceDomain = sipOriginLog.getDST_IP();
+ sipOriginLog.setService_domain(newServiceDomain);
+ sipOriginLog.setService_domain_valid("1");
+ } else {
+ //外网ip
+ sipOriginLog.setService_domain_valid("0");
+ }
+ } else {
+ //url网址,暂不处理
+ }
+ }
+ }
+ } else if (!(sipOriginLog.getRequest_URI().contains(".")) && !(sipOriginLog.getRequest_URI().contains(":"))) {
+ //即是一个字符串或者数字串,没有具体价值,不做处理
+ } else {
+ if (RealtimeCountConfig.PART_LOG_OUTPUT_CONTROLLER.equals("yes")) {
+ logger.error("GetSipOriBoltDC--> " + RealtimeCountConfig.KAFKA_SIP_ORIGIN_TOPIC + " Request_URI no @ and is not ip , Request_URI is ---> " + sipOriginLog.getRequest_URI() + " <---");
+ }
+ }
+ } else {
+ //此处包含sip:的情况已经在前端代码中去除,故这里不做处理
+ }
+ }
+
+ //处理Res_stat_format
+ if (StringUtil.isNotBlank(sipOriginLog.getRes_stat())
+ && sipOriginLog.getRes_stat() != null
+ && sipOriginLog.getRes_stat().length() != 0) {
+
+ String replaceBlank = sipOriginLog.getRes_stat().toLowerCase().trim().replace(" ", "");//去除空格
+ //修整Res_stat_format字段异常格式
+ replaceBlank = repairResStatFormatStr(replaceBlank);
+ sipOriginLog.setRes_stat_format(replaceBlank);
+ }
+
+
+ //处理From
+ if (StringUtil.isNotBlank(sipOriginLog.getFrom())
+ && sipOriginLog.getFrom() != null
+ && sipOriginLog.getFrom().length() != 0) {
+ String useFromStr = sipOriginLog.getFrom().replace(" ", "").replace(",", "");
+
+ if (sipOriginLog.getFrom().contains("\"")) {
+ String nick_name = getNickname(useFromStr, "from");
+ sipOriginLog.setFrom_Nickname(nick_name);
+ } else {
+ //From字段不包含"",此时获取不到用户昵称,但能获取用户名,服务域名,下方统一获取
+ }
+ String user_name = getUserName(useFromStr, "from");
+ sipOriginLog.setFrom_usr_name(user_name);
+ String serDomainAndValid = getSerDomain(useFromStr, sipOriginLog, "from");
+ if (serDomainAndValid.contains("##%##")) {
+ String[] splitSpliter = serDomainAndValid.split("##%##");
+ String serDomainOrNull = splitSpliter[0];
+ String validStr = splitSpliter[1];
+ if ((!(serDomainOrNull.equals("null")))) {
+ sipOriginLog.setFrom_ser_domain(serDomainOrNull);
+ } else {
+ sipOriginLog.setFrom_ser_domain(null);
+ }
+ sipOriginLog.setFrom_ser_domain_valid(validStr);
+ }
+ }
+
+ //处理To
+ if (StringUtil.isNotBlank(sipOriginLog.getTo())
+ && sipOriginLog.getTo() != null
+ && sipOriginLog.getTo().length() != 0) {
+ String useToStr = sipOriginLog.getTo().replace(" ", "").replace(",", "");
+
+ if (sipOriginLog.getTo().contains("\"")) {
+ String nick_name = getNickname(useToStr, "to");
+ sipOriginLog.setTo_Nickname(nick_name);
+ } else {
+ //To字段不包含"",此时获取不到用户昵称,但能获取用户名,服务域名,下方统一获取
+ }
+ String user_name = getUserName(useToStr, "to");
+ sipOriginLog.setTo_usr_name(user_name);
+ String serDomainAndValid = getSerDomain(useToStr, sipOriginLog, "to");
+ if (serDomainAndValid.contains("##%##")) {
+ String[] splitSpliter = serDomainAndValid.split("##%##");
+ String serDomainOrNull = splitSpliter[0];
+ String validStr = splitSpliter[1];
+ if ((!(serDomainOrNull.equals("null")))) {
+ //补丁,针对性修改To_ser_domain格式
+ serDomainOrNull = pointSerDomainPatch(serDomainOrNull);
+ //设置To_ser_domain
+ sipOriginLog.setTo_ser_domain(serDomainOrNull);
+ } else {
+ sipOriginLog.setTo_ser_domain(null);
+ }
+ sipOriginLog.setTo_ser_domain_valid(validStr);
+ }
+ }
+
+ //获取Cseq_method
+ if (StringUtil.isNotBlank(sipOriginLog.getCseq())
+ && sipOriginLog.getCseq() != null
+ && sipOriginLog.getCseq().length() != 0) {
+ String[] splitCseq = sipOriginLog.getCseq().split(" ");
+ if (splitCseq.length == 2) {
+ sipOriginLog.setCseq_method(splitCseq[1]);
+ }
+ }
+
+ //处理Req_Contact
+ if (StringUtil.isNotBlank(sipOriginLog.getReq_Contact())
+ && sipOriginLog.getReq_Contact() != null
+ && sipOriginLog.getReq_Contact().length() != 0) {
+ String useReqConStr = sipOriginLog.getReq_Contact().replace(" ", "").replace(",", "");
+
+ if (sipOriginLog.getReq_Contact().contains("\"")) {
+ String nick_name = getNickname(useReqConStr, "req");
+ sipOriginLog.setReq_Contact_Nickname(nick_name);
+ } else {
+ //字段不包含"",此时获取不到用户昵称,但能获取用户名,服务域名,下方统一获取
+ }
+ String user_name = getUserName(useReqConStr, "req");
+ sipOriginLog.setReq_Contact_usr_name(user_name);
+ String serDomainAndValid = getSerDomain(useReqConStr, sipOriginLog, "req");
+ if (serDomainAndValid.contains("##%##")) {
+ String[] splitSpliter = serDomainAndValid.split("##%##");
+ String serDomainOrNull = splitSpliter[0];
+ String validStr = splitSpliter[1];
+ if ((!(serDomainOrNull.equals("null")))) {
+ sipOriginLog.setReq_Contact_ser_domain(serDomainOrNull);
+ } else {
+ sipOriginLog.setReq_Contact_ser_domain(null);
+ }
+ sipOriginLog.setReq_ser_domain_valid(validStr);
+ }
+ }
+
+ //处理Res_Contact
+ if (StringUtil.isNotBlank(sipOriginLog.getRes_Contact())
+ && sipOriginLog.getRes_Contact() != null
+ && sipOriginLog.getRes_Contact().length() != 0) {
+ String useResConStr = sipOriginLog.getRes_Contact().replace(" ", "").replace(",", "");
+
+ if (sipOriginLog.getRes_Contact().contains("\"")) {
+ String nick_name = getNickname(useResConStr, "res");
+ sipOriginLog.setRes_Contact_Nickname(nick_name);
+ } else {
+ //字段不包含"",此时获取不到用户昵称,但能获取用户名,服务域名,下方统一获取
+ }
+ String user_name = getUserName(useResConStr, "res");
+ sipOriginLog.setRes_Contact_usr_name(user_name);
+ String serDomainAndValid = getSerDomain(useResConStr, sipOriginLog, "res");
+ if (serDomainAndValid.contains("##%##")) {
+ String[] splitSpliter = serDomainAndValid.split("##%##");
+ String serDomainOrNull = splitSpliter[0];
+ String validStr = splitSpliter[1];
+ if ((!(serDomainOrNull.equals("null")))) {
+ sipOriginLog.setRes_Contact_ser_domain(serDomainOrNull);
+ } else {
+ sipOriginLog.setRes_Contact_ser_domain(null);
+ }
+ sipOriginLog.setRes_ser_domain_valid(validStr);
+ }
+ }
+
+ //根据Req_Content_Type设置Req_coding(主叫编码方式)
+ if (StringUtil.isNotBlank(sipOriginLog.getReq_Content_Type())
+ && sipOriginLog.getReq_Content_Type() != null
+ && sipOriginLog.getReq_Content_Type().length() != 0) {
+ if (sipOriginLog.getReq_Content_Type().equals("application/sdp")) {
+ if (StringUtil.isNotBlank(sipOriginLog.getReq_Content())
+ && sipOriginLog.getReq_Content() != null
+ && sipOriginLog.getReq_Content().length() != 0) {
+ String codingS = getCodingS(sipOriginLog.getReq_Content());
+ if (StringUtil.isNotBlank(codingS)) {
+ sipOriginLog.setReq_coding(codingS);
+ }
+ }
+ }
+ }
+
+ //根据Res_Content_Type设置Res_coding(被叫编码方式)
+ if (StringUtil.isNotBlank(sipOriginLog.getRes_Content_Type())
+ && sipOriginLog.getRes_Content_Type() != null
+ && sipOriginLog.getRes_Content_Type().length() != 0) {
+ if (sipOriginLog.getRes_Content_Type().equals("application/sdp")) {
+ if (StringUtil.isNotBlank(sipOriginLog.getRes_Content())
+ && sipOriginLog.getRes_Content() != null
+ && sipOriginLog.getRes_Content().length() != 0) {
+ String codingS = getCodingS(sipOriginLog.getRes_Content());
+ if (StringUtil.isNotBlank(codingS)) {
+ sipOriginLog.setRes_coding(codingS);
+ }
+ }
+ }
+ }
+
+ collector.emit(new Values(JSONObject.toJSONString(sipOriginLog), "origin"));
+ }
+
+ //填充voip路由关系表并发送
+ private void completeRouteRelationLogAndemit(SipOriginALL sipOriginLog, String message, BasicOutputCollector collector) {
+ RouteRelationLog routeRelationLog = new RouteRelationLog();
+ LinkedList<String> emitList = new LinkedList<String>();
+ String src_ip = sipOriginLog.getSRC_IP();
+ String dst_ip = sipOriginLog.getDST_IP();
+
+ if (StringUtil.isNotBlank(sipOriginLog.getMethod())
+ && sipOriginLog.getMethod() != null
+ && sipOriginLog.getMethod().length() != 0) {
+ //请求侧req,顺序 sip->last->….->first->dip
+ if (sipOriginLog.getReq_Route() != null && sipOriginLog.getReq_Route().length != 0) {
+ LinkedList<String> reqRouteList = getRoute(sipOriginLog.getReq_Route());
+ emitList.add(src_ip);
+ if (reqRouteList != null) {
+ if (reqRouteList.size() > 0) {
+ for (int i = reqRouteList.size() - 1; i >= 0; i--) {
+ emitList.add(reqRouteList.get(i));
+ }
+ }
+ }
+ emitList.add(dst_ip);
+ } else if (sipOriginLog.getReq_Via() != null && sipOriginLog.getReq_Via().length != 0) {
+ LinkedList<String> reqViaList = getVia(sipOriginLog.getReq_Via());
+ emitList.add(src_ip);
+ if (reqViaList != null) {
+ if (reqViaList.size() > 0) {
+ for (int i = reqViaList.size() - 1; i >= 0; i--) {
+ emitList.add(reqViaList.get(i));
+ }
+ }
+ }
+ emitList.add(dst_ip);
+ } else if (sipOriginLog.getReq_Record_Route() != null && sipOriginLog.getReq_Record_Route().length != 0) {
+ LinkedList<String> reqRecordRouteList = getRecordRoute(sipOriginLog.getReq_Record_Route());
+ emitList.add(src_ip);
+ if (reqRecordRouteList != null) {
+ if (reqRecordRouteList.size() > 0) {
+ for (int i = reqRecordRouteList.size() - 1; i >= 0; i--) {
+ emitList.add(reqRecordRouteList.get(i));
+ }
+ }
+ }
+ emitList.add(dst_ip);
+ } else {
+ //三个字段都没有则不做处理
+ }
+ } else {
+ //响应侧res,顺序 dip->first->….->last->sip
+ if (sipOriginLog.getRes_Route() != null && sipOriginLog.getRes_Route().length != 0) {
+ LinkedList<String> resRouteList = getRoute(sipOriginLog.getRes_Route());
+ emitList.add(dst_ip);
+ if (resRouteList != null) {
+ if (resRouteList.size() > 0) {
+ for (int i = 0; i < resRouteList.size(); i++) {
+ emitList.add(resRouteList.get(i));
+ }
+ }
+ }
+ emitList.add(src_ip);
+ } else if (sipOriginLog.getRes_Via() != null && sipOriginLog.getRes_Via().length != 0) {
+ LinkedList<String> resViaList = getVia(sipOriginLog.getRes_Via());
+ emitList.add(dst_ip);
+ if (resViaList != null) {
+ if (resViaList.size() > 0) {
+ for (int i = 0; i < resViaList.size(); i++) {
+ emitList.add(resViaList.get(i));
+ }
+ }
+ }
+ emitList.add(src_ip);
+ } else if (sipOriginLog.getRes_Record_Route() != null && sipOriginLog.getRes_Record_Route().length != 0) {
+ LinkedList<String> resRecordRouteList = getRecordRoute(sipOriginLog.getRes_Record_Route());
+ emitList.add(dst_ip);
+ if (resRecordRouteList != null) {
+ if (resRecordRouteList.size() > 0) {
+ for (int i = 0; i < resRecordRouteList.size(); i++) {
+ emitList.add(resRecordRouteList.get(i));
+ }
+ }
+ }
+ emitList.add(src_ip);
+ } else {
+ //三个字段都没有则不做处理
+ }
+ }
+ if (emitList.size() != 0) {
+ for (int i = 0; i < emitList.size(); i++) {
+ if (i != emitList.size() - 1) {
+ routeRelationLog.setFrom_domain(emitList.get(i));
+ routeRelationLog.setTo_domain(emitList.get(i + 1));
+ collector.emit(new Values(JSONObject.toJSONString(routeRelationLog), "route"));
+ }
+ }
+ }
+ }
+
+ private LinkedList<String> getRecordRoute(String[] record_route) {
+ LinkedList<String> recordRouteList = new LinkedList<>();
+ try {
+ for (int i = 0; i < record_route.length; i++) {
+ String str = record_route[i].replace("<", "").replace(">", "");
+ String splitSemi = str.split(";")[0];
+ if (splitSemi.split(":").length <= 3) {
+ String splitColon = splitSemi.split(":")[1];
+ recordRouteList.add(splitColon);
+ } else {
+ String splitSipColon = splitSemi.split("sip:")[1];
+ recordRouteList.add(splitSipColon);
+ }
+ }
+ } catch (Exception e) {
+ if (RealtimeCountConfig.PART_LOG_OUTPUT_CONTROLLER.equals("yes")) {
+ logger.error("GetSipOriBoltDC-->getRoute Split Route error ---> " + e + " <---");
+ logger.error("GetSipOriBoltDC-->getRoute Split Route data is ---> " + Arrays.toString(record_route) + " <---");
+ }
+ }
+ if (recordRouteList.size() != 0) {
+ return recordRouteList;
+ } else {
+ return null;
+ }
+ }
+
    /**
     * Extracts the host part from each SIP Via header value.
     * A Via looks like "SIP/2.0/UDP host:port;branch=..."; the token after
     * the first blank (with any ";params" removed) is taken. For "host:port"
     * the port is dropped; tokens with more than one ':' (presumably IPv6 —
     * TODO confirm) are kept whole. Entries without a blank are skipped.
     *
     * @param via raw Via header values
     * @return hosts in input order, or null when nothing could be parsed
     *         (callers treat null as "no hops")
     */
    private LinkedList<String> getVia(String[] via) {
        LinkedList<String> viaList = new LinkedList<>();
        try {
            for (int i = 0; i < via.length; i++) {
                if (via[i].contains(" ")) {
                    //token after the transport part, parameters after ';' dropped
                    String originDomain = via[i].split(";")[0].split(" ")[1];
                    if (originDomain.contains(":")) {
                        if (originDomain.split(":").length <= 2) {
                            //host:port -> host
                            viaList.add(originDomain.split(":")[0]);
                        } else {
                            viaList.add(originDomain);
                        }
                    } else {
                        viaList.add(originDomain);
                    }
                }
            }
        } catch (Exception e) {
            //malformed Via: log (when enabled) and fall through with whatever was collected
            if (RealtimeCountConfig.PART_LOG_OUTPUT_CONTROLLER.equals("yes")) {
                logger.error("GetSipOriBoltDC-->getVia Split Via error ---> " + e + " <---");
                logger.error("GetSipOriBoltDC-->getVia Split Via data is ---> " + Arrays.toString(via) + " <---");
            }
        }
        if (viaList.size() != 0) {
            return viaList;
        } else {
            return null;
        }
    }
+
    /**
     * Extracts host entries from SIP Route header values.
     * "<sip:host:port;lr>" yields the token between the first and second ':'
     * (port dropped); a "user@host" token keeps only the host; values with
     * more than three ':' (presumably IPv6 — TODO confirm) keep everything
     * after "sip:".
     *
     * @param route raw Route header values
     * @return hops in input order, or null when nothing could be parsed
     *         (callers treat null as "no hops")
     */
    private LinkedList<String> getRoute(String[] route) {
        LinkedList<String> routeList = new LinkedList<>();
        try {
            for (int i = 0; i < route.length; i++) {
                String str = route[i].replace("<", "").replace(">", "");
                String splitSemi = str.split(";")[0];
                if (splitSemi.split(":").length <= 3) {
                    String splitColon = splitSemi.split(":")[1];
                    if (splitColon.contains("@") && splitColon.split("@")[1].contains(".")) {//e.g. "Req_Route": ["<sip:1.228.34.22;lr>", "<sip:114.207.73.139:5060;lr>", "<sip:[email protected]:5067;lr>"]
                        splitColon = splitColon.split("@")[1];
                    }
                    routeList.add(splitColon);
                } else {
                    String splitSipColon = splitSemi.split("sip:")[1];
                    routeList.add(splitSipColon);
                }
            }
        } catch (Exception e) {
            //malformed Route: log (when enabled) and fall through with whatever was collected
            if (RealtimeCountConfig.PART_LOG_OUTPUT_CONTROLLER.equals("yes")) {
                logger.error("GetSipOriBoltDC-->getRoute Split Route error ---> " + e + " <---");
                logger.error("GetSipOriBoltDC-->getRoute Split Route data is ---> " + Arrays.toString(route) + " <---");
            }
        }
        if (routeList.size() != 0) {
            return routeList;
        } else {
            return null;
        }
    }
+
+ //获取Nickname
+ private String getNickname(String useStr, String type) {
+ String nick_name = useStr.split("\"")[1];
+ return nick_name;
+ }
+
    /**
     * Extracts the user part (the text before '@', or the bare subscriber
     * token) from a From/To/Contact header value. Handles "sip:"/"sips:"
     * and "tel:" URIs; "None" and "*" mean no user present.
     *
     * @param useStr header value with blanks/commas already stripped
     * @param type   field tag ("from"/"to"/"req"/"res"), used only in error logs
     * @return user name, or null when absent/unparseable
     */
    private String getUserName(String useStr, String type) {
        if (useStr.equals("None") || useStr.equals("*")) {
            return null;
        }
        if (useStr.contains("@") && (useStr.contains("sip:") || useStr.contains("sips:"))) {
            //"sip(s):user@host..." -> user
            String userName = useStr.replace("sips:", "sip:").split("sip:")[1].split("@")[0];
            return userName;
        } else if (!(useStr.contains("@")) && useStr.contains("sip:")) {
            try {
                if (useStr.split(":").length <= 3) {
                    //"sip:user[:port]" -> token after the first ':'
                    String userName = useStr.split(":")[1];
                    return userName;
                } else {
                    //many ':' — presumably an IPv6 host (TODO confirm); cut at ';' or take text after "sip:"
                    if (useStr.contains(";")) {
                        String userName = useStr.split(";")[0].split(":")[1];
                        return userName;
                    } else {
                        String userName = useStr.replace("<", "")
                                .replace(">", "")
                                .split("sip:")[1];
                        return userName;
                    }
                }
            } catch (Exception e) {
                if (RealtimeCountConfig.PART_LOG_OUTPUT_CONTROLLER.equals("yes")) {
                    logger.error("|~|~|GetSipOriBoltDC===getUserName can not split this data but it has sip: --->" + useStr + "<--- the type is --->" + type + "<---|~|~|");
                }
                return null;
            }
        } else if (useStr.contains("tel:")) {
            //"tel:number>..." -> number
            String userName = useStr.split(">")[0].split("tel:")[1];
            return userName;
        } else if ((!(useStr.contains("."))) && (!(useStr.contains(":")))) {
            //plain token with no dots or colons: nothing extractable, silently absent
            return null;
        } else {
            if (RealtimeCountConfig.PART_LOG_OUTPUT_CONTROLLER.equals("yes")) {
                logger.error("|~|~|GetSipOriBoltDC===getUserName can not split this data --->" + useStr + "<--- the type is --->" + type + "<---|~|~|");
            }
            return null;
        }
    }
+
    /**
     * Extracts the service domain (host[:port]) from a From/To/Contact
     * header value and decides whether it had to be rewritten.
     *
     * The result is a single packed string "domain##%##flag" ("##%##" is the
     * separator the caller splits on): flag "1" means the parsed host was an
     * internal (RFC1918/loopback) IP and was replaced with the packet's real
     * SRC/DST IP; flag "0" means the domain is returned as parsed;
     * "null##%##0" means no domain could be determined.
     *
     * @param useStr       header value with blanks/commas already stripped
     * @param sipOriginLog source log supplying SRC_IP/DST_IP and Method
     * @param type         "from"/"req" (request-originated field) or "to"/"res"
     * @return "domain##%##flag" packed result
     */
    private String getSerDomain(String useStr, SipOriginALL sipOriginLog, String type) {
        if (useStr.equals("None") || useStr.equals("*")) {
            return "null##%##0";
        }
        String serDomain;
        //phase 1: parse the raw host[:port] out of the header text
        if (useStr.contains("<") && useStr.contains(">")) {
            if (useStr.contains("@")) {
//                serDomain = useStr.split("@")[1].split(">")[0];//old version: "From":"\"4903\" <sip:4903@>" and "To":"\"1441519470648\" <sip:1441519470648@>" threw index-out-of-bounds
                String splitFirst = useStr.split("@")[1];
                if (!(">".equals(splitFirst))) {
                    serDomain = splitFirst.split(">")[0];
                } else {
                    //"user@>" — empty host after the '@'
                    serDomain = null;
                }
            } else if (useStr.contains(".")) {
                if (useStr.split(":").length == 3) {
                    //"<sip:host:port...>" — keep host:port
                    String[] split = useStr.split(">")[0].split(":");
                    serDomain = split[1] + ":" + split[2];
                } else {
                    serDomain = useStr.split(">")[0].split(":")[1];
                }
            } else if (useStr.contains("sip:") && useStr.split(":").length > 3) {
                //many ':' — presumably an IPv6 literal after "sip:" (TODO confirm)
                serDomain = useStr.split("sip:")[1].split(">")[0];
            } else if (useStr.contains("tel:") && useStr.split(":").length >= 2) {
                serDomain = useStr.split(">")[0].split("tel:")[1];
            } else {
                serDomain = useStr.split(">")[0].split(":")[1];
            }
        } else {
            //bare form without angle brackets
            if (useStr.contains("@")) {
                serDomain = useStr.split("@")[1];
            } else if (useStr.contains(".")) {
                if (useStr.split(":").length == 3) {
                    //"sip:host:port" — keep host:port
                    String[] split = useStr.split(":");
                    serDomain = split[1] + ":" + split[2];
                } else {
                    serDomain = useStr.split(":")[1];
                }
            } else if (useStr.contains("sip:")) {
                serDomain = useStr.split("sip:")[1];
            } else if (useStr.contains("tel:")) {
                if (useStr.contains(";")) {
                    serDomain = useStr.split(";")[0].split("tel:")[1];
                } else {
                    serDomain = useStr.split("tel:")[1];
                }
            } else if ((!(useStr.contains(":"))) && (!(useStr.contains(".")))) {
                serDomain = null;
            } else {
                if (useStr.contains(":")) {
                    serDomain = useStr.split(":")[1];
                } else {
                    serDomain = null;
                }
            }
        }

        if (serDomain == null || serDomain.length() == 0) {
            return "null##%##0";
        }
        //phase 2: internal IPs are substituted with the packet's outer IP.
        //Method present => request direction: from/req map to SRC, to/res to DST;
        //Method absent  => response direction: the mapping is mirrored.
        if (serDomain.contains(":")) {
            if (serDomain.split(":").length <= 2) {
                String[] splitColon = serDomain.split(":");
                String ipOrUrl = splitColon[0];
                String port = splitColon[1];
                if (isIp(ipOrUrl)) {
                    if (isInnerIP(ipOrUrl)) {
                        if (StringUtil.isNotBlank(sipOriginLog.getMethod())
                                && sipOriginLog.getMethod() != null
                                && sipOriginLog.getMethod().length() != 0) {
                            if (type.equals("from") || type.equals("req")) {
                                String newServiceDomain = sipOriginLog.getSRC_IP() + ":" + port;
                                return newServiceDomain + "##%##1";
                            } else {
                                String newServiceDomain = sipOriginLog.getDST_IP() + ":" + port;
                                return newServiceDomain + "##%##1";
                            }
                        } else {
                            if (type.equals("from") || type.equals("req")) {
                                String newServiceDomain = sipOriginLog.getDST_IP() + ":" + port;
                                return newServiceDomain + "##%##1";
                            } else {
                                String newServiceDomain = sipOriginLog.getSRC_IP() + ":" + port;
                                return newServiceDomain + "##%##1";
                            }
                        }
                    } else {
                        //public IP: keep as seen
                        return serDomain + "##%##0";
                    }
                } else {
                    //domain name: keep as seen
                    return serDomain + "##%##0";
                }
            } else {
                //several ':' — presumably IPv6: keep as seen
                return serDomain + "##%##0";
            }
        } else {
            //no port part: same internal-IP substitution, without ":port"
            boolean ipOrurl = isIp(serDomain);
            if (ipOrurl) {
                if (isInnerIP(serDomain)) {
                    if (StringUtil.isNotBlank(sipOriginLog.getMethod())
                            && sipOriginLog.getMethod() != null
                            && sipOriginLog.getMethod().length() != 0) {
                        if (type.equals("from") || type.equals("req")) {
                            String newServiceDomain = sipOriginLog.getSRC_IP();
                            return newServiceDomain + "##%##1";
                        } else {
                            String newServiceDomain = sipOriginLog.getDST_IP();
                            return newServiceDomain + "##%##1";
                        }
                    } else {
                        if (type.equals("from") || type.equals("req")) {
                            String newServiceDomain = sipOriginLog.getDST_IP();
                            return newServiceDomain + "##%##1";
                        } else {
                            String newServiceDomain = sipOriginLog.getSRC_IP();
                            return newServiceDomain + "##%##1";
                        }
                    }
                } else {
                    return serDomain + "##%##0";
                }
            } else {
                return serDomain + "##%##0";
            }
        }
    }
+
+
+ //通用方法,传入url,返回domain
+ private String getDomainFromUrl(String oriUrl) {
+ String url = oriUrl.split("[?]")[0];
+ if (url.contains("http://") || url.contains("https://")) {
+ if (url.split("//")[1].split("/")[0].split(":").length <= 2) {
+ String v4Domain = url.split("//")[1]
+ .split("/")[0]
+ .split(":")[0];
+ return v4Domain;
+ } else {
+ String v6Domain = url.split("//")[1]
+ .split("/")[0];
+ return v6Domain;
+ }
+ } else {
+ if (url.split("/")[0].split(":").length <= 2) {
+ String v4Domain = url.split("/")[0].split(":")[0];
+ return v4Domain;
+ } else {
+ String v6Domain = url.split("/")[0];
+ return v6Domain;
+ }
+ }
+ }
+
+ //通用方法,传入url,返回file_path
+ private String getFilePathFromUrl(String oriUrl) {
+ String url = oriUrl.split("[?]")[0];
+ if (url.contains("http://") || url.contains("https://")) {
+ String filePath = url.split("//")[1];
+ return filePath;
+ } else {
+ String filePath = url;
+ return filePath;
+ }
+ }
+
+ //判断是否为内网ip
+ private boolean isInnerIP(String ipAddress) {
+ if (ipAddress.equals("1.1.1.1") || ipAddress.equals("127.0.0.1") || ipAddress.equals("0.0.0.0")) {
+ return true;
+ }
+ boolean isInnerIp = false;
+ long ipNum = getIpNum(ipAddress);
+
+ long aBegin = getIpNum("10.0.0.0");
+ long aEnd = getIpNum("10.255.255.255");
+ long bBegin = getIpNum("172.16.0.0");
+ long bEnd = getIpNum("172.31.255.255");
+ long cBegin = getIpNum("192.168.0.0");
+ long cEnd = getIpNum("192.168.255.255");
+ isInnerIp = isInner(ipNum, aBegin, aEnd) || isInner(ipNum, bBegin, bEnd) || isInner(ipNum, cBegin, cEnd);
+ return isInnerIp;
+ }
+
+ private long getIpNum(String ipAddress) {
+ String[] ip = ipAddress.split("\\.");
+ long a = Integer.parseInt(ip[0]);
+ long b = Integer.parseInt(ip[1]);
+ long c = Integer.parseInt(ip[2]);
+ long d = Integer.parseInt(ip[3]);
+
+ long ipNum = a * 256 * 256 * 256 + b * 256 * 256 + c * 256 + d;
+ return ipNum;
+ }
+
+ private boolean isInner(long userIp, long begin, long end) {
+ return (userIp >= begin) && (userIp <= end);
+ }
+
+ //判断是否是一个IP
+ private boolean isIp(String IP) {
+ IP = this.removeBlank(IP);
+ if (IP.length() < 7 || IP.length() > 15) {
+ return false;
+ }
+ if (IP.contains(".")) {
+ String[] arr = IP.split("\\.");
+ if (arr.length != 4) {
+ return false;
+ }
+ for (int i = 0; i < 4; i++) {
+ for (int j = 0; j < arr[i].length(); j++) {
+ char temp = arr[i].charAt(j);
+ if (!(temp >= '0' && temp <= '9')) {
+ return false;
+ }
+ }
+ }
+ for (int i = 0; i < 4; i++) {
+ int temp = Integer.parseInt(arr[i]);
+ if (temp < 0 || temp > 255) {
+ return false;
+ }
+ }
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ private String removeBlank(String IP) {
+ while (IP.startsWith(" ")) {
+ IP = IP.substring(1, IP.length()).trim();
+ }
+ while (IP.endsWith(" ")) {
+ IP = IP.substring(0, IP.length() - 1).trim();
+ }
+ return IP;
+ }
+
+ //获取编码
+ private String getCodingS(String contentStr) {
+ List<String> list = new ArrayList<String>();
+ Pattern pattern = Pattern.compile("a=rtpmap:(.*?)\\r\\n");
+ Matcher m = pattern.matcher(contentStr);
+ while (m.find()) {
+ list.add(m.group(1).replace(" ", "").toLowerCase());
+ }
+ if (list.size() >= 1) {
+ String[] strArray = list.toArray(new String[list.size()]);
+ return Arrays.toString(strArray);
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * 针对性修改ser_domain返回值的格式问题,此处主要是处理To_ser_domain字段
+ *
+ * @param serDomainOrNull
+ * @return
+ */
+ private String pointSerDomainPatch(String serDomainOrNull) {
+ if (serDomainOrNull.contains(";")) {
+ serDomainOrNull = serDomainOrNull.split(";")[0];
+ }
+ if (serDomainOrNull.contains("<sip:")) {
+ serDomainOrNull = serDomainOrNull.split("<sip:")[0].replace("\"", "");
+ }
+
+ if (serDomainOrNull.contains("\r") || serDomainOrNull.contains("\n")) {
+ serDomainOrNull = serDomainOrNull.split("[\\r\\n]")[0];
+ }
+ if (serDomainOrNull.contains("\\r") || serDomainOrNull.contains("\\n")) {
+ serDomainOrNull = serDomainOrNull.split("[\\\\r\\\\n]")[0];
+ }
+
+ return serDomainOrNull.replace("+", "")
+ .replace("[", "")
+ .replace("]", "")
+ .replace(" ", "")
+ ;
+ }
+
    /**
     * Normalizes the Res_stat_format value: keeps only the leading token by
     * cutting at the FIRST delimiter whose branch matches in the if/else
     * chain below (only one branch ever runs, in the listed priority order),
     * then strips every residual punctuation character.
     *
     * @param replaceBlank raw status text (blanks already removed by caller)
     * @return cleaned status token
     */
    private String repairResStatFormatStr(String replaceBlank) {
        if (replaceBlank.contains(",")) {
            replaceBlank = replaceBlank.split(",")[0];
        } else if (replaceBlank.contains("'")) {
            replaceBlank = replaceBlank.split("'")[0];
            //drop a '-' left dangling at the end after the cut
            if (replaceBlank.contains("-")) {
                if (replaceBlank.indexOf("-") == (replaceBlank.length() - 1)) {
                    replaceBlank = replaceBlank.replace("-", "");
                }
            }
        } else if (replaceBlank.contains("(")) {
            replaceBlank = replaceBlank.split("[(]")[0];
        } else if (replaceBlank.contains("...")) {
            //checked before the single '.' case so "..." is cut whole
            replaceBlank = replaceBlank.split("\\.\\.\\.")[0];
        } else if (replaceBlank.contains(".")) {
            replaceBlank = replaceBlank.split("[.]")[0];
        } else if (replaceBlank.contains("/")) {
            replaceBlank = replaceBlank.split("/")[0];
        } else if (replaceBlank.contains("[")) {
            replaceBlank = replaceBlank.split("\\[")[0];
        } else if (replaceBlank.contains("-")) {
            replaceBlank = replaceBlank.split("-")[0];
        } else if (replaceBlank.contains(":")) {
            replaceBlank = replaceBlank.split(":")[0];
        } else if (replaceBlank.contains(";")) {
            replaceBlank = replaceBlank.split(";")[0];
        } else if (replaceBlank.contains("!")) {
            replaceBlank = replaceBlank.split("[!]")[0];
        }

        //final sweep: remove every remaining delimiter character
        return replaceBlank.replace(" ", "")
                .replace("-", "")
                .replace(",", "")
                .replace(".", "")
                .replace("[", "")
                .replace("]", "")
                .replace("/", "")
                .replace("!", "")
                .replace(";", "")
                .replace(":", "")
                .replace("@", "")
                ;
    }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ Map<String, Object> conf = new HashMap<String, Object>();
+ conf.put(org.apache.storm.Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS,
+ RealtimeCountConfig.TOPOLOGY_TICK_TUPLE_COMP_FREQ_SECS);
+ return conf;
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("jsonLog", "logType"));
+ }
+} \ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/bolt/SipInsertBoltDC.java b/src/main/java/cn/ac/iie/bolt/SipInsertBoltDC.java
new file mode 100644
index 0000000..d09243c
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bolt/SipInsertBoltDC.java
@@ -0,0 +1,136 @@
+package cn.ac.iie.bolt;
+
+import cn.ac.iie.common.RealtimeCountConfig;
+import cn.ac.iie.dao.KafkaDB;
+import cn.ac.iie.utils.HiveDao.HdfsDataLoad_Avro;
+import cn.ac.iie.utils.TupleUtils;
+import com.zdjizhi.utils.StringUtil;
+import org.apache.log4j.Logger;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseBasicBolt;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.Map;
+
+public class SipInsertBoltDC extends BaseBasicBolt {
+
+ private static final long serialVersionUID = -6795251425357896415L;
+ private static Logger logger = Logger.getLogger(SipInsertBoltDC.class);
+
+ private LinkedList<String> sipOriJsonList;//存放sip原始补全日志字符串
+ private LinkedList<String> routeRelationJsonList;//存放voip路由关系日志字符串
+
+ private HdfsDataLoad_Avro hdfsDataLoadAvro;
+
+ private Integer tickFreqSecs;
+
+ public SipInsertBoltDC(Integer tickFreqSecs) {
+ this.tickFreqSecs = tickFreqSecs;
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public void prepare(Map stormConf,
+ TopologyContext context) {
+ hdfsDataLoadAvro = HdfsDataLoad_Avro.getHdfsInstance();
+ sipOriJsonList = new LinkedList<String>();
+ routeRelationJsonList = new LinkedList<String>();
+ }
+
+ @Override
+ public void execute(Tuple tuple, BasicOutputCollector collector) {
+ try {
+ if (TupleUtils.isTick(tuple)) {
+ long time = System.currentTimeMillis() / 1000L;
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
+ String partition = sdf.format(new Date(time * 1000L)).replaceAll("-", "");//格式:20190722
+ if (!sipOriJsonList.isEmpty()) {
+ LinkedList<String> tmpListFreq = new LinkedList<String>();
+ tmpListFreq.addAll(sipOriJsonList);
+ sipOriJsonList.clear();
+ hdfsDataLoadAvro.dataSipToHdfsAvro(partition, tmpListFreq, RealtimeCountConfig.KAFKA_SIP_COMPLEMENT_TOPIC, "origin", time);
+ }
+
+ //定时写入voip路由关系日志---20190807废弃,路由关系转离线spark清洗获取
+// if (!routeRelationJsonList.isEmpty()) {
+//// Map<String, Long> tmpMap = new HashMap<String, Long>();
+// LinkedList<String> tmpFragListFreq = new LinkedList<String>();
+// tmpFragListFreq.addAll(routeRelationJsonList);
+// routeRelationJsonList.clear();
+// kafkaDB.routeRelatLog2KafkaFromSipInsertBoltDC(tmpFragListFreq);
+//// dcl.dfPzFlowBatchStorage(tmpMap);//正式用,直接入中心http,已验证可用
+//// dbl.dfPzFlowBatchStorage2CH(tmpMap);//测试入clickhouse20190220
+//// dbl.dfPzFlowBatchStorage2CH(tmpFragListFreq);//测试入clickhouse20190220
+// }
+ } else {
+ String jsonLog = tuple.getString(0);
+ String logType = tuple.getString(1);
+ switch (logType) {
+ case "origin":
+ if (StringUtil.isNotBlank(jsonLog)) {
+ sipOriJsonList.add(jsonLog);
+ collector.emit(new Values(jsonLog));
+ }
+ break;
+// case "route"://存放路由关系数据---20190807废弃,路由关系转离线spark清洗获取
+// if (StringUtil.isNotBlank(jsonLog)) {
+// routeRelationJsonList.add(jsonLog);
+// }
+// break;
+ default:
+ logger.error("SipInsertBoltDC logType is error !!!This logType is--->{" + logType + "}<---");
+ break;
+ }
+ long time = System.currentTimeMillis() / 1000L;
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
+ String partition = sdf.format(new Date(time * 1000L)).replaceAll("-", "");
+ if (sipOriJsonList.size() >= RealtimeCountConfig.BATCH_KAFKA_INSERT_NUM) {
+ LinkedList<String> tmpList = new LinkedList<String>();
+ tmpList.addAll(sipOriJsonList);
+ sipOriJsonList.clear();
+ hdfsDataLoadAvro.dataSipToHdfsAvro(partition, tmpList, RealtimeCountConfig.KAFKA_SIP_COMPLEMENT_TOPIC, "origin", time);
+ }
+ //写入voip路由关系日志---20190807废弃,路由关系转离线spark清洗获取
+// if (routeRelationJsonList.size() >= RealtimeCountConfig.BATCH_KAFKA_INSERT_NUM) {
+// LinkedList<String> tmpRouteList = new LinkedList<String>();
+// tmpRouteList.addAll(routeRelationJsonList);
+// routeRelationJsonList.clear();
+//// dbl.dfPzFlowBatchStorage2CH(tmpRouteList);//测试入clickhouse20190220
+// kafkaDB.routeRelatLog2KafkaFromSipInsertBoltDC(tmpRouteList);
+// }
+
+ }
+ } catch (Exception e) {
+ logger.error("SipInsertBoltDC to insert is error !!!--->{" + e + "}<---");
+ e.printStackTrace();
+ }
+ }
+
+ private void logCount(String key, Map<String, Long> hm) {
+ if (hm.containsKey(key)) {
+ hm.put(key, hm.get(key) + 1);
+ } else {
+ hm.put(key, 1l);
+ }
+ }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ Map<String, Object> conf = new HashMap<String, Object>();
+ conf.put(org.apache.storm.Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, tickFreqSecs);
+ return conf;
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("countJsonLog"));
+ }
+} \ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/bolt/SipRealTimeCountBoltDC.java b/src/main/java/cn/ac/iie/bolt/SipRealTimeCountBoltDC.java
new file mode 100644
index 0000000..00a4f95
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bolt/SipRealTimeCountBoltDC.java
@@ -0,0 +1,182 @@
+package cn.ac.iie.bolt;
+
+import cn.ac.iie.bean.voipSipOrigin.SipOriginALL;
+import cn.ac.iie.utils.TupleUtils;
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONObject;
+import org.apache.log4j.Logger;
+import org.apache.storm.Config;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseBasicBolt;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+
+import java.text.SimpleDateFormat;
+import java.util.HashMap;
+import java.util.Map;
+
+
+public class SipRealTimeCountBoltDC extends BaseBasicBolt {
+
+ private static Logger logger = Logger.getLogger(SipRealTimeCountBoltDC.class);
+ private final Integer tickFreqSecs;
+
+ private Map<String, Long> codingCount = null;
+ private Map<String, Long> ipLocationCount = null;
+ private Map<String, Long> ipTypeCount = null;
+ private Map<String, Long> methodCount = null;
+ private Map<String, Long> resStatCount = null;
+ private Map<String, Long> serverCount = null;
+ private Map<String, Long> serviceDomainCount = null;
+ private Map<String, Long> uaCount = null;
+
+
+ public SipRealTimeCountBoltDC(Integer tickFreqSecs) {
+ this.tickFreqSecs = tickFreqSecs;
+ }
+
+ @Override
+ public void prepare(Map stormConf, TopologyContext context) {
+ codingCount = new HashMap<>();
+ ipLocationCount = new HashMap<>();
+ ipTypeCount = new HashMap<>();
+ methodCount = new HashMap<>();
+ resStatCount = new HashMap<>();
+ serverCount = new HashMap<>();
+ serviceDomainCount = new HashMap<>();
+ uaCount = new HashMap<>();
+
+ }
+
+ public void execute(Tuple tuple, BasicOutputCollector collector) {
+ if (TupleUtils.isTick(tuple)) {
+
+ Map<String, Long> tmpServiceMap = new HashMap<String, Long>(serviceDomainCount);
+ serviceDomainCount.clear();
+ sendCount("service", tmpServiceMap, collector);
+
+ Map<String, Long> tmpServerMap = new HashMap<String, Long>(serverCount);
+ serverCount.clear();
+ sendCount("server", tmpServerMap, collector);
+
+ Map<String, Long> tmpUaMap = new HashMap<String, Long>(uaCount);
+ uaCount.clear();
+ sendCount("ua", tmpUaMap, collector);
+
+ Map<String, Long> tmpLocationMap = new HashMap<String, Long>(ipLocationCount);
+ ipLocationCount.clear();
+ sendCount("location", tmpLocationMap, collector);
+
+ Map<String, Long> tmpTypeMap = new HashMap<String, Long>(ipTypeCount);
+ ipTypeCount.clear();
+ sendCount("type", tmpTypeMap, collector);
+
+ Map<String, Long> tmpMethodMap = new HashMap<String, Long>(methodCount);
+ methodCount.clear();
+ sendCount("method", tmpMethodMap, collector);
+
+ Map<String, Long> tmpResStatMap = new HashMap<String, Long>(resStatCount);
+ resStatCount.clear();
+ sendCount("resStat", tmpResStatMap, collector);
+
+ Map<String, Long> tmpCodingMap = new HashMap<String, Long>(codingCount);
+ codingCount.clear();
+ sendCount("coding", tmpCodingMap, collector);
+
+ } else {
+ try {
+
+ String jsonLog = tuple.getString(0);
+
+ SipOriginALL sipOriginLog = JSONObject.parseObject(jsonLog, SipOriginALL.class);
+
+ //预处理cseq
+ String cSeq = "NULL";
+ String rawCSeq = sipOriginLog.getCseq();
+ if(null != rawCSeq) {
+ String[] splitCSeq = rawCSeq.toUpperCase().split("[\\s]+");
+ if(splitCSeq.length > 1) {
+ cSeq = splitCSeq[1];
+ }
+ }
+
+ //提取所需的字段
+ String service = sipOriginLog.getTo_ser_domain();
+ String server = sipOriginLog.getServer();
+ String ua = sipOriginLog.getUser_Agent();
+ String srcCtyReg = sipOriginLog.getSRC_LOCATION_NATION() + "+" + sipOriginLog.getSRC_LOCATION_REGION() + "+" + sipOriginLog.getSRC_LOCATION_NATION_CODE();
+ String dstCtyReg = sipOriginLog.getDST_LOCATION_NATION() + "+" + sipOriginLog.getDST_LOCATION_REGION() + "+" + sipOriginLog.getDST_LOCATION_NATION_CODE();
+ String type = sipOriginLog.getIP_TYPE();
+ String method = sipOriginLog.getMethod();
+ String resStat = sipOriginLog.getRes_stat_format() + "+" + cSeq;
+ String reqCodings = sipOriginLog.getReq_coding();
+ String resCodings = sipOriginLog.getRes_coding();
+
+ //计数
+ logCount(service, serviceDomainCount);
+ logCount(server, serverCount);
+ logCount(ua, uaCount);
+ logCount(srcCtyReg, ipLocationCount);
+ logCount(dstCtyReg, ipLocationCount);
+ logCount(type, ipTypeCount);
+ logCount(method, methodCount);
+ logCount(resStat, resStatCount);
+ if(null != reqCodings) {
+ String[] reqSplit = reqCodings.split("[,\\[\\]]");
+ for(int i = 1; i < reqSplit.length; i++) {
+ logCount(reqSplit[i].trim(), codingCount);
+ }
+ } else {
+ logCount("NULL", codingCount);
+ }
+ if(null != resCodings) {
+ String[] resSplit = resCodings.split("[,\\[\\]]");
+ for(int j = 1; j < resSplit.length; j++) {
+ logCount(resSplit[j].trim(), codingCount);
+ }
+ } else {
+ logCount("NULL", codingCount);
+ }
+ } catch (Exception e) {
+ logger.error("SipRealTimeCountBoltDC error !!!--->{" + e + "}<---");
+ logger.error("SipRealTimeCountBoltDC data is !!!--->{" + tuple.getString(0) + "}<---");
+ e.printStackTrace();
+ }
+ }
+
+ }
+
+ private void logCount(String key, Map<String, Long> hm) {
+ if (hm.containsKey(key)) {
+ hm.put(key, hm.get(key) + 1);
+ } else {
+ hm.put(key, 1l);
+ }
+ }
+
+ private void sendCount(String countType, Map<String, Long> hm, BasicOutputCollector collector) {
+
+ long time = System.currentTimeMillis();
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ String currentTime = sdf.format(time);
+
+ String jsonString = JSON.toJSONString(hm);
+ collector.emit(new Values(countType, jsonString, currentTime));
+
+ }
+
+
+ public Map<String, Object> getComponentConfiguration() {
+ Map<String, Object> conf = new HashMap<>();
+ conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, tickFreqSecs);
+ return conf;
+ }
+
+ public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
+ outputFieldsDeclarer.declare(new Fields("countType", "jsonCount", "currentTime"));
+
+ }
+} \ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/bolt/SipRealTimeMergeBoltDC.java b/src/main/java/cn/ac/iie/bolt/SipRealTimeMergeBoltDC.java
new file mode 100644
index 0000000..fcf9af9
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bolt/SipRealTimeMergeBoltDC.java
@@ -0,0 +1,604 @@
+package cn.ac.iie.bolt;
+
+import cn.ac.iie.bean.voipSipCount.*;
+import cn.ac.iie.dao.DbConnect;
+import cn.ac.iie.utils.TupleUtils;
+import com.alibaba.fastjson.JSONObject;
+import org.apache.log4j.Logger;
+import org.apache.storm.Config;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseBasicBolt;
+import org.apache.storm.tuple.Tuple;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.text.SimpleDateFormat;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * Terminal aggregation bolt of the SIP realtime-count topology.
+ * <p>
+ * Non-tick tuples carry a JSON-serialised {@code Map<String,Long>} of partial
+ * counts tagged with a {@code countType}; they are merged into per-type
+ * accumulator maps. On every tick tuple (every {@code tickFreqSecs} seconds)
+ * the accumulators are snapshotted, cleared and batch-inserted into the
+ * corresponding {@code voip_knowledge.*} tables.
+ * <p>
+ * All mutable state is created in {@link #prepare}, after deserialisation;
+ * Storm executes a bolt instance single-threaded.
+ */
+public class SipRealTimeMergeBoltDC extends BaseBasicBolt {
+
+    private static Logger logger = Logger.getLogger(SipRealTimeMergeBoltDC.class);
+
+    private static DbConnect manager = DbConnect.getInstance();
+
+    // Tick interval (seconds) between two flushes to the database.
+    private final Integer tickFreqSecs;
+
+    // One accumulator per statistic type; filled on count tuples, drained on tick.
+    private Map<String, VoipServiceDomain> serviceDomainCountResult;
+    private Map<String, VoipServer> serverCountResult;
+    private Map<String, VoipUa> uaCountResult;
+    private Map<String, VoipIpLocation> ipLocationCountResult;
+    private Map<String, VoipIpType> ipTypeCountResult;
+    private Map<String, VoipMethod> methodCountResult;
+    private Map<String, VoipResStat> resStatCountResult;
+    private Map<String, VoipCoding> codingCountResult;
+
+    /**
+     * Adapter that knows how to build a new count bean for a key and how to
+     * add further occurrences to an existing one. The static instances below
+     * let one generic merge loop replace eight hand-copied ones.
+     */
+    private interface BeanAdapter<T> {
+        T create(String key, Long count, String currentTime);
+
+        void add(T object, Long count);
+    }
+
+    /** Binds one accumulated bean to the positional parameters of a batch row. */
+    private interface RowBinder<T> {
+        void bind(PreparedStatement pstmt, T object) throws Exception;
+    }
+
+    private static final BeanAdapter<VoipServiceDomain> SERVICE_ADAPTER = new BeanAdapter<VoipServiceDomain>() {
+        @Override
+        public VoipServiceDomain create(String key, Long count, String currentTime) {
+            VoipServiceDomain object = new VoipServiceDomain();
+            object.setService(key);
+            // "0" marks a bare IPv4-style service key, "1" everything else.
+            object.setType(isIp(key) ? "0" : "1");
+            object.setCount(count);
+            object.setInterval_time(currentTime);
+            return object;
+        }
+
+        @Override
+        public void add(VoipServiceDomain object, Long count) {
+            object.setCount(object.getCount() + count);
+        }
+    };
+
+    private static final BeanAdapter<VoipServer> SERVER_ADAPTER = new BeanAdapter<VoipServer>() {
+        @Override
+        public VoipServer create(String key, Long count, String currentTime) {
+            VoipServer object = new VoipServer();
+            object.setServer(key);
+            object.setCount(count);
+            object.setInterval_time(currentTime);
+            return object;
+        }
+
+        @Override
+        public void add(VoipServer object, Long count) {
+            object.setCount(object.getCount() + count);
+        }
+    };
+
+    private static final BeanAdapter<VoipUa> UA_ADAPTER = new BeanAdapter<VoipUa>() {
+        @Override
+        public VoipUa create(String key, Long count, String currentTime) {
+            VoipUa object = new VoipUa();
+            object.setUa(key);
+            object.setCount(count);
+            object.setInterval_time(currentTime);
+            return object;
+        }
+
+        @Override
+        public void add(VoipUa object, Long count) {
+            object.setCount(object.getCount() + count);
+        }
+    };
+
+    private static final BeanAdapter<VoipIpLocation> LOCATION_ADAPTER = new BeanAdapter<VoipIpLocation>() {
+        @Override
+        public VoipIpLocation create(String key, Long count, String currentTime) {
+            // Key format: country+region+nationCode, '+'-separated.
+            String[] items = key.split("\\+");
+            VoipIpLocation object = new VoipIpLocation();
+            object.setCountry(items[0]);
+            object.setRegion(items[1]);
+            object.setNationCode(items[2]);
+            object.setCount(count);
+            object.setInterval_time(currentTime);
+            return object;
+        }
+
+        @Override
+        public void add(VoipIpLocation object, Long count) {
+            object.setCount(object.getCount() + count);
+        }
+    };
+
+    private static final BeanAdapter<VoipIpType> TYPE_ADAPTER = new BeanAdapter<VoipIpType>() {
+        @Override
+        public VoipIpType create(String key, Long count, String currentTime) {
+            VoipIpType object = new VoipIpType();
+            object.setType(key);
+            object.setCount(count);
+            object.setInterval_time(currentTime);
+            return object;
+        }
+
+        @Override
+        public void add(VoipIpType object, Long count) {
+            object.setCount(object.getCount() + count);
+        }
+    };
+
+    private static final BeanAdapter<VoipMethod> METHOD_ADAPTER = new BeanAdapter<VoipMethod>() {
+        @Override
+        public VoipMethod create(String key, Long count, String currentTime) {
+            VoipMethod object = new VoipMethod();
+            object.setMethod(key);
+            object.setCount(count);
+            object.setInterval_time(currentTime);
+            return object;
+        }
+
+        @Override
+        public void add(VoipMethod object, Long count) {
+            object.setCount(object.getCount() + count);
+        }
+    };
+
+    private static final BeanAdapter<VoipResStat> RES_STAT_ADAPTER = new BeanAdapter<VoipResStat>() {
+        @Override
+        public VoipResStat create(String key, Long count, String currentTime) {
+            // Key format: res_stat+cseq, '+'-separated.
+            String[] items = key.split("\\+");
+            VoipResStat object = new VoipResStat();
+            object.setRes_stat(items[0]);
+            object.setCseq(items[1]);
+            object.setCount(count);
+            object.setInterval_time(currentTime);
+            return object;
+        }
+
+        @Override
+        public void add(VoipResStat object, Long count) {
+            object.setCount(object.getCount() + count);
+        }
+    };
+
+    private static final BeanAdapter<VoipCoding> CODING_ADAPTER = new BeanAdapter<VoipCoding>() {
+        @Override
+        public VoipCoding create(String key, Long count, String currentTime) {
+            VoipCoding object = new VoipCoding();
+            object.setCoding(key);
+            object.setCount(count);
+            object.setInterval_time(currentTime);
+            return object;
+        }
+
+        @Override
+        public void add(VoipCoding object, Long count) {
+            object.setCount(object.getCount() + count);
+        }
+    };
+
+    public SipRealTimeMergeBoltDC(Integer tickFreqSecs) {
+        this.tickFreqSecs = tickFreqSecs;
+    }
+
+    @Override
+    public void prepare(Map stormConf, TopologyContext context) {
+        serviceDomainCountResult = new HashMap<>();
+        serverCountResult = new HashMap<>();
+        uaCountResult = new HashMap<>();
+        ipLocationCountResult = new HashMap<>();
+        ipTypeCountResult = new HashMap<>();
+        methodCountResult = new HashMap<>();
+        resStatCountResult = new HashMap<>();
+        codingCountResult = new HashMap<>();
+    }
+
+    @Override
+    public void execute(Tuple input, BasicOutputCollector collector) {
+        if (TupleUtils.isTick(input)) {
+            flushAll();
+        } else {
+            mergeCountTuple(input);
+        }
+    }
+
+    /**
+     * Merges one partial-count tuple ("countType", "jsonCount", "currentTime")
+     * into the accumulator selected by its type tag. Unknown tags are dropped.
+     */
+    private void mergeCountTuple(Tuple input) {
+        String countType = input.getStringByField("countType");
+        String jsonCount = input.getStringByField("jsonCount");
+        String currentTime = input.getStringByField("currentTime");
+        // JSONObject implements Map<String, Object>; values are the Long counts.
+        Map<String, Object> hmCount = JSONObject.parseObject(jsonCount);
+
+        switch (countType) {
+            case "service":
+                merge(hmCount, serviceDomainCountResult, currentTime, SERVICE_ADAPTER);
+                break;
+            case "server":
+                merge(hmCount, serverCountResult, currentTime, SERVER_ADAPTER);
+                break;
+            case "ua":
+                merge(hmCount, uaCountResult, currentTime, UA_ADAPTER);
+                break;
+            case "location":
+                merge(hmCount, ipLocationCountResult, currentTime, LOCATION_ADAPTER);
+                break;
+            case "type":
+                merge(hmCount, ipTypeCountResult, currentTime, TYPE_ADAPTER);
+                break;
+            case "method":
+                merge(hmCount, methodCountResult, currentTime, METHOD_ADAPTER);
+                break;
+            case "resStat":
+                merge(hmCount, resStatCountResult, currentTime, RES_STAT_ADAPTER);
+                break;
+            case "coding":
+                merge(hmCount, codingCountResult, currentTime, CODING_ADAPTER);
+                break;
+            default:
+                // Unknown type tags are ignored, as in the original switch.
+                break;
+        }
+    }
+
+    /** Adds every (key, count) pair of a partial result into an accumulator map. */
+    private <T> void merge(Map<String, Object> hmCount, Map<String, T> hm, String currentTime, BeanAdapter<T> adapter) {
+        for (Map.Entry<String, Object> entry : hmCount.entrySet()) {
+            String key = entry.getKey();
+            Long count = Long.valueOf(String.valueOf(entry.getValue()));
+            T existing = hm.get(key);
+            if (existing == null) {
+                hm.put(key, adapter.create(key, count, currentTime));
+            } else {
+                adapter.add(existing, count);
+            }
+        }
+    }
+
+    /** Snapshots, clears and batch-inserts every non-empty accumulator. */
+    private void flushAll() {
+        final String currentTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(System.currentTimeMillis());
+
+        flush(serviceDomainCountResult, "voip_service_domain",
+                "insert into voip_knowledge.voip_service_domain values(?, ?, ?, ?)",
+                new RowBinder<VoipServiceDomain>() {
+                    @Override
+                    public void bind(PreparedStatement pstmt, VoipServiceDomain object) throws Exception {
+                        pstmt.setString(1, object.getService());
+                        pstmt.setString(2, object.getType());
+                        pstmt.setLong(3, object.getCount());
+                        pstmt.setString(4, currentTime);
+                    }
+                });
+        flush(serverCountResult, "voip_server",
+                "insert into voip_knowledge.voip_server values(?, ?, ?)",
+                new RowBinder<VoipServer>() {
+                    @Override
+                    public void bind(PreparedStatement pstmt, VoipServer object) throws Exception {
+                        pstmt.setString(1, object.getServer());
+                        pstmt.setLong(2, object.getCount());
+                        pstmt.setString(3, currentTime);
+                    }
+                });
+        flush(uaCountResult, "voip_ua",
+                "insert into voip_knowledge.voip_ua values(?, ?, ?)",
+                new RowBinder<VoipUa>() {
+                    @Override
+                    public void bind(PreparedStatement pstmt, VoipUa object) throws Exception {
+                        pstmt.setString(1, object.getUa());
+                        pstmt.setLong(2, object.getCount());
+                        pstmt.setString(3, currentTime);
+                    }
+                });
+        flush(ipLocationCountResult, "voip_ip_location",
+                "insert into voip_knowledge.voip_ip_location values(?, ?, ?, ?, ?)",
+                new RowBinder<VoipIpLocation>() {
+                    @Override
+                    public void bind(PreparedStatement pstmt, VoipIpLocation object) throws Exception {
+                        pstmt.setString(1, object.getCountry());
+                        pstmt.setString(2, object.getRegion());
+                        pstmt.setString(3, object.getNationCode());
+                        pstmt.setLong(4, object.getCount());
+                        pstmt.setString(5, currentTime);
+                    }
+                });
+        flush(ipTypeCountResult, "voip_ip_type",
+                "insert into voip_knowledge.voip_ip_type values(?, ?, ?)",
+                new RowBinder<VoipIpType>() {
+                    @Override
+                    public void bind(PreparedStatement pstmt, VoipIpType object) throws Exception {
+                        pstmt.setString(1, object.getType());
+                        pstmt.setLong(2, object.getCount());
+                        pstmt.setString(3, currentTime);
+                    }
+                });
+        flush(methodCountResult, "voip_method",
+                "insert into voip_knowledge.voip_method values(?, ?, ?)",
+                new RowBinder<VoipMethod>() {
+                    @Override
+                    public void bind(PreparedStatement pstmt, VoipMethod object) throws Exception {
+                        pstmt.setString(1, object.getMethod());
+                        pstmt.setLong(2, object.getCount());
+                        pstmt.setString(3, currentTime);
+                    }
+                });
+        flush(resStatCountResult, "voip_res_stat",
+                "insert into voip_knowledge.voip_res_stat values(?, ?, ?, ?)",
+                new RowBinder<VoipResStat>() {
+                    @Override
+                    public void bind(PreparedStatement pstmt, VoipResStat object) throws Exception {
+                        pstmt.setString(1, object.getRes_stat());
+                        pstmt.setString(2, object.getCseq());
+                        pstmt.setLong(3, object.getCount());
+                        pstmt.setString(4, currentTime);
+                    }
+                });
+        // Fix: the original coding insert logged its failure as "voip_ua".
+        flush(codingCountResult, "voip_coding",
+                "insert into voip_knowledge.voip_coding values(?, ?, ?)",
+                new RowBinder<VoipCoding>() {
+                    @Override
+                    public void bind(PreparedStatement pstmt, VoipCoding object) throws Exception {
+                        pstmt.setString(1, object.getCoding());
+                        pstmt.setLong(2, object.getCount());
+                        pstmt.setString(3, currentTime);
+                    }
+                });
+
+        logger.warn("Real time count result Insert Clickhouse execute at " + currentTime);
+    }
+
+    /** Snapshots and clears one accumulator, then batch-inserts the snapshot. */
+    private <T> void flush(Map<String, T> result, String table, String sql, RowBinder<T> binder) {
+        if (result.isEmpty()) {
+            return;
+        }
+        Map<String, T> snapshot = new HashMap<String, T>(result);
+        result.clear();
+        batchInsert(table, sql, snapshot, binder);
+    }
+
+    /**
+     * Inserts every bean of the snapshot as one JDBC batch inside a single
+     * transaction. Failures are logged and swallowed (best effort), matching
+     * the original per-table insert methods.
+     */
+    private <T> void batchInsert(String table, String sql, Map<String, T> hm, RowBinder<T> binder) {
+        Connection connection = null;
+        PreparedStatement pstmt = null;
+        try {
+            connection = manager.getConnection();
+            connection.setAutoCommit(false);
+            pstmt = connection.prepareStatement(sql);
+            for (Map.Entry<String, T> entry : hm.entrySet()) {
+                binder.bind(pstmt, entry.getValue());
+                pstmt.addBatch();
+            }
+            pstmt.executeBatch();
+            connection.commit();
+        } catch (Exception e) {
+            logger.error("+++++++++insert to " + table + " Log write failed!!!+++++++++");
+            e.printStackTrace();
+        } finally {
+            manager.clear(pstmt, connection);
+        }
+    }
+
+    /**
+     * Lightweight IPv4 dotted-quad check, optionally tolerating one extra
+     * ':'-separated segment (e.g. a port): splitting on '.' and ':' must yield
+     * 4 or 5 parts, and the first four must be all-digit values in 0..255.
+     * Leading/trailing whitespace is trimmed first.
+     * <p>
+     * Fix: the original compared segments with {@code "" == arr[i]} (reference
+     * equality, never true), so an empty octet such as in "1..2.3" reached
+     * {@code Integer.parseInt("")} and threw; empty octets now return false.
+     */
+    private static boolean isIp(String ip) {
+        if (null == ip) {
+            return false;
+        }
+        ip = ip.trim();
+        if (ip.length() < 7 || ip.length() > 21) {
+            return false;
+        }
+        if (!ip.contains(".")) {
+            return false;
+        }
+        String[] arr = ip.split("[.:]");
+        if (arr.length != 4 && arr.length != 5) {
+            return false;
+        }
+        for (int i = 0; i < 4; i++) {
+            String seg = arr[i];
+            if (seg.isEmpty() || seg.length() > 3) {
+                return false;
+            }
+            for (int j = 0; j < seg.length(); j++) {
+                char c = seg.charAt(j);
+                if (c < '0' || c > '9') {
+                    return false;
+                }
+            }
+            int value = Integer.parseInt(seg);
+            if (value < 0 || value > 255) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Registers the tick-tuple interval that schedules the periodic flush.
+     */
+    public Map<String, Object> getComponentConfiguration() {
+        Map<String, Object> conf = new HashMap<>();
+        conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, tickFreqSecs);
+        return conf;
+    }
+
+    /** Terminal bolt: emits nothing, so no output fields are declared. */
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+
+    }
+}
diff --git a/src/main/java/cn/ac/iie/common/CommonService.java b/src/main/java/cn/ac/iie/common/CommonService.java
new file mode 100644
index 0000000..4cf8ec0
--- /dev/null
+++ b/src/main/java/cn/ac/iie/common/CommonService.java
@@ -0,0 +1,68 @@
+package cn.ac.iie.common;
+
+import java.io.Serializable;
+
+import org.apache.log4j.Logger;
+import cn.ac.iie.bean.ConfigCompile;;
+
+/**
+ * Shared parsing helpers: splitting raw log lines into fields and validating
+ * tab-separated compile-config lines into {@link ConfigCompile} beans.
+ */
+public class CommonService implements Serializable{
+
+    private static final long serialVersionUID = 6106510579752162633L;
+    private static Logger logger = Logger.getLogger(CommonService.class);
+    // Expected number of tab-separated fields in a compile-config line.
+    private static final Integer COMPILE_OPTION_NUMBER = 9;
+
+    /**
+     * Splits a raw log line into its fields according to the per-log-type
+     * field count.
+     *
+     * @param message raw log line, fields separated by
+     *                {@code RealtimeCountConfig.LOG_STRING_SPLITTER} ("\t")
+     * @param logType key into {@code HashTableConfig.LOG_SUB_OPTION_NUM_MAP};
+     *                an unknown type throws NPE on unboxing, as before
+     * @return the field array, or null when the message is null/empty or the
+     *         field count does not match
+     */
+    public String[] splitMessageWithLogType(String message, String logType) {
+        Integer expectedLength = RealtimeCountConfig.LOG_COMMON_FIELD_NUM
+                + HashTableConfig.LOG_SUB_OPTION_NUM_MAP.get(logType);
+        if (message == null || message.isEmpty()) {
+            return null;
+        }
+        String[] values = message.split(RealtimeCountConfig.LOG_STRING_SPLITTER);
+        if (values.length == expectedLength) {
+            return values;
+        }
+        logger.error(logType+"--->message length = " + values.length + " is illegal");//测试用
+        return null;
+    }
+
+    /**
+     * Validates a reply fetched from redis; null replies yield null.
+     *
+     * @return the parsed config, or null when absent or invalid
+     */
+    public ConfigCompile checkReplyFromRedis(String redisReply){
+        if (redisReply == null) {
+            return null;
+        }
+        return parseCompile(redisReply);
+    }
+
+    /**
+     * Validates a pz config line. Deliberately no null guard: a null input
+     * throws NPE, matching the historical behaviour.
+     */
+    public ConfigCompile checkPz(String pzStr){
+        return parseCompile(pzStr);
+    }
+
+    /**
+     * Shared body of {@link #checkReplyFromRedis} and {@link #checkPz}:
+     * splits the line on tabs, requires exactly COMPILE_OPTION_NUMBER fields,
+     * and rejects configs whose SERVICE tag is the empty-option placeholder.
+     *
+     * @return the parsed config, or null when the line is invalid
+     */
+    private ConfigCompile parseCompile(String line){
+        String[] str = line.split("\t");
+        if (str.length != COMPILE_OPTION_NUMBER) {
+            return null;
+        }
+        try{
+            ConfigCompile cc = new ConfigCompile(str);
+            if(cc.getSERVICE().equals(RealtimeCountConfig.EMPTY_OPTION_CHARACTER)){
+                logger.error("私有标签为空, 配置id为: "+ cc.getCOMPILE_ID());
+                return null;
+            }
+            return cc;
+        } catch (Exception e){
+            e.printStackTrace();
+            return null;
+        }
+    }
+}
diff --git a/src/main/java/cn/ac/iie/common/DataCenterLoad.java b/src/main/java/cn/ac/iie/common/DataCenterLoad.java
new file mode 100644
index 0000000..eeb1a3a
--- /dev/null
+++ b/src/main/java/cn/ac/iie/common/DataCenterLoad.java
@@ -0,0 +1,93 @@
+package cn.ac.iie.common;
+
+import org.apache.log4j.Logger;
+
+import java.text.SimpleDateFormat;
+import java.util.Map;
+
+/**
+ * Pushes accumulated pz counters to the data center over HTTP in
+ * newline-separated batches of {@code RealtimeCountConfig.BATCH_INSERT_NUM}
+ * rows. Failures are printed and swallowed (best effort).
+ */
+public class DataCenterLoad {
+    private static final String url = HttpManager.getInfoLoadInstance().getAddress();
+    private static Logger logger = Logger.getLogger(DataCenterLoad.class);
+
+    public DataCenterLoad() {
+    }
+
+    /**
+     * Formats (now + 5 minutes) floored to a 5-minute boundary, i.e. the end
+     * of the current 5-minute reporting window, as "yyyy-MM-dd HH:mm:ss".
+     */
+    private String generateTimeWithInterval() {
+        long stamp = System.currentTimeMillis() + 300000L;
+        long stamp5 = stamp / 300000L * 300000L;
+        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        return df.format(stamp5);
+    }
+
+    /** True when the value is neither null nor the empty string. */
+    private static boolean hasText(String s) {
+        return s != null && !s.isEmpty();
+    }
+
+    /** Strips the trailing newline, posts one batch and resets the buffer. */
+    private void postBatch(String table, StringBuffer sb) {
+        String data = sb.substring(0, sb.length() - 1);
+        HttpManager.getInfoLoadInstance().postToDataCenter(url, table, data);
+        sb.setLength(0);
+    }
+
+    /**
+     * Posts flow counters to topic/table "DF_PZ_FLOW_REPORT". Each map key is
+     * split into its option fields; rows with fewer than six fields or any
+     * null/empty field among the first six are skipped.
+     * <p>
+     * Fix: the original guarded emptiness with {@code options[i] != ""}
+     * (reference comparison, always true), so empty fields were never
+     * filtered; {@link #hasText} now checks content.
+     */
+    public void dfPzFlowBatchStorage(Map<String, Long> pzMap) {
+        StringBuffer sb = new StringBuffer();
+        String time5 = generateTimeWithInterval();
+        int nums = 0;
+        for (String key : pzMap.keySet()) {
+            try {
+                String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+                if (options.length >= 6 && hasText(options[0]) && hasText(options[1]) && hasText(options[2])
+                        && hasText(options[3]) && hasText(options[4]) && hasText(options[5])) {
+                    String aItem = options[0] + "," + options[1] + "," + options[2] + "," + options[3] + ","
+                            + options[4] + "," + options[5] + "," + pzMap.get(key) + "," + time5;
+                    sb.append(aItem).append('\n');
+                    nums++;
+                    if (nums >= RealtimeCountConfig.BATCH_INSERT_NUM) {
+                        String data = sb.substring(0, sb.length() - 1);
+                        logger.info("start to post data to dc---------> " + data);
+                        System.out.println("start to post data to dc---------> " + data);
+                        HttpManager.getInfoLoadInstance().postToDataCenter(url, "DF_PZ_FLOW_REPORT", data);
+                        sb.setLength(0);
+                        nums = 0;
+                    }
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        try {
+            if (nums != 0) {
+                postBatch("DF_PZ_FLOW_REPORT", sb);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    /**
+     * Posts pz counters to table "t_xa_df_pz_report_dt" as tab-separated rows
+     * built from option fields 2, 1 and 3 plus the count and window time.
+     * Keys that split into too few fields are skipped via the catch block.
+     */
+    public void dfPzBatchStorage(Map<String, Long> pzMap) {
+        StringBuffer sb = new StringBuffer();
+        String time5 = generateTimeWithInterval();
+        int nums = 0;
+        for (String key : pzMap.keySet()) {
+            try {
+                String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+                String aItem = options[2] + "\t" + options[1] + "\t" + options[3] + "\t" + pzMap.get(key) + "\t" + time5;
+                sb.append(aItem).append('\n');
+                nums++;
+                if (nums >= RealtimeCountConfig.BATCH_INSERT_NUM) {
+                    postBatch("t_xa_df_pz_report_dt", sb);
+                    nums = 0;
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        try {
+            if (nums != 0) {
+                postBatch("t_xa_df_pz_report_dt", sb);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+}
diff --git a/src/main/java/cn/ac/iie/common/HashTableConfig.java b/src/main/java/cn/ac/iie/common/HashTableConfig.java
new file mode 100644
index 0000000..4ee640f
--- /dev/null
+++ b/src/main/java/cn/ac/iie/common/HashTableConfig.java
@@ -0,0 +1,354 @@
+package cn.ac.iie.common;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Static lookup tables shared across the topology. The Chinese map keys are
+ * runtime lookup values matched against log/geo data — do not translate them.
+ */
+public class HashTableConfig implements Serializable {
+
+    private static final long serialVersionUID = -6843770738516927321L;
+
+    /**
+     * ISO 3166-1 alpha-2 country code, keyed by Chinese country/region name.
+     */
+    public static final Map<String, String> ISO_3166_1_ALPHA_2 = new HashMap<String, String>() {
+
+        // private static final long serialVersionUID = 5231960246987011322L;
+        private static final long serialVersionUID = -6972673762779232428L;
+
+        {
+
+            put("阿富汗","AF");
+            put("奥兰","AX");
+            put("阿尔巴尼亚","AL");
+            put("阿尔及利亚","DZ");
+            put("美属萨摩亚","AS");
+            put("安道尔","AD");
+            put("安哥拉","AO");
+            put("安圭拉","AI");
+            put("南极洲","AQ");
+            put("安提瓜和巴布达","AG");
+            put("阿根廷","AR");
+            put("亚美尼亚","AM");
+            put("阿鲁巴","AW");
+            put("澳大利亚","AU");
+            put("奥地利","AT");
+            put("阿塞拜疆","AZ");
+            put("巴哈马","BS");
+            put("巴林","BH");
+            put("孟加拉国","BD");
+            put("巴巴多斯","BB");
+            put("白俄罗斯","BY");
+            put("比利时","BE");
+            put("伯利兹","BZ");
+            put("贝宁","BJ");
+            put("百慕大","BM");
+            put("不丹","BT");
+            put("玻利维亚","BO");
+            put("荷兰加勒比区","BQ");
+            put("波斯尼亚和黑塞哥维那","BA");
+            put("博茨瓦纳","BW");
+            put("布韦岛","BV");
+            put("巴西","BR");
+            put("英属印度洋领地","IO");
+            put("文莱","BN");
+            put("保加利亚","BG");
+            put("布基纳法索","BF");
+            put("布隆迪","BI");
+            put("佛得角","CV");
+            put("柬埔寨","KH");
+            put("喀麦隆","CM");
+            put("加拿大","CA");
+            put("开曼群岛","KY");
+            put("中非","CF");
+            put("乍得","TD");
+            put("智利","CL");
+            put("中国","CN");
+            put("圣诞岛","CX");
+            put("科科斯(基林)群岛","CC");
+            put("哥伦比亚","CO");
+            put("科摩罗","KM");
+            put("刚果(布)","CG");
+            put("刚果(金)","CD");
+            put("库克群岛","CK");
+            put("哥斯达黎加","CR");
+            put("科特迪瓦","CI");
+            put("克罗地亚","HR");
+            put("古巴","CU");
+            put("库拉索","CW");
+            put("塞浦路斯","CY");
+            put("捷克","CZ");
+            put("丹麦","DK");
+            put("吉布提","DJ");
+            put("多米尼克","DM");
+            put("多米尼加","DO");
+            put("厄瓜多尔","EC");
+            put("埃及","EG");
+            put("萨尔瓦多","SV");
+            put("赤道几内亚","GQ");
+            put("厄立特里亚","ER");
+            put("爱沙尼亚","EE");
+            put("埃塞俄比亚","ET");
+            put("福克兰群岛","FK");
+            put("法罗群岛","FO");
+            put("斐济","FJ");
+            put("芬兰","FI");
+            put("法国","FR");
+            put("法属圭亚那","GF");
+            put("法属波利尼西亚","PF");
+            put("法属南方和南极洲领地","TF");
+            put("加蓬","GA");
+            put("冈比亚","GM");
+            put("格鲁吉亚","GE");
+            put("德国","DE");
+            put("加纳","GH");
+            put("直布罗陀","GI");
+            put("希腊","GR");
+            put("格陵兰","GL");
+            put("格林纳达","GD");
+            put("瓜德罗普","GP");
+            put("关岛","GU");
+            put("危地马拉","GT");
+            put("根西","GG");
+            put("几内亚","GN");
+            put("几内亚比绍","GW");
+            put("圭亚那","GY");
+            put("海地","HT");
+            put("赫德岛和麦克唐纳群岛","HM");
+            put("梵蒂冈","VA");
+            put("洪都拉斯","HN");
+            put("香港","HK");
+            put("匈牙利","HU");
+            put("冰岛","IS");
+            put("印度","IN");
+            put("印尼","ID");
+            put("伊朗","IR");
+            put("伊拉克","IQ");
+            put("爱尔兰","IE");
+            put("马恩岛","IM");
+            put("以色列","IL");
+            put("意大利","IT");
+            put("牙买加","JM");
+            put("日本","JP");
+            put("泽西","JE");
+            put("约旦","JO");
+            put("哈萨克斯坦","KZ");
+            put("肯尼亚","KE");
+            put("基里巴斯","KI");
+            put("朝鲜","KP");
+            put("韩国","KR");
+            put("科威特","KW");
+            put("吉尔吉斯斯坦","KG");
+            put("老挝","LA");
+            put("拉脱维亚","LV");
+            put("黎巴嫩","LB");
+            put("莱索托","LS");
+            put("利比里亚","LR");
+            put("利比亚","LY");
+            put("列支敦士登","LI");
+            put("立陶宛","LT");
+            put("卢森堡","LU");
+            put("澳门","MO");
+            put("马其顿","MK");
+            put("马达加斯加","MG");
+            put("马拉维","MW");
+            put("马来西亚","MY");
+            put("马尔代夫","MV");
+            put("马里","ML");
+            put("马耳他","MT");
+            put("马绍尔群岛","MH");
+            put("马提尼克","MQ");
+            put("毛里塔尼亚","MR");
+            put("毛里求斯","MU");
+            put("马约特","YT");
+            put("墨西哥","MX");
+            put("密克罗尼西亚联邦","FM");
+            put("摩尔多瓦","MD");
+            put("摩纳哥","MC");
+            put("蒙古国","MN");
+            put("黑山","ME");
+            put("蒙特塞拉特","MS");
+            put("摩洛哥","MA");
+            put("莫桑比克","MZ");
+            put("缅甸","MM");
+            put("纳米比亚","NA");
+            put("瑙鲁","NR");
+            put("尼泊尔","NP");
+            put("荷兰","NL");
+            put("新喀里多尼亚","NC");
+            put("新西兰","NZ");
+            put("尼加拉瓜","NI");
+            put("尼日尔","NE");
+            put("尼日利亚","NG");
+            put("纽埃","NU");
+            put("诺福克岛","NF");
+            put("北马里亚纳群岛","MP");
+            put("挪威","NO");
+            put("阿曼","OM");
+            put("巴基斯坦","PK");
+            put("帕劳","PW");
+            put("巴勒斯坦","PS");
+            put("巴拿马","PA");
+            put("巴布亚新几内亚","PG");
+            put("巴拉圭","PY");
+            put("秘鲁","PE");
+            put("菲律宾","PH");
+            put("皮特凯恩群岛","PN");
+            put("波兰","PL");
+            put("葡萄牙","PT");
+            put("波多黎各","PR");
+            put("卡塔尔","QA");
+            put("留尼汪","RE");
+            put("罗马尼亚","RO");
+            put("俄罗斯","RU");
+            put("卢旺达","RW");
+            put("圣巴泰勒米","BL");
+            put("圣赫勒拿、阿森松和特里斯坦-达库尼亚","SH");
+            put("圣基茨和尼维斯","KN");
+            put("圣卢西亚","LC");
+            put("法属圣马丁","MF");
+            put("圣皮埃尔和密克隆","PM");
+            put("圣文森特和格林纳丁斯","VC");
+            put("萨摩亚","WS");
+            put("圣马力诺","SM");
+            put("圣多美和普林西比","ST");
+            put("沙特阿拉伯","SA");
+            put("塞内加尔","SN");
+            put("塞尔维亚","RS");
+            put("塞舌尔","SC");
+            put("塞拉利昂","SL");
+            put("新加坡","SG");
+            put("圣马丁","SX");
+            put("斯洛伐克","SK");
+            put("斯洛文尼亚","SI");
+            put("所罗门群岛","SB");
+            put("索马里","SO");
+            put("南非","ZA");
+            put("南乔治亚和南桑威奇群岛","GS");
+            put("南苏丹","SS");
+            put("西班牙","ES");
+            put("斯里兰卡","LK");
+            put("苏丹","SD");
+            put("苏里南","SR");
+            put("斯瓦尔巴和扬马延","SJ");
+            put("斯威士兰","SZ");
+            put("瑞典","SE");
+            put("瑞士","CH");
+            put("叙利亚","SY");
+            put("台湾","TW");
+            put("塔吉克斯坦","TJ");
+            put("坦桑尼亚","TZ");
+            put("泰国","TH");
+            put("东帝汶","TL");
+            put("多哥","TG");
+            put("托克劳","TK");
+            put("汤加","TO");
+            put("特立尼达和多巴哥","TT");
+            put("突尼斯","TN");
+            put("土耳其","TR");
+            put("土库曼斯坦","TM");
+            put("特克斯和凯科斯群岛","TC");
+            put("图瓦卢","TV");
+            put("乌干达","UG");
+            put("乌克兰","UA");
+            put("阿联酋","AE");
+            put("英国","GB");
+            put("美国","US");
+            put("美国本土外小岛屿","UM");
+            put("乌拉圭","UY");
+            put("乌兹别克斯坦","UZ");
+            put("瓦努阿图","VU");
+            put("委内瑞拉","VE");
+            put("越南","VN");
+            put("英属维尔京群岛","VG");
+            put("美属维尔京群岛","VI");
+            put("瓦利斯和富图纳","WF");
+            put("西撒哈拉","EH");
+            put("也门","YE");
+            put("赞比亚","ZM");
+            put("津巴布韦","ZW");
+
+        }
+    };
+
+    /**
+     * Numeric service (business) type code, keyed by log type name.
+     */
+    public static final Map<String, Integer> SERVICE_TYPE_MAP = new HashMap<String, Integer>() {
+        private static final long serialVersionUID = 8445342694006806126L;
+
+        {
+            put("DF-IP-PORT-LOG", 1);
+            put("FX-IP-PORT", 2);
+            put("DF-HTTP-REQ-LOG", 3);
+            put("DF-HTTP-RES-LOG", 4);
+            put("DF-HTTP-KEYWORD-LOG", 5);
+            put("DF-DNS-LOG", 6);
+            put("DF-PPTP-LOG", 7);
+            put("DF-L2TP-LOG", 8);
+            put("DF-IPSEC-LOG", 9);
+            put("DF-OPENVPN-LOG", 10);
+            put("DF-SSH-LOG", 11);
+            put("DF-SSL-LOG", 12);
+            put("DF-MAIL-LOG", 13);
+            put("DF-FTP-LOG", 14);
+
+            put("DJ-IP-PORT-LOG", 48);
+            put("DJ-HTTP-REQ-LOG", 49);
+            put("DJ-HTTP-RES-LOG", 50);
+            put("DJ-HTTP-KEYWORD-LOG", 51);
+            put("DJ-DNS-LOG", 52);
+            put("DJ-FTP-LOG", 53);
+            put("DJ-PPTP-LOG", 54);
+            put("DJ-L2TP-LOG", 55);
+            put("DJ-IPSEC-LOG", 56);
+            put("DJ-OPENVPN-LOG", 57);
+            put("DJ-SSH-LOG", 58);
+            put("DJ-SSL-LOG", 59);
+            put("DJ-MAIL-LOG", 60);
+        }
+    };
+
+    // Log-table field count per log type (excluding the id column, because the
+    // id is not sent back) ----- modified 2018-12-28
+    public static final Map<String, Integer> LOG_SUB_OPTION_NUM_MAP = new HashMap<String, Integer>() {
+
+        private static final long serialVersionUID = 5231960246987011322L;
+
+        {
+            put("DF-IP-PORT-LOG", 0);
+            put("DF-HTTP-REQ-LOG", 12);
+            put("DF-HTTP-RES-LOG", 15);
+            put("DF-HTTP-KEYWORD-LOG", 10);
+            put("DF-MAIL-LOG", 5);
+            put("DF-DNS-LOG", 9);
+            put("DF-FTP-LOG", 1);
+            put("DF-PPTP-LOG", 3);
+            put("DF-L2TP-LOG", 4);
+            put("DF-IPSEC-LOG", 2);
+            put("DF-OPENVPN-LOG", 4);
+            put("DF-SSH-LOG", 6);
+            put("DF-SSL-LOG", 6);
+            put("DF-TUNNEL-RANDOM-LOG", 1);
+
+            put("NTC-CONN-RECORD-LOG", 5);// production: the field count on the real table
+//            put("NTC-CONN-RECORD-LOG", 3);// for testing: the field count in the test data
+
+
+            put("DJ-IP-PORT-LOG", 0);
+            put("DJ-HTTP-REQ-LOG", 12);
+            put("DJ-HTTP-RES-LOG", 15);
+            put("DJ-HTTP-KEYWORD-LOG", 10);
+            put("DJ-MAIL-LOG", 5);
+            put("DJ-DNS-LOG", 9);
+            put("DJ-FTP-LOG", 1);
+            put("DJ-PPTP-LOG", 3);
+            put("DJ-L2TP-LOG", 4);
+            put("DJ-IPSEC-LOG", 2);
+            put("DJ-OPENVPN-LOG", 4);
+            put("DJ-SSH-LOG", 6);
+            put("DJ-SSL-LOG", 6);
+
+        }
+    };
+
+} \ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/common/HttpManager.java b/src/main/java/cn/ac/iie/common/HttpManager.java
new file mode 100644
index 0000000..a1aa590
--- /dev/null
+++ b/src/main/java/cn/ac/iie/common/HttpManager.java
@@ -0,0 +1,218 @@
+package cn.ac.iie.common;
+
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.net.MalformedURLException;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.util.Random;
+import javax.net.ssl.SSLException;
+
+//import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpEntityEnclosingRequest;
+import org.apache.http.HttpRequest;
+import org.apache.http.client.ClientProtocolException;
+import org.apache.http.client.HttpRequestRetryHandler;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.conn.ConnectTimeoutException;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.client.LaxRedirectStrategy;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.protocol.HttpContext;
+import org.apache.http.util.EntityUtils;
+import org.apache.log4j.Logger;
+
+
+public class HttpManager {
+ // 创建httpclient连接池
+ private PoolingHttpClientConnectionManager httpClientConnectionManager = null;
+ private CloseableHttpClient httpClient = null;
+ //类初始化时,自动实例化,饿汉单例模式
+ private static final HttpManager manager = new HttpManager();
+ private static Logger logger = Logger.getLogger(HttpManager.class);
+
+ public static HttpManager getInfoLoadInstance(){
+ return manager;
+ }
+
+ private HttpManager(){
+ //初始化httpClient
+ initHttpClient();
+ System.setProperty("sun.net.inetaddr.ttl", "300");
+ System.setProperty("sun.net.inetaddr.negative.ttl", "10");
+ }
+
+ public void initHttpClient(){
+ //创建httpclient连接池
+ httpClientConnectionManager = new PoolingHttpClientConnectionManager();
+ //设置连接池最大数量
+ httpClientConnectionManager.setMaxTotal(2000);
+ //设置单个路由最大连接数量
+ httpClientConnectionManager.setDefaultMaxPerRoute(400);
+
+ httpClient=getHttpClient();
+ }
+ //请求重试机制
+
+ HttpRequestRetryHandler myRetryHandler = new HttpRequestRetryHandler() {
+ @Override
+ public boolean retryRequest(IOException exception, int executionCount, HttpContext context) {
+ if (executionCount >= 2) {
+ // 超过两次则不再重试请求
+ logger.error("http连接已重试"+executionCount+"次, 重试失败");
+ return false;
+ }
+ if (exception instanceof InterruptedIOException) {
+ // Timeout
+ logger.error("InterruptedIOException, 重试连接。。。");
+ return true;
+ }
+ if (exception instanceof UnknownHostException) {
+ // Unknown host
+ return false;
+ }
+ if (exception instanceof ConnectTimeoutException) {
+ logger.error("ConnectTimeoutException, 重试连接。。。");
+ // Connection refused
+ return true;
+ }
+ if (exception instanceof SSLException) {
+ // SSL handshake exception
+ return false;
+ }
+ HttpClientContext clientContext = HttpClientContext.adapt(context);
+ HttpRequest request = clientContext.getRequest();
+ boolean idempotent = !(request instanceof HttpEntityEnclosingRequest);
+ if (idempotent) {
+ logger.error("request is idempotent, 重试连接。。。");
+ // Retry if the request is considered idempotent
+ return true;
+ }
+ return false;
+ }
+ };
+
+ public CloseableHttpClient getHttpClient(){
+ // 创建全局的requestConfig
+ RequestConfig requestConfig = RequestConfig.custom()
+ .setConnectTimeout(3000)
+ .setSocketTimeout(3000)
+ //.setCookieSpec(CookieSpecs.BEST_MATCH)
+ .build();
+ // 声明重定向策略对象
+ LaxRedirectStrategy redirectStrategy = new LaxRedirectStrategy();
+
+ CloseableHttpClient httpClient = HttpClients.custom().setConnectionManager(httpClientConnectionManager)
+ .setDefaultRequestConfig(requestConfig)
+ .setRedirectStrategy(redirectStrategy)
+ .setRetryHandler(myRetryHandler)
+ .build();
+ return httpClient;
+ }
+
+ public String getAddress(){
+ String[] addrs = RealtimeCountConfig.DATACENTER_ADDRS.split(",");
+
+ Random rnd = new Random();
+ Integer addrIndex = rnd.nextInt(addrs.length);
+ return addrs[addrIndex].trim();
+ }
+
+
+ public void postToDataCenter(String url, String topic, String data){
+ CloseableHttpResponse response = null;
+ HttpPost httpPost = null;
+ url = url.trim();
+ try {
+ httpPost = new HttpPost(url);
+// httpPost.addHeader("Connection","keep-alive");
+// httpPost.addHeader("Accept-Encoding", "gzip, deflate");
+ //httpPost.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36");
+
+// httpPost.addHeader("User", RealtimeCountConfig.DATACENTER_USERNAME);
+// httpPost.addHeader("Password", RealtimeCountConfig.DATACENTER_PASSWORD);
+ httpPost.addHeader("Topic", topic);
+ httpPost.addHeader("Schema-Version", "2");
+ httpPost.addHeader("Format", "csv");
+// httpPost.addHeader("Row-Split", "\\n");
+// httpPost.addHeader("Field-Split", "\\t");
+ httpPost.addHeader("Row-Split", "\\n");
+ httpPost.addHeader("Field-Split", ",");
+// StringEntity payload = new StringEntity(data, Charset.forName("utf-8"));
+ StringEntity payload = new StringEntity(data);
+ //payload.setContentType("text/xml; charset=UTF-8");
+// payload.setContentEncoding("utf-8");
+ httpPost.setEntity(payload);
+ logger.info("数据中心加载内容: " + data);
+ //执行请求
+ response = httpClient.execute(httpPost);
+ try{
+ int statuCode = response.getStatusLine().getStatusCode();
+ //Header[] headers = response.getAllHeaders();
+ //logger.info("<<response header>>:");
+ //System.out.println("<<response header>>:");
+ //for(int i=0; i<headers.length; i++){
+ // logger.info(headers[i].getName() +" : "+headers[i].getValue());
+ //System.out.println(headers[i].getName() +" : "+headers[i].getValue());
+ //}
+ HttpEntity entity = response.getEntity();
+ if(statuCode==200){
+ logger.info("数据中心加载成功, 返回码: "+ statuCode);
+ System.out.println("数据中心加载成功, 返回码: " + statuCode);
+ EntityUtils.consume(entity);
+ }else{
+ String ret = EntityUtils.toString(entity);
+ EntityUtils.consume(entity);
+ logger.info("数据中心加载失败: "+ret+" --- code: "+statuCode+" ---失败数据为: \n"+data);
+ System.out.println("数据中心加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + data);
+ logger.error("数据中心加载失败: "+ret+" --- code: "+statuCode);
+ System.out.println("数据中心加载失败: " + ret + " --- code: " + statuCode);
+ }
+ } catch (Exception e){
+ e.printStackTrace();
+ }
+
+ } catch (MalformedURLException e) {
+ //执行URL url = new URL()的异常
+ e.printStackTrace();
+ } catch (ClientProtocolException e) {
+ // 执行httpClient.execute(httpGet)的异常
+ e.printStackTrace();
+ } catch (IOException e) {
+ // 执行httpClient.execute(httpGet)的异常
+ e.printStackTrace();
+ } finally{
+ if(response != null){
+ try {
+ response.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ httpPost.abort();
+ /**
+ * httpclient的链接有线程池管理,这里不用直接关闭
+ */
+// try {//关闭连接
+// httpClient.close();
+// } catch (IOException e) {
+// e.printStackTrace();
+// }
+ }
+ }
+
+// public static void main(String[] args) throws InterruptedException {
+// // TODO Auto-generated method stub
+// for(int i=0; i<100000; i++){
+// System.out.println("------------- "+i+" ------------");
+// DoubleWriteHttpManager.getInfoLoadInstance().postToDataCenter("http://www.runoob.com/try/ajax/demo_test_post.php", "topic", "data");
+// }
+// }
+
+}
diff --git a/src/main/java/cn/ac/iie/common/RealtimeCountConfig.java b/src/main/java/cn/ac/iie/common/RealtimeCountConfig.java
new file mode 100644
index 0000000..0184f45
--- /dev/null
+++ b/src/main/java/cn/ac/iie/common/RealtimeCountConfig.java
@@ -0,0 +1,99 @@
+package cn.ac.iie.common;
+
+import cn.ac.iie.utils.RealtimeCountConfigurations;
+
+import java.io.Serializable;
+
+/**
+ * Central, statically-loaded configuration for the realtime-count topology.
+ * Values are read once at class-load time from realtime_config.properties
+ * (index 0) and storm_config.properties (index 1) via RealtimeCountConfigurations.
+ */
+public class RealtimeCountConfig implements Serializable{
+
+	private static final long serialVersionUID = -8649024767966235184L;
+	public static final String LOG_STRING_SPLITTER = "\t";
+	public static final String BETWEEN_BOLTS_SPLITTER = "~=~";
+	public static final String EMPTY_OPTION_CHARACTER = "-";
+	/**
+	 * Field count of the common log table (excluding id: it is auto-increment
+	 * and not sent back by the upstream).
+	 */
+	public static final Integer LOG_COMMON_FIELD_NUM = 23;// common-table field count (excluding auto-increment id)
+
+	//-----------------realtime_config.properties------------------
+	public static final String BOOTSTRAP_SERVERS = RealtimeCountConfigurations.getStringProperty(0, "bootstrap.servers");
+	public static final String BOOTSTRAP_OUTPUT_SERVERS = RealtimeCountConfigurations.getStringProperty(0, "bootstrap.output.servers");
+	public static final String ACTIVE_SYSTEM = RealtimeCountConfigurations.getStringProperty(0, "active.system");
+	public static final Integer BATCH_INSERT_NUM = RealtimeCountConfigurations.getIntProperty(0, "batch.insert.num");
+	public static final String GROUP_ID = RealtimeCountConfigurations.getStringProperty(0, "group.id");
+	public static final String KAFKA_TOPIC = RealtimeCountConfigurations.getStringProperty(0, "kafka.topic");
+	public static final String KAFKA_NTC_ORI_TOPIC = RealtimeCountConfigurations.getStringProperty(0, "kafka.ntc.ori.topic");
+	public static final String KAFKA_SIP_ORIGIN_TOPIC = RealtimeCountConfigurations.getStringProperty(0, "kafka.sip.origin.topic");
+	public static final String KAFKA_NTC_KILLED_TOPIC = RealtimeCountConfigurations.getStringProperty(0, "kafka.ntc.killed.topic");
+	public static final String KAFKA_SIP_COMPLEMENT_TOPIC = RealtimeCountConfigurations.getStringProperty(0, "kafka.sip.complement.topic");
+	public static final String KAFKA_ROUTE_RELATION_TOPIC = RealtimeCountConfigurations.getStringProperty(0, "kafka.route.relation.topic");
+
+
+	public static final String ALL_LOG_OUTPUT_CONTROLLER = RealtimeCountConfigurations.getStringProperty(0, "all.log.output.controller");// global switch for catch-block log output
+	public static final String PART_LOG_OUTPUT_CONTROLLER = RealtimeCountConfigurations.getStringProperty(0, "part.log.output.controller");// switch for selective (partial) log output
+
+
+	public static final String GROUP_ID_PREFIX = RealtimeCountConfigurations.getStringProperty(0, "group.id.prefix");// Kafka group.id prefix
+	public static final String GROUP_ID_SUFFIX = RealtimeCountConfigurations.getStringProperty(0, "group.id.suffix");// Kafka group.id suffix
+	public static final String FETCH_MAX_BYTES = RealtimeCountConfigurations.getStringProperty(0, "fetch.max.bytes");
+	public static final String MAX_PARTITION_FETCH_BYTES = RealtimeCountConfigurations.getStringProperty(0, "max.partition.fetch.bytes");
+	public static final String MAX_POLL_INTERVAL_MS = RealtimeCountConfigurations.getStringProperty(0, "max.poll.interval.ms");
+	public static final String MAX_POLL_RECORDS = RealtimeCountConfigurations.getStringProperty(0, "max.poll.records");
+	public static final String SESSION_TIMEOUT_MS = RealtimeCountConfigurations.getStringProperty(0, "session.timeout.ms");
+	public static final String AUTO_OFFSET_RESET = RealtimeCountConfigurations.getStringProperty(0, "auto.offset.reset");
+	public static final String DATACENTER_ADDRS = RealtimeCountConfigurations.getStringProperty(0, "datacenter.addrs");
+	public static final String DATACENTER_USERNAME = RealtimeCountConfigurations.getStringProperty(0, "datacenter.username");
+	public static final String DATACENTER_PASSWORD = RealtimeCountConfigurations.getStringProperty(0, "datacenter.password");
+	public static final String TABLE_NAME = RealtimeCountConfigurations.getStringProperty(0, "table.name");
+	public static final String TABLE_KILLED_NAME = RealtimeCountConfigurations.getStringProperty(0, "table.killed.name");
+	public static final Integer BATCH_CHINSERT_NUM = RealtimeCountConfigurations.getIntProperty(0, "batch.chinsert.num");// ClickHouse batch insert size
+	public static final Integer BATCH_KAFKA_INSERT_NUM = RealtimeCountConfigurations.getIntProperty(0, "batch.kafka.insert.num");// Kafka batch insert size
+	public static final Integer BATCH_CHINSERT_KILLED_NUM = RealtimeCountConfigurations.getIntProperty(0, "batch.chinsert.killed.num");
+	public static final String IP_V4_LIBRARY = RealtimeCountConfigurations.getStringProperty(0, "ip.v4.library");// IPv4 geolocation library
+	public static final String IP_V6_LIBRARY = RealtimeCountConfigurations.getStringProperty(0, "ip.v6.library");// IPv6 geolocation library
+	public static final String IPIP_LIBRARY = RealtimeCountConfigurations.getStringProperty(0, "ipip.library");// IPIP geolocation library
+
+	public static final String HDFS_URL = RealtimeCountConfigurations.getStringProperty(0,"hdfs.url");
+	public static final String HDFS_PATH = RealtimeCountConfigurations.getStringProperty(0,"hdfs.path");
+	public static final String HDFS_USER = RealtimeCountConfigurations.getStringProperty(0,"hdfs.user");
+	// public static final String HIVE_URL = RealtimeCountConfigurations.getStringProperty(0,"hive.url");
+//	public static final String HIVE_USERNAME = RealtimeCountConfigurations.getStringProperty(0,"hive.username");
+//	public static final String HIVE_PASSWORD = RealtimeCountConfigurations.getStringProperty(0,"hive.password");
+	public static final String HIVE_SIP_CLEAN_TABLE = RealtimeCountConfigurations.getStringProperty(0,"hive.sip.clean.table");
+//	public static final String HIVE_SIP_ROUTE_TABLE = RealtimeCountConfigurations.getStringProperty(0,"hive.sip.route.table");
+
+	//---------------storm_config.properties---------------
+	public static final Integer SPOUT_PARALLELISM = RealtimeCountConfigurations.getIntProperty(1, "spout.parallelism");
+	public static final Integer FORMAT_BOLT_PARALLELISM = RealtimeCountConfigurations.getIntProperty(1, "format.bolt.parallelism");
+	public static final Integer BUFFER_BOLT_PARALLELISM = RealtimeCountConfigurations.getIntProperty(1, "buffer.bolt.parallelism");
+	public static final Integer DATABASE_BOLT_PARALLELISM = RealtimeCountConfigurations.getIntProperty(1, "database.bolt.parallelism");
+	public static final Integer COUNT_BOLT_PARALLELISM = RealtimeCountConfigurations.getIntProperty(1, "count.bolt.parallelism");
+	public static final Integer MERGE_BOLT_PARALLELISM = RealtimeCountConfigurations.getIntProperty(1, "merge.bolt.parallelism");
+	public static final Integer TOPOLOGY_WORKERS = RealtimeCountConfigurations.getIntProperty(1, "topology.workers");
+	public static final Integer GROUP_STRATEGY = RealtimeCountConfigurations.getIntProperty(1, "group.strategy");
+	public static final Integer TOPOLOGY_TICK_TUPLE_COMP_FREQ_SECS = RealtimeCountConfigurations.getIntProperty(1, "topology.tick.tuple.comp.freq.secs");
+	public static final Integer TOPOLOGY_TICK_TUPLE_FREQ_SECS = RealtimeCountConfigurations.getIntProperty(1, "topology.tick.tuple.freq.secs");
+	public static final Integer TOPOLOGY_TICK_TUPLE_COUNT_FREQ_SECS = RealtimeCountConfigurations.getIntProperty(1, "topology.tick.tuple.count.freq.secs");
+	public static final Integer TOPOLOGY_TICK_TUPLE_MERGE_FREQ_SECS = RealtimeCountConfigurations.getIntProperty(1, "topology.tick.tuple.merge.freq.secs");
+	public static final Integer TOPOLOGY_CONFIG_MAX_SPOUT_PENDING = RealtimeCountConfigurations.getIntProperty(1, "topology.config.max.spout.pending");
+	public static final Integer TOPOLOGY_NUM_ACKS = RealtimeCountConfigurations.getIntProperty(1, "topology.num.acks");
+
+	// Print the effective configuration (startup diagnostics).
+	public static void configShow(){
+		System.out.println("BOOTSTRAP_SERVERS: "+BOOTSTRAP_SERVERS);
+		System.out.println("KAFKA_TOPIC: "+KAFKA_TOPIC);
+		System.out.println("ACTIVE_SYSTEM: "+ACTIVE_SYSTEM);
+		System.out.println("GROUP_ID: "+GROUP_ID);
+		System.out.println("GROUP_ID_PREFIX: "+GROUP_ID_PREFIX);
+		System.out.println("AUTO_OFFSET_RESET: "+AUTO_OFFSET_RESET);
+		System.out.println("TOPOLOGY_NUM_ACKS: "+TOPOLOGY_NUM_ACKS);
+		System.out.println("BATCH_INSERT_NUM: "+BATCH_INSERT_NUM);
+		System.out.println("TOPOLOGY_TICK_TUPLE_FREQ_SECS: "+TOPOLOGY_TICK_TUPLE_FREQ_SECS);
+		System.out.println("TOPOLOGY_CONFIG_MAX_SPOUT_PENDING: "+TOPOLOGY_CONFIG_MAX_SPOUT_PENDING);
+		System.out.println("TOPOLOGY_WORKERS: "+TOPOLOGY_WORKERS);
+		System.out.println("SPOUT_PARALLELISM: "+SPOUT_PARALLELISM);
+		System.out.println("FORMAT_BOLT_PARALLELISM: "+FORMAT_BOLT_PARALLELISM);
+		System.out.println("DATABASE_BOLT_PARALLELISM: "+DATABASE_BOLT_PARALLELISM);
+		System.out.println("GROUP_STRATEGY: "+GROUP_STRATEGY);
+	}
+} \ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/dao/DataBaseBusiness.java b/src/main/java/cn/ac/iie/dao/DataBaseBusiness.java
new file mode 100644
index 0000000..9fae542
--- /dev/null
+++ b/src/main/java/cn/ac/iie/dao/DataBaseBusiness.java
@@ -0,0 +1,1604 @@
+package cn.ac.iie.dao;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Map;
+import org.apache.log4j.Logger;
+import cn.ac.iie.common.RealtimeCountConfig;
+
+
+public final class DataBaseBusiness {
+ private static final JdbcConnectionManager manager = JdbcConnectionManager.getInstance();
+ private static final Logger logger = Logger.getLogger(DataBaseBusiness.class);
+ private Connection connection;
+ private PreparedStatement pstmt;
+
+	// Stateless construction: the JDBC connection and statement are acquired
+	// lazily inside each batch-storage method.
+	public DataBaseBusiness(){
+
+	}
+
+	/**
+	 * Returns the epoch-millisecond timestamp of the END of the current
+	 * 5-minute window: "now" floored to a 5-minute boundary (seconds zeroed),
+	 * plus 5 minutes. Used as the REPORT_TIME of aggregated rows.
+	 *
+	 * @return epoch milliseconds of the next 5-minute boundary
+	 * @throws ParseException if the re-assembled timestamp cannot be parsed
+	 */
+	private Long generateTimeWithInterval() throws ParseException{
+		SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+		String dt = df.format(new Date());
+
+		// dt splits into "yyyy-MM-dd HH" / "mm" / "ss".
+		String[] s = dt.split(":");
+		int minute = Integer.parseInt(s[1]);
+		// Floor the minute to its 5-minute bucket and zero the seconds.
+		// Replaces the previous 12-branch if/else chain (same result) and drops
+		// the per-call debug System.out.println.
+		String ret = String.format("%s:%02d:00", s[0], (minute / 5) * 5);
+
+		Date date = df.parse(ret);
+		// Advance to the end of the window (+5 minutes).
+		return date.getTime() + 300000L;
+	}
+
+	/** Binds {@code str} as a BigDecimal, or SQL NULL when it is "-" or empty. */
+	@SuppressWarnings("unused")
+	private void setBigDecimalNullable(PreparedStatement pstmts, int index, String str) throws Exception {
+		boolean absent = str.isEmpty() || str.equals(RealtimeCountConfig.EMPTY_OPTION_CHARACTER);
+		if (!absent) {
+			pstmts.setBigDecimal(index, new BigDecimal(str));
+			return;
+		}
+		pstmts.setNull(index, Types.BIGINT);
+	}
+
+	/** Binds {@code str} as a BigDecimal; {@code str} must be a valid decimal literal. */
+	@SuppressWarnings("unused")
+	private void setBigDecimal(PreparedStatement pstmts, int index, String str) throws Exception {
+		pstmts.setBigDecimal(index, new BigDecimal(str));
+	}
+
+	/** Binds {@code str} parsed as a long, or SQL NULL when it is "-" or empty. */
+	@SuppressWarnings("unused")
+	private void setLongNullable(PreparedStatement pstmts, int index, String str) throws Exception {
+		boolean absent = str.isEmpty() || str.equals(RealtimeCountConfig.EMPTY_OPTION_CHARACTER);
+		if (!absent) {
+			pstmts.setLong(index, Long.parseLong(str));
+			return;
+		}
+		pstmts.setNull(index, Types.BIGINT);
+	}
+
+	/** Binds {@code str} parsed as a long; throws NumberFormatException on bad input. */
+	private void setLong(PreparedStatement pstmts, int index, String str) throws Exception {
+		pstmts.setLong(index, Long.parseLong(str));
+	}
+
+	/** Binds {@code str} parsed as an int, or SQL NULL when it is "-" or empty. */
+	@SuppressWarnings("unused")
+	private void setIntNullable(PreparedStatement pstmts, int index, String str) throws Exception {
+		boolean absent = str.isEmpty() || str.equals(RealtimeCountConfig.EMPTY_OPTION_CHARACTER);
+		if (!absent) {
+			pstmts.setInt(index, Integer.parseInt(str));
+			return;
+		}
+		pstmts.setNull(index, Types.INTEGER);
+	}
+
+	/** Binds {@code str} parsed as an int; throws NumberFormatException on bad input. */
+	private void setInt(PreparedStatement pstmts, int index, String str) throws Exception {
+		pstmts.setInt(index, Integer.parseInt(str));
+	}
+
+	/**
+	 * Binds {@code str} truncated to at most {@code maxLen} UTF-8 bytes; binds
+	 * the empty string when the value is "-" or empty.
+	 *
+	 * @param maxLen maximum column width in UTF-8 bytes
+	 */
+	private void setStringNullable(PreparedStatement pstmts, int index, String str, Integer maxLen) throws Exception {
+		if(str.equals(RealtimeCountConfig.EMPTY_OPTION_CHARACTER) || str.isEmpty()){
+			pstmts.setString(index, "");
+		} else {
+			int byteLen = str.getBytes("utf-8").length;
+			if(byteLen>maxLen){
+				// Drop one char per excess byte: every char is >= 1 byte, so the
+				// result always fits within maxLen bytes (it may over-truncate
+				// multi-byte text). Fix: clamp at 0 — for mostly multi-byte
+				// strings the byte excess can exceed the char count, which
+				// previously produced a negative index and threw
+				// StringIndexOutOfBoundsException.
+				int end = Math.max(0, str.length()-(byteLen-maxLen));
+				pstmts.setString(index, str.substring(0, end));
+			} else {
+				pstmts.setString(index, str);
+			}
+		}
+	}
+
+	/**
+	 * Binds {@code str} truncated to at most {@code maxLen} UTF-8 bytes.
+	 * Rejects "-"/empty values — use setStringNullable for optional columns.
+	 *
+	 * @param maxLen maximum column width in UTF-8 bytes
+	 * @throws SQLException if the value is "-" or empty
+	 */
+	private void setString(PreparedStatement pstmts, int index, String str, Integer maxLen) throws Exception {
+		if(str.equals(RealtimeCountConfig.EMPTY_OPTION_CHARACTER) || str.isEmpty()){
+			throw new SQLException("字符串为空,如字符串可空,请使用setStringNullable函数");
+		} else {
+			int byteLen = str.getBytes("utf-8").length;
+			if(byteLen>maxLen){
+				// Drop one char per excess byte (result always fits in maxLen
+				// bytes, possibly over-truncating multi-byte text). Fix: clamp at
+				// 0 — for mostly multi-byte strings the byte excess can exceed the
+				// char count, which previously produced a negative index and threw
+				// StringIndexOutOfBoundsException.
+				int end = Math.max(0, str.length()-(byteLen-maxLen));
+				pstmts.setString(index, str.substring(0, end));
+			} else {
+				pstmts.setString(index, str);
+			}
+		}
+	}
+
+	/** Binds epoch-second {@code str} as a Timestamp, or SQL NULL when "-"/empty. */
+	@SuppressWarnings("unused")
+	private void setTimeStampNullable(PreparedStatement pstmts, int index, String str) throws Exception {
+		boolean absent = str.isEmpty() || str.equals(RealtimeCountConfig.EMPTY_OPTION_CHARACTER);
+		if (absent) {
+			pstmts.setNull(index, Types.TIMESTAMP);
+			return;
+		}
+		// Appending "000" converts epoch seconds to milliseconds.
+		pstmts.setTimestamp(index, new Timestamp(Long.parseLong(str + "000")));
+	}
+
+	/**
+	 * Binds epoch-second {@code str} as a Timestamp; the "000" suffix converts
+	 * seconds to milliseconds.
+	 *
+	 * @throws SQLException if the value is "-" or empty (use setTimeStampNullable instead)
+	 */
+	@SuppressWarnings("unused")
+	private void setTimeStamp(PreparedStatement pstmts, int index, String str) throws Exception {
+		if(str.equals(RealtimeCountConfig.EMPTY_OPTION_CHARACTER) || str.isEmpty()){
+			throw new SQLException("时间为空,如字符串可空,请使用setTimeStampNullable函数");
+		} else {
+			pstmts.setTimestamp(index, new Timestamp(Long.parseLong(str + "000")));
+		}
+	}
+
+	/** Binds the end-of-current-5-minute-window timestamp (REPORT_TIME) at {@code index}. */
+	private void generateTimeStamp(PreparedStatement pstmts, int index) throws Exception {
+		pstmts.setTimestamp(index, new Timestamp(generateTimeWithInterval()));
+	}
+
+	/**
+	 * Batch-inserts DF per-config (PZ) counters into DF_PZ_REPORT.
+	 * Key layout (split on BETWEEN_BOLTS_SPLITTER): [1]=activeSys, [2]=cfgId,
+	 * [3]=service; options[0] is skipped — presumably a map-type tag, TODO confirm.
+	 * Rows that fail to parse are logged and skipped; a commit is issued every
+	 * BATCH_INSERT_NUM rows plus one final flush of the partial batch.
+	 * NOTE(review): connection/pstmt are shared instance fields, so this class
+	 * does not look safe for concurrent use by multiple threads — confirm each
+	 * bolt uses its own DataBaseBusiness instance.
+	 */
+	public void dfPzBatchStorage(Map<String, Long> pzMap) {
+		String sql = " insert into DF_PZ_REPORT(STAT_ID, ACTIVE_SYS, CFG_ID, SERVICE, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DF_PZ_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : pzMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setLong(pstmt, 2, options[2]);
+				setInt(pstmt, 3, options[3]);
+				pstmt.setLong(4, pzMap.get(key));
+				generateTimeStamp(pstmt, 5);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+
+	/**
+	 * Batch-inserts DF per-service counters into DF_SERVICE_REPORT.
+	 * Key layout: [1]=activeSys, [2]=service (options[0] skipped).
+	 * Bad rows are logged and skipped; commits every BATCH_INSERT_NUM rows
+	 * plus one final flush.
+	 */
+	public void dfServiceBatchStorage(Map<String, Long> serviceMap) {
+		String sql = " insert into DF_SERVICE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DF_SERVICE_REPORT.NEXTVAL, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : serviceMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setInt(pstmt, 2, options[2]);
+				pstmt.setLong(3, serviceMap.get(key));
+				generateTimeStamp(pstmt, 4);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+
+	/**
+	 * Batch-inserts DF per-tag counters into DF_TAG_REPORT.
+	 * Key layout: [1]=activeSys, [2]=service, [3]=tag (options[0] skipped).
+	 * Bad rows are logged and skipped; commits every BATCH_INSERT_NUM rows
+	 * plus one final flush.
+	 */
+	public void dfTagBatchStorage(Map<String, Long> tagMap) {
+		String sql = " insert into DF_TAG_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, TAG, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DF_TAG_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : tagMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setInt(pstmt, 2, options[2]);
+				setInt(pstmt, 3, options[3]);
+				pstmt.setLong(4, tagMap.get(key));
+				generateTimeStamp(pstmt, 5);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+
+	/**
+	 * Batch-inserts DF domestic source-IP counters into DF_SRCIP_DOMESTIC_REPORT.
+	 * Key layout: [1]=activeSys, [2]=service, [3]=srcProvince (required),
+	 * [4]=srcCity (optional, "-" allowed); options[0] skipped. Strings are
+	 * truncated to 256 UTF-8 bytes. Bad rows are logged and skipped; commits
+	 * every BATCH_INSERT_NUM rows plus one final flush.
+	 */
+	public void dfSrcipDomesticBatchStorage(Map<String, Long> srcipMap) {
+		String sql = " insert into DF_SRCIP_DOMESTIC_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, SRC_PROVINCE, SRC_CITY, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DF_SRCIP_DOMESTIC_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : srcipMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setInt(pstmt, 2, options[2]);
+				setString(pstmt, 3, options[3], 256);
+				setStringNullable(pstmt, 4, options[4], 256);
+				pstmt.setLong(5, srcipMap.get(key));
+				generateTimeStamp(pstmt, 6);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+
+	/**
+	 * Batch-inserts DF destination-country counters into DF_DESTIP_COUNTRY_REPORT.
+	 * Key layout: [1]=activeSys, [2]=service, [3]=destCountry (truncated to 256
+	 * UTF-8 bytes); options[0] skipped. Bad rows are logged and skipped; commits
+	 * every BATCH_INSERT_NUM rows plus one final flush.
+	 */
+	public void dfDestipCountryBatchStorage(Map<String, Long> srcipMap) {
+		String sql = " insert into DF_DESTIP_COUNTRY_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, DEST_COUNTRY, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DF_DESTIP_COUNTRY_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : srcipMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setInt(pstmt, 2, options[2]);
+				setString(pstmt, 3, options[3], 256);
+				pstmt.setLong(4, srcipMap.get(key));
+				generateTimeStamp(pstmt, 5);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+
+	/**
+	 * Batch-inserts DJ per-config (PZ) counters into DJ_PZ_REPORT.
+	 * Mirrors dfPzBatchStorage but targets the DJ table/sequence.
+	 * Key layout: [1]=activeSys, [2]=cfgId, [3]=service (options[0] skipped).
+	 * Bad rows are logged and skipped; commits every BATCH_INSERT_NUM rows
+	 * plus one final flush.
+	 */
+	public void djPzBatchStorage(Map<String, Long> pzMap) {
+		String sql = " insert into DJ_PZ_REPORT(STAT_ID, ACTIVE_SYS, CFG_ID, SERVICE, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DJ_PZ_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : pzMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setLong(pstmt, 2, options[2]);
+				setInt(pstmt, 3, options[3]);
+				pstmt.setLong(4, pzMap.get(key));
+				generateTimeStamp(pstmt, 5);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+	/**
+	 * Batch-inserts DF attribute-type counters into DF_ATTR_TYPE_REPORT.
+	 * Key layout: [1]=activeSys, [2]=service, [3]=attrType (options[0] skipped).
+	 * Bad rows are logged and skipped; commits every BATCH_INSERT_NUM rows
+	 * plus one final flush.
+	 */
+	public void dfAttrTypeBatchStorage(Map<String, Long> attrTypeMap) {
+		String sql = " insert into DF_ATTR_TYPE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, ATTR_TYPE, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DF_ATTR_TYPE_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : attrTypeMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setInt(pstmt, 2, options[2]);
+				setInt(pstmt, 3, options[3]);
+				pstmt.setLong(4, attrTypeMap.get(key));
+				generateTimeStamp(pstmt, 5);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+	/**
+	 * Batch-inserts DF entrance counters into DF_ENTRANCE_REPORT.
+	 * Key layout: [1]=activeSys, [2]=service, [3]=entranceId (long; options[0]
+	 * skipped). Bad rows are logged and skipped; commits every BATCH_INSERT_NUM
+	 * rows plus one final flush.
+	 */
+	public void dfEntranceBatchStorage(Map<String, Long> entranceMap) {
+		String sql = " insert into DF_ENTRANCE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, ENTRANCE_ID, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DF_ENTRANCE_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : entranceMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setInt(pstmt, 2, options[2]);
+				setLong(pstmt, 3, options[3]);
+				pstmt.setLong(4, entranceMap.get(key));
+				generateTimeStamp(pstmt, 5);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+	/**
+	 * Batch-inserts DF LWHH counters into DF_LWHH_REPORT.
+	 * Key layout: [1]=activeSys, [2]=service, [3]=lwhh (options[0] skipped).
+	 * Bad rows are logged and skipped; commits every BATCH_INSERT_NUM rows
+	 * plus one final flush.
+	 */
+	public void dfLwhhBatchStorage(Map<String, Long> lwhhMap) {
+		String sql = " insert into DF_LWHH_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, LWHH, SUM, REPORT_TIME) " +
+				" VALUES(SEQ_DF_LWHH_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+		try {
+			connection = manager.getConnection("idb");
+			connection.setAutoCommit(false);
+			pstmt = connection.prepareStatement(sql);
+		} catch (Exception e) {
+			e.printStackTrace();
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+			return;
+		}
+		int nums = 0;
+		for (String key : lwhhMap.keySet()) {
+			try {
+				String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+				setInt(pstmt, 1, options[1]);
+				setInt(pstmt, 2, options[2]);
+				setInt(pstmt, 3, options[3]);
+				pstmt.setLong(4, lwhhMap.get(key));
+				generateTimeStamp(pstmt, 5);
+
+				pstmt.addBatch();
+				nums++;
+				if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+					pstmt.executeBatch();
+					connection.commit();
+				}
+			} catch (Exception e) {
+				logger.error("日志存在非法字段:"+key);
+				e.printStackTrace();
+			}
+		}
+		// Flush the remaining partial batch, then return resources to the pool.
+		try {
+			pstmt.executeBatch();
+			connection.commit();
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			manager.clear(pstmt, null, null);
+			if(connection!=null){
+				manager.freeConnection("idb", connection);
+			}
+		}
+	}
+
+ public void dfLwhhTypeBatchStorage(Map<String, Long> lwhhTypeMap) {
+ String sql = " insert into DF_LWHH_TYPE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, LWHH, ATTR_TYPE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DF_LWHH_TYPE_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : lwhhTypeMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, lwhhTypeMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void dfLwhhTagBatchStorage(Map<String, Long> lwhhTagMap) {
+ String sql = " insert into DF_LWHH_TAG_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, LWHH, TAG, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DF_LWHH_TAG_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : lwhhTagMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, lwhhTagMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void dfSrcipTypeBatchStorage(Map<String, Long> srcTypeMap) {
+ String sql = " insert into DF_SRCIP_DOMESTIC_TYPE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, SRC_PROVINCE, ATTR_TYPE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DF_SRCIP_TYPE_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : srcTypeMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setString(pstmt, 3, options[3], 256);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, srcTypeMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void dfSrcipTagBatchStorage(Map<String, Long> srcTagMap) {
+ String sql = " insert into DF_SRCIP_DOMESTIC_TAG_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, SRC_PROVINCE, TAG, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DF_SRCIP_TAG_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : srcTagMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setString(pstmt, 3, options[3], 256);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, srcTagMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void dfPzAttrBatchStorage(Map<String, Long> pzAttrMap) {
+ String sql = " insert into DF_PZ_ATTR_REPORT(STAT_ID, ACTIVE_SYS, CFG_ID, ATTR_TYPE, SERVICE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DF_PZ_ATTR_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : pzAttrMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setLong(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, pzAttrMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void dfPzTagBatchStorage(Map<String, Long> pzTagMap) {
+ String sql = " insert into DF_PZ_TAG_REPORT(STAT_ID, ACTIVE_SYS, CFG_ID, TAG, SERVICE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DF_PZ_TAG_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : pzTagMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setLong(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, pzTagMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void djAttrTypeBatchStorage(Map<String, Long> attrTypeMap) {
+ String sql = " insert into DJ_ATTR_TYPE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, ATTR_TYPE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_ATTR_TYPE_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : attrTypeMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ pstmt.setLong(4, attrTypeMap.get(key));
+ generateTimeStamp(pstmt, 5);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void djDestipCountryBatchStorage(Map<String, Long> srcipMap) {
+ String sql = " insert into DJ_DESTIP_COUNTRY_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, DEST_COUNTRY, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_DESTIP_COUNTRY_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : srcipMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setString(pstmt, 3, options[3], 256);
+ pstmt.setLong(4, srcipMap.get(key));
+ generateTimeStamp(pstmt, 5);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+
+ public void djEntranceBatchStorage(Map<String, Long> entranceMap) {
+ String sql = " insert into DJ_ENTRANCE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, ENTRANCE_ID, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_ENTRANCE_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : entranceMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setLong(pstmt, 3, options[3]);
+ pstmt.setLong(4, entranceMap.get(key));
+ generateTimeStamp(pstmt, 5);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+
+
+ public void djLwhhBatchStorage(Map<String, Long> lwhhMap) {
+ String sql = " insert into DJ_LWHH_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, LWHH, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_LWHH_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : lwhhMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ pstmt.setLong(4, lwhhMap.get(key));
+ generateTimeStamp(pstmt, 5);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+
+ public void djServiceBatchStorage(Map<String, Long> serviceMap) {
+ String sql = " insert into DJ_SERVICE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_SERVICE_REPORT.NEXTVAL, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : serviceMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ pstmt.setLong(3, serviceMap.get(key));
+ generateTimeStamp(pstmt, 4);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+
+ public void djSrcipDomesticBatchStorage(Map<String, Long> srcipMap) {
+ String sql = " insert into DJ_SRCIP_DOMESTIC_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, SRC_PROVINCE, SRC_CITY, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_SRCIP_DOMESTIC_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : srcipMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setString(pstmt, 3, options[3], 256);
+ setStringNullable(pstmt, 4, options[4], 256);
+ pstmt.setLong(5, srcipMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void djTagBatchStorage(Map<String, Long> tagMap) {
+ String sql = " insert into DJ_TAG_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, TAG, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_TAG_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : tagMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ pstmt.setLong(4, tagMap.get(key));
+ generateTimeStamp(pstmt, 5);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+
+ public void djLwhhTypeBatchStorage(Map<String, Long> lwhhTypeMap) {
+ String sql = " insert into DJ_LWHH_TYPE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, LWHH, ATTR_TYPE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_LWHH_TYPE_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : lwhhTypeMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, lwhhTypeMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void djLwhhTagBatchStorage(Map<String, Long> lwhhTagMap) {
+ String sql = " insert into DJ_LWHH_TAG_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, LWHH, TAG, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_LWHH_TAG_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : lwhhTagMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, lwhhTagMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void djSrcipTypeBatchStorage(Map<String, Long> srcTypeMap) {
+ String sql = " insert into DJ_SRCIP_DOMESTIC_TYPE_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, SRC_PROVINCE, ATTR_TYPE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_SRCIP_TYPE_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : srcTypeMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setString(pstmt, 3, options[3], 256);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, srcTypeMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void djSrcipTagBatchStorage(Map<String, Long> srcTagMap) {
+ String sql = " insert into DJ_SRCIP_DOMESTIC_TAG_REPORT(STAT_ID, ACTIVE_SYS, SERVICE, SRC_PROVINCE, TAG, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_SRCIP_TAG_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : srcTagMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setInt(pstmt, 2, options[2]);
+ setString(pstmt, 3, options[3], 256);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, srcTagMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void djPzAttrBatchStorage(Map<String, Long> pzAttrMap) {
+ String sql = " insert into DJ_PZ_ATTR_REPORT(STAT_ID, ACTIVE_SYS, CFG_ID, ATTR_TYPE, SERVICE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_PZ_ATTR_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : pzAttrMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setLong(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, pzAttrMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+
+ public void djPzTagBatchStorage(Map<String, Long> pzTagMap) {
+ String sql = " insert into DJ_PZ_TAG_REPORT(STAT_ID, ACTIVE_SYS, CFG_ID, TAG, SERVICE, SUM, REPORT_TIME) " +
+ " VALUES(SEQ_DJ_PZ_TAG_REPORT.NEXTVAL, ?, ?, ?, ?, ?, ?)";
+
+ try {
+ connection = manager.getConnection("idb");
+ connection.setAutoCommit(false);
+ pstmt = connection.prepareStatement(sql);
+ } catch (Exception e) {
+ e.printStackTrace();
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ return;
+ }
+ int nums = 0;
+ for (String key : pzTagMap.keySet()) {
+ try {
+ String[] options = key.split(RealtimeCountConfig.BETWEEN_BOLTS_SPLITTER);
+
+ setInt(pstmt, 1, options[1]);
+ setLong(pstmt, 2, options[2]);
+ setInt(pstmt, 3, options[3]);
+ setInt(pstmt, 4, options[4]);
+ pstmt.setLong(5, pzTagMap.get(key));
+ generateTimeStamp(pstmt, 6);
+
+ pstmt.addBatch();
+ nums++;
+ if(nums % RealtimeCountConfig.BATCH_INSERT_NUM == 0) {
+ pstmt.executeBatch();
+ connection.commit();
+ }
+ } catch (Exception e) {
+ logger.error("日志存在非法字段:"+key);
+ e.printStackTrace();
+ }
+ }
+ try {
+ pstmt.executeBatch();
+ connection.commit();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ manager.clear(pstmt, null, null);
+ if(connection!=null){
+ manager.freeConnection("idb", connection);
+ }
+ }
+ }
+}
diff --git a/src/main/java/cn/ac/iie/dao/DataBaseLoad.java b/src/main/java/cn/ac/iie/dao/DataBaseLoad.java
new file mode 100644
index 0000000..05b88b3
--- /dev/null
+++ b/src/main/java/cn/ac/iie/dao/DataBaseLoad.java
@@ -0,0 +1,166 @@
+package cn.ac.iie.dao;
+
+
+import cn.ac.iie.bean.ntc.NTC_CONN_RECORD_LOG;
+import cn.ac.iie.common.RealtimeCountConfig;
+import com.alibaba.fastjson.JSONObject;
+import org.apache.log4j.Logger;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.text.SimpleDateFormat;
+import java.util.LinkedList;
+import java.util.Map;
+
+
+/**
+ * 日志写入clickHouse insert类
+ *
+ * @author Administrator
+ * @create 2018-10-31 12:35
+ */
+public class DataBaseLoad {
+ private static final Logger logger = Logger.getLogger(DataBaseLoad.class);
+ private static DbConnect manger = DbConnect.getInstance();
+ private Connection connection;
+ private PreparedStatement pstm;
+
+ public DataBaseLoad() {
+ }
+
+ public void ntcKilledBatchStorage2CH(LinkedList<String> tmpList) {
+ String tableName = RealtimeCountConfig.TABLE_KILLED_NAME;
+ String sql = "INSERT INTO " + tableName + " VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
+ try {
+ connection = manger.getConnection();
+ connection.setAutoCommit(false);
+ pstm = connection.prepareStatement(sql);
+ int nums = 0;
+ for (String ntcLog : tmpList) {
+ NTC_CONN_RECORD_LOG ntcConnRecordLog = JSONObject.parseObject(ntcLog, NTC_CONN_RECORD_LOG.class);
+ pstm.setInt(1, ntcConnRecordLog.getCfg_id());
+ pstm.setInt(2, ntcConnRecordLog.getFound_time());
+ pstm.setInt(3, ntcConnRecordLog.getRecv_time());
+ pstm.setLong(4, ntcConnRecordLog.getOver_id());
+ pstm.setString(5, ntcConnRecordLog.getTrans_proto());
+ pstm.setString(6, ntcConnRecordLog.getD_ip());
+ pstm.setString(7, ntcConnRecordLog.getS_ip());
+ pstm.setInt(8, ntcConnRecordLog.getD_port());
+ pstm.setInt(9, ntcConnRecordLog.getS_port());
+ pstm.setString(10, ntcConnRecordLog.getNest_protocol());
+ pstm.setString(11, ntcConnRecordLog.getNest_server_ip());
+ pstm.setString(12, ntcConnRecordLog.getNest_client_ip());
+ pstm.setInt(13, ntcConnRecordLog.getNest_server_port());
+ pstm.setInt(14, ntcConnRecordLog.getNest_client_port());
+ pstm.setInt(15, ntcConnRecordLog.getService());
+ pstm.setInt(16, ntcConnRecordLog.getEntrance_id());
+ pstm.setString(17, ntcConnRecordLog.getCap_ip());
+ pstm.setString(18, ntcConnRecordLog.getScene_file());
+ pstm.setString(19, ntcConnRecordLog.getInjected_pkt_file());
+ pstm.setString(20, ntcConnRecordLog.getNest_addr_list());
+ pstm.setInt(21, ntcConnRecordLog.getAction());
+ pstm.setString(22, ntcConnRecordLog.getServer_locate());
+ pstm.setString(23, ntcConnRecordLog.getClient_locate());
+ pstm.setString(24, ntcConnRecordLog.getApp_label());
+ pstm.setLong(25, ntcConnRecordLog.getC2s_pkt_num());
+ pstm.setLong(26, ntcConnRecordLog.getS2c_pkt_num());
+ pstm.setLong(27, ntcConnRecordLog.getC2s_byte_num());
+ pstm.setLong(28, ntcConnRecordLog.getS2c_byte_num());
+
+ pstm.setString(29, ntcConnRecordLog.getUser_region());
+ pstm.setInt(30, ntcConnRecordLog.getStream_dir());
+ pstm.setString(31, ntcConnRecordLog.getAddr_list());
+ pstm.setInt(32, ntcConnRecordLog.getCreate_time());
+ pstm.setInt(33, ntcConnRecordLog.getLastmtime());
+
+ pstm.addBatch();
+ nums++;
+ if (nums >= RealtimeCountConfig.BATCH_CHINSERT_KILLED_NUM) {
+ pstm.executeBatch();
+ connection.commit();
+ nums = 0;
+ }
+ }
+ if (nums != 0) {
+ pstm.executeBatch();
+ connection.commit();
+ nums = 0;
+ }
+ } catch (Exception e) {
+ logger.error("+++++++++insert to " + RealtimeCountConfig.TABLE_KILLED_NAME + " Log write failed!!!+++++++++");
+ e.printStackTrace();
+ } finally {
+ manger.clear(pstm, connection);
+ }
+ }
+
+
+
+
+ public void dfPzFlowBatchStorage2CH(LinkedList<String> tmpList) {
+ String tableName = RealtimeCountConfig.TABLE_NAME;
+ String sql = "INSERT INTO " + tableName + " VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
+ try {
+ connection = manger.getConnection();
+ connection.setAutoCommit(false);
+ pstm = connection.prepareStatement(sql);
+ int nums = 0;
+ for (String ntcLog : tmpList) {
+ NTC_CONN_RECORD_LOG ntcConnRecordLog = JSONObject.parseObject(ntcLog, NTC_CONN_RECORD_LOG.class);
+ pstm.setInt(1, ntcConnRecordLog.getCfg_id());
+ pstm.setInt(2, ntcConnRecordLog.getFound_time());
+ pstm.setInt(3, ntcConnRecordLog.getRecv_time());
+ pstm.setLong(4, ntcConnRecordLog.getOver_id());
+ pstm.setString(5, ntcConnRecordLog.getTrans_proto());
+ pstm.setString(6, ntcConnRecordLog.getD_ip());
+ pstm.setString(7, ntcConnRecordLog.getS_ip());
+ pstm.setInt(8, ntcConnRecordLog.getD_port());
+ pstm.setInt(9, ntcConnRecordLog.getS_port());
+ pstm.setString(10, ntcConnRecordLog.getNest_protocol());
+ pstm.setString(11, ntcConnRecordLog.getNest_server_ip());
+ pstm.setString(12, ntcConnRecordLog.getNest_client_ip());
+ pstm.setInt(13, ntcConnRecordLog.getNest_server_port());
+ pstm.setInt(14, ntcConnRecordLog.getNest_client_port());
+ pstm.setInt(15, ntcConnRecordLog.getService());
+ pstm.setInt(16, ntcConnRecordLog.getEntrance_id());
+ pstm.setString(17, ntcConnRecordLog.getCap_ip());
+ pstm.setString(18, ntcConnRecordLog.getScene_file());
+ pstm.setString(19, ntcConnRecordLog.getInjected_pkt_file());
+ pstm.setString(20, ntcConnRecordLog.getNest_addr_list());
+ pstm.setInt(21, ntcConnRecordLog.getAction());
+ pstm.setString(22, ntcConnRecordLog.getServer_locate());
+ pstm.setString(23, ntcConnRecordLog.getClient_locate());
+ pstm.setString(24, ntcConnRecordLog.getApp_label());
+ pstm.setLong(25, ntcConnRecordLog.getC2s_pkt_num());
+ pstm.setLong(26, ntcConnRecordLog.getS2c_pkt_num());
+ pstm.setLong(27, ntcConnRecordLog.getC2s_byte_num());
+ pstm.setLong(28, ntcConnRecordLog.getS2c_byte_num());
+
+ pstm.setString(29, ntcConnRecordLog.getUser_region());
+ pstm.setInt(30, ntcConnRecordLog.getStream_dir());
+ pstm.setString(31, ntcConnRecordLog.getAddr_list());
+ pstm.setInt(32, ntcConnRecordLog.getCreate_time());
+ pstm.setInt(33, ntcConnRecordLog.getLastmtime());
+
+ pstm.addBatch();
+ nums++;
+ if (nums >= RealtimeCountConfig.BATCH_CHINSERT_NUM) {
+ pstm.executeBatch();
+ connection.commit();
+ nums = 0;
+ }
+ }
+ if (nums != 0) {
+ pstm.executeBatch();
+ connection.commit();
+ nums = 0;
+ }
+ } catch (Exception e) {
+ logger.error("+++++++++insert to " + RealtimeCountConfig.TABLE_NAME + " Log write failed!!!+++++++++");
+ e.printStackTrace();
+ } finally {
+ manger.clear(pstm, connection);
+ }
+ }
+
+}
diff --git a/src/main/java/cn/ac/iie/dao/DataBasePzBusiness.java b/src/main/java/cn/ac/iie/dao/DataBasePzBusiness.java
new file mode 100644
index 0000000..fd19201
--- /dev/null
+++ b/src/main/java/cn/ac/iie/dao/DataBasePzBusiness.java
@@ -0,0 +1,122 @@
+package cn.ac.iie.dao;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import org.apache.log4j.Logger;
+
+import cn.ac.iie.bean.PzTable;
+
+/**
+ * Incrementally loads config_compile rows into the in-memory map held by
+ * {@link PzTable}, advancing PzTable.seq to the largest PROC_SEQ seen.
+ */
+public final class DataBasePzBusiness {
+    private static final JdbcPzConnectionManager pzManager = JdbcPzConnectionManager.getInstance();
+    private static final Logger logger = Logger.getLogger(DataBasePzBusiness.class);
+
+    /** Columns fetched from config_compile, in positional order. */
+    private static final String QUERY_COLUMNS =
+            " COMPILE_ID, CONT_TYPE, ATTR_TYPE, CONT_LABEL, Task_id, Guarantee_ID, AFFAIR_ID, TOPIC_ID, IS_VALID, PROC_SEQ";
+
+    public DataBasePzBusiness() {
+    }
+
+    /**
+     * Normalizes a nullable column value: null or empty string becomes "-";
+     * anything else is returned in string form.
+     */
+    private static String normalize(Object value) {
+        if (value == null) {
+            return "-";
+        }
+        String s = value.toString();
+        return s.isEmpty() ? "-" : s;
+    }
+
+    /**
+     * Reads every config_compile row with PROC_SEQ greater than {@code seq}
+     * and caches it in {@link PzTable#pzMap} keyed by COMPILE_ID, as a
+     * tab-separated value string. On the initial load (PzTable.seq == 0)
+     * rows flagged IS_VALID == 0 are skipped. PzTable.seq is advanced to
+     * the highest PROC_SEQ encountered.
+     *
+     * @param seq sequence number of the last row already loaded
+     */
+    public void getPzToMap(Long seq) {
+        // Parameterized query (the original concatenated seq into the SQL).
+        String sql = "select" + QUERY_COLUMNS + " from config_compile where PROC_SEQ > ?";
+        Connection connection = null;
+        PreparedStatement pstm = null;
+        ResultSet rs = null;
+        try {
+            connection = pzManager.getConnection("idb");
+            connection.setAutoCommit(false);
+            pstm = connection.prepareStatement(sql);
+            pstm.setLong(1, seq);
+            rs = pstm.executeQuery();
+            Long seqMax = seq;
+            while (rs.next()) {
+                try {
+                    String sCOMPILE_ID = String.valueOf(rs.getLong(1));
+                    String sCONT_TYPE = normalize(rs.getObject(2));
+                    String sATTR_TYPE = normalize(rs.getObject(3));
+                    String sCONT_LABEL = normalize(rs.getObject(4));
+                    String sTask_id = normalize(rs.getObject(5));
+                    String sGuarantee_ID = normalize(rs.getObject(6));
+                    String sAFFAIR_ID = normalize(rs.getObject(7));
+                    String sTOPIC_ID = normalize(rs.getObject(8));
+                    Integer iIS_VALID = rs.getInt(9);
+                    Long lPROC_SEQ = rs.getLong(10);
+
+                    if (lPROC_SEQ > seqMax) {
+                        seqMax = lPROC_SEQ;
+                    }
+                    // Initial full load: drop rows that are not valid.
+                    if (PzTable.seq == 0 && iIS_VALID == 0) {
+                        continue;
+                    }
+                    String value = sCOMPILE_ID + "\t" + sCONT_TYPE + "\t" + sATTR_TYPE + "\t" + sCONT_LABEL + "\t"
+                            + sTask_id + "\t" + sGuarantee_ID + "\t" + sAFFAIR_ID + "\t" + sTOPIC_ID + "\t" + iIS_VALID;
+                    logger.info("put in map: " + value);
+                    PzTable.pzMap.put(sCOMPILE_ID, value);
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+
+            PzTable.seq = seqMax;
+            logger.info("PZ UPDATE DONE.");
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            // Close the ResultSet too (the original passed null here).
+            pzManager.clear(pstm, null, rs);
+            if (connection != null) {
+                pzManager.freeConnection("idb", connection);
+            }
+        }
+    }
+}
diff --git a/src/main/java/cn/ac/iie/dao/DbConnect.java b/src/main/java/cn/ac/iie/dao/DbConnect.java
new file mode 100644
index 0000000..0635e04
--- /dev/null
+++ b/src/main/java/cn/ac/iie/dao/DbConnect.java
@@ -0,0 +1,102 @@
+package cn.ac.iie.dao;
+
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.druid.pool.DruidPooledConnection;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.Properties;
+
+/**
+ * Druid连接信息
+ *
+ * @author antlee
+ * @date 2018/8/20
+ */
+public class DbConnect {
+ private static DruidDataSource dataSource = null;
+ private static DbConnect dbConnect = null;
+ private static Properties props = new Properties();
+
+ static {
+ getDbConnect();
+ }
+
+ private static void getDbConnect() {
+ try {
+ if (dataSource == null) {
+ dataSource = new DruidDataSource();
+ props.load(DbConnect.class.getClassLoader().getResourceAsStream("clickhouse.properties"));
+ //设置连接参数
+ dataSource.setUrl("jdbc:clickhouse://" + props.getProperty("db.id"));
+ dataSource.setDriverClassName(props.getProperty("drivers"));
+ dataSource.setUsername(props.getProperty("mdb.user"));
+ dataSource.setPassword(props.getProperty("mdb.password"));
+ //配置初始化大小、最小、最大
+ dataSource.setInitialSize(Integer.parseInt(props.getProperty("initialsize")));
+ dataSource.setMinIdle(Integer.parseInt(props.getProperty("minidle")));
+ dataSource.setMaxActive(Integer.parseInt(props.getProperty("maxactive")));
+ //连接泄漏监测
+ dataSource.setRemoveAbandoned(true);
+ dataSource.setRemoveAbandonedTimeout(30);
+ dataSource.setDefaultAutoCommit(false);
+ //配置获取连接等待超时的时间
+ dataSource.setMaxWait(30000);
+ //配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+ dataSource.setTimeBetweenEvictionRunsMillis(2000);
+ //防止过期
+ dataSource.setValidationQuery("SELECT 1");
+ dataSource.setTestWhileIdle(true);
+ dataSource.setTestOnBorrow(true);
+ dataSource.setKeepAlive(true);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+
+ }
+ }
+
+ /**
+ * 数据库连接池单例
+ *
+ * @return dbConnect
+ */
+ public static synchronized DbConnect getInstance() {
+ if (null == dbConnect) {
+ dbConnect = new DbConnect();
+ }
+ return dbConnect;
+ }
+
+ /**
+ * 返回druid数据库连接
+ *
+ * @return 连接
+ * @throws SQLException sql异常
+ */
+ public DruidPooledConnection getConnection() throws SQLException {
+ return dataSource.getConnection();
+ }
+
+ /**
+ * 清空PreparedStatement、Connection对象,未定义的置空。
+ *
+ * @param pstmt PreparedStatement对象
+ * @param connection Connection对象
+ */
+ public void clear(PreparedStatement pstmt, Connection connection) {
+ try {
+ if (pstmt != null) {
+ pstmt.close();
+ }
+ if (connection != null) {
+ connection.close();
+ }
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+
+ }
+} \ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/dao/JdbcConnectionManager.java b/src/main/java/cn/ac/iie/dao/JdbcConnectionManager.java
new file mode 100644
index 0000000..ef369f7
--- /dev/null
+++ b/src/main/java/cn/ac/iie/dao/JdbcConnectionManager.java
@@ -0,0 +1,392 @@
+package cn.ac.iie.dao;
+
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Date;
+import java.util.Enumeration;
+import java.util.Hashtable;
+import java.util.Properties;
+import java.util.StringTokenizer;
+import java.util.Vector;
+
+import com.nis.exception.DaoException;
+
+
+
+
+
+/**
+ *
+ * <p>JDBC 连接管理类</p>
+ * <p>使用该类需要具备两个条件:
+ * <li>1.数据库驱动jar包</li>
+ * <li>2.在src目录下生成db.properties,具有如下内容
+ * drivers=* 数据库连接驱动类
+ * idb.url=* 数据库连接URL
+ * idb.maxconn=* 线程最大连接数
+ * idb.user=* 数据库连接用户名
+ * idb.password=* 数据库连接密码
+ * </li>
+ * </p>
+ * @author 中科智源育成信息有限公司 E-mail: [email protected]
+ * @version 1.0 创建时间:2010-11-8 下午04:54:15
+ *
+ */
+public final class JdbcConnectionManager {
+ private static final JdbcConnectionManager jdbcConnectionManager = new JdbcConnectionManager() ; // 唯一实例
+ private static int clients;
+ private Vector drivers = new Vector();
+ private Hashtable pools = new Hashtable();
+
+ //抑制默认的构造器,避免实例化对象
+ private JdbcConnectionManager(){
+ init();
+ }
+
+ /**
+ * 读取属性完成初始化
+ */
+ private void init() {
+
+ //InputStream is = getClass().getResourceAsStream("/db.properties");
+
+ Properties dbProps = new Properties();
+ try {
+ InputStream is = JdbcConnectionManager.class.getClassLoader().getResourceAsStream("db.properties");
+ //InputStream is = new FileInputStream(System.getProperty("user.dir")+File.separator+"config"+File.separator+"db.properties");
+ dbProps.load(is);
+ System.out.println("读取数据成功!");
+ } catch (Exception e) {
+ System.err.println("不能读取属性文件. "
+ + "请确保db.properties在CLASSPATH指定的路径中");
+ throw new RuntimeException(new FileNotFoundException("unknow db.properties"));
+ }
+ loadDrivers(dbProps);
+ createPools(dbProps);
+ }
+
+ /**
+ * 装载和注册所有JDBC驱动程序
+ *
+ * @param props
+ * 属性
+ */
+ private void loadDrivers(Properties props) {
+ String driverClasses = props.getProperty("drivers");
+ StringTokenizer st = new StringTokenizer(driverClasses);
+ while (st.hasMoreElements()) {
+ String driverClassName = st.nextToken().trim();
+
+ try {
+
+ Driver driver = (Driver) Class.forName(driverClassName)
+ .newInstance();
+ DriverManager.registerDriver(driver);
+ drivers.addElement(driver);
+ } catch (Exception e) {
+ System.out.println("无法装载驱动,异常信息:" + e.getMessage());
+ }
+ }
+ }
+
+ /**
+ * 根据指定属性创建连接池实例.
+ *
+ * @param props
+ * 连接池属性
+ */
+ private void createPools(Properties props) {
+ Enumeration propNames = props.propertyNames();
+ while (propNames.hasMoreElements()) {
+ String name = (String) propNames.nextElement();
+ if (name.endsWith(".url")) {
+ String poolName = name.substring(0, name.lastIndexOf("."));
+ String url = props.getProperty(poolName + ".url");
+ if (url == null) {
+ System.out.println("没有为连接池" + poolName + "指定URL");
+ continue;
+ }
+ String user = props.getProperty(poolName + ".user");
+ String password = props.getProperty(poolName + ".password");
+ String maxconn = props.getProperty(poolName + ".maxconn", "0");
+
+ int max;
+ try {
+ max = Integer.valueOf(maxconn).intValue();
+ } catch (NumberFormatException e) {
+ System.out.println("错误的最大连接数限制: " + maxconn + " .连接池: "
+ + poolName);
+ max = 0;
+ }
+ DBConnectionPool pool = new DBConnectionPool(poolName, url,
+ user, password, max);
+ pools.put(poolName, pool);
+
+ }
+ }
+ }
+
+
+ /**
+ * 返回singleton实例.如果是第一次调用此方法,则创建实例
+ *
+ * @return JdbcConnectionManager 唯一实例
+ */
+ public static synchronized JdbcConnectionManager getInstance() {
+ clients++;
+ return jdbcConnectionManager;
+ }
+
+
+
+ /**
+ * 将连接对象返回给由名字指定的连接池
+ *
+ * @param name
+ * 在属性文件中定义的连接池名字
+ * @param conn
+ * 连接对象
+ */
+ public void freeConnection(String name, Connection conn) {
+ DBConnectionPool pool = (DBConnectionPool) pools.get(name);
+ if (pool != null) {
+ pool.freeConnection(conn);
+ }
+ }
+
+ /**
+ * 获得一个可用的(空闲的)连接.如果没有可用连接,且已有连接数小于最大连接数 限制,则创建并返回新连接
+ *
+ * @param name
+ * 在属性文件中定义的连接池名字
+ * @return Connection 可用连接或null
+ */
+ public Connection getConnection(String name) throws DaoException{
+ DBConnectionPool pool = (DBConnectionPool) pools.get(name);
+ if (pool != null) {
+ return pool.getConnection();
+ }
+ return null;
+ }
+
+ /**
+ * 获得一个可用连接.若没有可用连接,且已有连接数小于最大连接数限制, 则创建并返回新连接.否则,在指定的时间内等待其它线程释放连接.
+ *
+ * @param name
+ * 连接池名字
+ * @param time
+ * 以毫秒计的等待时间
+ * @return Connection 可用连接或null
+ */
+ public Connection getConnection(String name, long time)throws DaoException {
+ DBConnectionPool pool = (DBConnectionPool) pools.get(name);
+ if (pool != null) {
+ return pool.getConnection(time);
+ }
+ return null;
+ }
+
+ //返回多少个连接
+ private int getClient() {
+ return clients;
+ }
+
+ /**
+ * 关闭所有连接,撤销驱动程序的注册
+ */
+ public synchronized void release() {
+ // 等待直到最后一个客户程序调用
+ if (--clients != 0) {
+ return;
+ }
+
+ Enumeration allPools = pools.elements();
+ while (allPools.hasMoreElements()) {
+ DBConnectionPool pool = (DBConnectionPool) allPools.nextElement();
+ pool.release();
+ }
+
+ Enumeration allDrivers = drivers.elements();
+ while (allDrivers.hasMoreElements()) {
+ Driver driver = (Driver) allDrivers.nextElement();
+ try {
+ DriverManager.deregisterDriver(driver);
+ // System.out.println("撤销JDBC驱动程序 " + driver.getClass().getName()
+ // + "的注册");
+ } catch (SQLException e) {
+ System.out.println("无法撤销下列JDBC驱动程序的注册: "
+ + driver.getClass().getName() + "。错误信息:"
+ + e.getMessage());
+ }
+ }
+ }
+
+ /**
+ *
+ * 清空PreparedStatement、Statement、ResultSet对象,未定义的置空。
+ * @param pstmt PreparedStatement对象
+ * @param stmt Statement对象
+ * @param rs ResultSet对象
+ */
+ public void clear(PreparedStatement pstmt,Statement stmt,ResultSet rs) {
+ try {
+ if (stmt != null) {
+ stmt.close();
+ }
+ if (rs != null) {
+ rs.close();
+ }
+ if (pstmt != null) {
+ pstmt.close();
+ }
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+
+ }
+
+
+ /**
+ * 此内部类定义了一个连接池.它能够根据要求创建新连接,直到预定的最大连接数为止.在返回连接给客户程序之前,它能够验证连接的有效性.
+ */
+ class DBConnectionPool {
+ private int checkedOut;
+ private Vector freeConnections = new Vector();
+ private int maxConn;
+ private String name;
+ private String password;
+ private String URL;
+ private String user;
+
+ /**
+ * 创建新的连接池
+ *
+ * @param name
+ * 连接池名字
+ * @param URL
+ * 数据库的JDBC URL
+ * @param user
+ * 数据库帐号,或 null
+ * @param password
+ * 密码,或 null
+ * @param maxConn
+ * 此连接池允许建立的最大连接数
+ */
+ public DBConnectionPool(String name, String URL, String user,
+ String password, int maxConn) {
+ this.name = name;
+ this.URL = URL;
+ this.user = user;
+ this.password = password;
+ this.maxConn = maxConn;
+ }
+
+ /**
+ * 将不再使用的连接返回给连接池
+ *
+ * @param con
+ * 客户程序释放的连接
+ */
+ public synchronized void freeConnection(Connection con) {
+ // 将指定连接加入到向量末尾
+ freeConnections.addElement(con);
+ checkedOut--;
+ //release();
+ notifyAll();
+ }
+
+ /**
+ * 从连接池获得一个可用连接.如没有空闲的连接且当前连接数小于最大连接 数限制,则创建新连接.如原来登记为可用的连接不再有效,则从向量删除之,
+ * 然后递归调用自己以尝试新的可用连接.
+ */
+ public synchronized Connection getConnection() throws DaoException{
+ Connection con = null;
+ if (freeConnections.size() > 0) {
+ // 获取向量中第一个可用连接
+ con = (Connection) freeConnections.firstElement();
+ freeConnections.removeElementAt(0);
+ try {
+ if (con.isClosed()) {
+ // 递归调用自己,尝试再次获取可用连接
+ con = getConnection();
+ }
+ } catch (SQLException e) {
+ // 递归调用自己,尝试再次获取可用连接
+ con = getConnection();
+ }
+ } else if (maxConn == 0 || checkedOut < maxConn) {
+ con = newConnection();
+ }
+ if (con != null) {
+ checkedOut++;
+ }
+ return con;
+ }
+
+ /**
+ * 从连接池获取可用连接.可以指定客户程序能够等待的最长时间 参见前一个getConnection()方法.
+ *
+ * @param timeout
+ * 以毫秒计的等待时间限制
+ */
+ public synchronized Connection getConnection(long timeout) throws DaoException {
+ long startTime = new Date().getTime();
+ Connection con;
+ while ((con = getConnection()) == null) {
+ try {
+ wait(timeout);
+ } catch (InterruptedException e) {
+ }
+ if ((new Date().getTime() - startTime) >= timeout) {
+ // wait()返回的原因是超时
+ return null;
+ }
+ }
+ return con;
+ }
+
+ /**
+ * 关闭所有连接
+ */
+ public synchronized void release() {
+ Enumeration allConnections = freeConnections.elements();
+ while (allConnections.hasMoreElements()) {
+ Connection con = (Connection) allConnections.nextElement();
+ try {
+ con.close();
+ } catch (SQLException e) {
+ System.out.println("无法关闭连接池" + name + "中的连接,错误信息:"
+ + e.getMessage());
+ }
+ }
+ freeConnections.removeAllElements();
+ }
+
+ /**
+ * 创建新的连接
+ */
+ private Connection newConnection() throws DaoException {
+ Connection con = null;
+ try {
+ if (user == null) {
+ con = DriverManager.getConnection(URL);
+ } else {
+ con = DriverManager.getConnection(URL, user, password);
+ }
+ System.out.println("连接池" + name + "创建一个新的连接");
+ } catch (SQLException e) {
+ throw new DaoException("无法创建下列URL的连接:" + URL + "\n错误信息:"+ e.getMessage());
+ }
+ //freeConnections.addElement(con);
+ return con;
+ }
+ }
+
+}
diff --git a/src/main/java/cn/ac/iie/dao/JdbcPzConnectionManager.java b/src/main/java/cn/ac/iie/dao/JdbcPzConnectionManager.java
new file mode 100644
index 0000000..f35d5d3
--- /dev/null
+++ b/src/main/java/cn/ac/iie/dao/JdbcPzConnectionManager.java
@@ -0,0 +1,392 @@
+package cn.ac.iie.dao;
+
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Date;
+import java.util.Enumeration;
+import java.util.Hashtable;
+import java.util.Properties;
+import java.util.StringTokenizer;
+import java.util.Vector;
+
+import com.nis.exception.DaoException;
+
+
+
+
+
+/**
+ *
+ * <p>JDBC 连接管理类</p>
+ * <p>使用该类需要具备两个条件:
+ * <li>1.数据库驱动jar包</li>
+ * <li>2.在src目录下生成db.properties,具有如下内容
+ * drivers=* 数据库连接驱动类
+ * idb.url=* 数据库连接URL
+ * idb.maxconn=* 线程最大连接数
+ * idb.user=* 数据库连接用户名
+ * idb.password=* 数据库连接密码
+ * </li>
+ * </p>
+ * @author 中科智源育成信息有限公司 E-mail: [email protected]
+ * @version 1.0 创建时间:2010-11-8 下午04:54:15
+ *
+ */
+public final class JdbcPzConnectionManager {
+ private static final JdbcPzConnectionManager jdbcPzConnectionManager = new JdbcPzConnectionManager() ; // 唯一实例
+ private static int clients;
+ private Vector drivers = new Vector();
+ private Hashtable pools = new Hashtable();
+
+ //抑制默认的构造器,避免实例化对象
+ private JdbcPzConnectionManager(){
+ init();
+ }
+
+ /**
+ * 读取属性完成初始化
+ */
+ private void init() {
+
+ //InputStream is = getClass().getResourceAsStream("/db.properties");
+
+ Properties dbProps = new Properties();
+ try {
+ InputStream is = JdbcPzConnectionManager.class.getClassLoader().getResourceAsStream("db_pz.properties");
+ //InputStream is = new FileInputStream(System.getProperty("user.dir")+File.separator+"config"+File.separator+"db.properties");
+ dbProps.load(is);
+ System.out.println("db_pz.properties读取数据成功!");
+ } catch (Exception e) {
+ System.err.println("不能读取属性文件. "
+ + "请确保db_pz.properties在CLASSPATH指定的路径中");
+ throw new RuntimeException(new FileNotFoundException("unknow db_pz.properties"));
+ }
+ loadDrivers(dbProps);
+ createPools(dbProps);
+ }
+
+ /**
+ * 装载和注册所有JDBC驱动程序
+ *
+ * @param props
+ * 属性
+ */
+ private void loadDrivers(Properties props) {
+ String driverClasses = props.getProperty("drivers");
+ StringTokenizer st = new StringTokenizer(driverClasses);
+ while (st.hasMoreElements()) {
+ String driverClassName = st.nextToken().trim();
+
+ try {
+
+ Driver driver = (Driver) Class.forName(driverClassName)
+ .newInstance();
+ DriverManager.registerDriver(driver);
+ drivers.addElement(driver);
+ } catch (Exception e) {
+ System.out.println("无法装载驱动,异常信息:" + e.getMessage());
+ }
+ }
+ }
+
+ /**
+ * 根据指定属性创建连接池实例.
+ *
+ * @param props
+ * 连接池属性
+ */
+ private void createPools(Properties props) {
+ Enumeration propNames = props.propertyNames();
+ while (propNames.hasMoreElements()) {
+ String name = (String) propNames.nextElement();
+ if (name.endsWith(".url")) {
+ String poolName = name.substring(0, name.lastIndexOf("."));
+ String url = props.getProperty(poolName + ".url");
+ if (url == null) {
+ System.out.println("没有为连接池" + poolName + "指定URL");
+ continue;
+ }
+ String user = props.getProperty(poolName + ".user");
+ String password = props.getProperty(poolName + ".password");
+ String maxconn = props.getProperty(poolName + ".maxconn", "0");
+
+ int max;
+ try {
+ max = Integer.valueOf(maxconn).intValue();
+ } catch (NumberFormatException e) {
+ System.out.println("错误的最大连接数限制: " + maxconn + " .连接池: "
+ + poolName);
+ max = 0;
+ }
+ DBConnectionPool pool = new DBConnectionPool(poolName, url,
+ user, password, max);
+ pools.put(poolName, pool);
+
+ }
+ }
+ }
+
+
+ /**
+ * 返回singleton实例.如果是第一次调用此方法,则创建实例
+ *
+ * @return JdbcConnectionManager 唯一实例
+ */
+ public static synchronized JdbcPzConnectionManager getInstance() {
+ clients++;
+ return jdbcPzConnectionManager;
+ }
+
+
+
+ /**
+ * 将连接对象返回给由名字指定的连接池
+ *
+ * @param name
+ * 在属性文件中定义的连接池名字
+ * @param conn
+ * 连接对象
+ */
+ public void freeConnection(String name, Connection conn) {
+ DBConnectionPool pool = (DBConnectionPool) pools.get(name);
+ if (pool != null) {
+ pool.freeConnection(conn);
+ }
+ }
+
+ /**
+ * 获得一个可用的(空闲的)连接.如果没有可用连接,且已有连接数小于最大连接数 限制,则创建并返回新连接
+ *
+ * @param name
+ * 在属性文件中定义的连接池名字
+ * @return Connection 可用连接或null
+ */
+ public Connection getConnection(String name) throws DaoException{
+ DBConnectionPool pool = (DBConnectionPool) pools.get(name);
+ if (pool != null) {
+ return pool.getConnection();
+ }
+ return null;
+ }
+
+ /**
+ * 获得一个可用连接.若没有可用连接,且已有连接数小于最大连接数限制, 则创建并返回新连接.否则,在指定的时间内等待其它线程释放连接.
+ *
+ * @param name
+ * 连接池名字
+ * @param time
+ * 以毫秒计的等待时间
+ * @return Connection 可用连接或null
+ */
+ public Connection getConnection(String name, long time)throws DaoException {
+ DBConnectionPool pool = (DBConnectionPool) pools.get(name);
+ if (pool != null) {
+ return pool.getConnection(time);
+ }
+ return null;
+ }
+
+ //返回多少个连接
+ private int getClient() {
+ return clients;
+ }
+
+ /**
+ * 关闭所有连接,撤销驱动程序的注册
+ */
+ public synchronized void release() {
+ // 等待直到最后一个客户程序调用
+ if (--clients != 0) {
+ return;
+ }
+
+ Enumeration allPools = pools.elements();
+ while (allPools.hasMoreElements()) {
+ DBConnectionPool pool = (DBConnectionPool) allPools.nextElement();
+ pool.release();
+ }
+
+ Enumeration allDrivers = drivers.elements();
+ while (allDrivers.hasMoreElements()) {
+ Driver driver = (Driver) allDrivers.nextElement();
+ try {
+ DriverManager.deregisterDriver(driver);
+ // System.out.println("撤销JDBC驱动程序 " + driver.getClass().getName()
+ // + "的注册");
+ } catch (SQLException e) {
+ System.out.println("无法撤销下列JDBC驱动程序的注册: "
+ + driver.getClass().getName() + "。错误信息:"
+ + e.getMessage());
+ }
+ }
+ }
+
+ /**
+ *
+ * 清空PreparedStatement、Statement、ResultSet对象,未定义的置空。
+ * @param pstmt PreparedStatement对象
+ * @param stmt Statement对象
+ * @param rs ResultSet对象
+ */
+ public void clear(PreparedStatement pstmt,Statement stmt,ResultSet rs) {
+ try {
+ if (stmt != null) {
+ stmt.close();
+ }
+ if (rs != null) {
+ rs.close();
+ }
+ if (pstmt != null) {
+ pstmt.close();
+ }
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+
+ }
+
+
+ /**
+ * 此内部类定义了一个连接池.它能够根据要求创建新连接,直到预定的最大连接数为止.在返回连接给客户程序之前,它能够验证连接的有效性.
+ */
+ class DBConnectionPool {
+ private int checkedOut;
+ private Vector freeConnections = new Vector();
+ private int maxConn;
+ private String name;
+ private String password;
+ private String URL;
+ private String user;
+
+ /**
+ * 创建新的连接池
+ *
+ * @param name
+ * 连接池名字
+ * @param URL
+ * 数据库的JDBC URL
+ * @param user
+ * 数据库帐号,或 null
+ * @param password
+ * 密码,或 null
+ * @param maxConn
+ * 此连接池允许建立的最大连接数
+ */
+ public DBConnectionPool(String name, String URL, String user,
+ String password, int maxConn) {
+ this.name = name;
+ this.URL = URL;
+ this.user = user;
+ this.password = password;
+ this.maxConn = maxConn;
+ }
+
+ /**
+ * 将不再使用的连接返回给连接池
+ *
+ * @param con
+ * 客户程序释放的连接
+ */
+ public synchronized void freeConnection(Connection con) {
+ // 将指定连接加入到向量末尾
+ freeConnections.addElement(con);
+ checkedOut--;
+ //release();
+ notifyAll();
+ }
+
+ /**
+ * 从连接池获得一个可用连接.如没有空闲的连接且当前连接数小于最大连接 数限制,则创建新连接.如原来登记为可用的连接不再有效,则从向量删除之,
+ * 然后递归调用自己以尝试新的可用连接.
+ */
+ public synchronized Connection getConnection() throws DaoException{
+ Connection con = null;
+ if (freeConnections.size() > 0) {
+ // 获取向量中第一个可用连接
+ con = (Connection) freeConnections.firstElement();
+ freeConnections.removeElementAt(0);
+ try {
+ if (con.isClosed()) {
+ // 递归调用自己,尝试再次获取可用连接
+ con = getConnection();
+ }
+ } catch (SQLException e) {
+ // 递归调用自己,尝试再次获取可用连接
+ con = getConnection();
+ }
+ } else if (maxConn == 0 || checkedOut < maxConn) {
+ con = newConnection();
+ }
+ if (con != null) {
+ checkedOut++;
+ }
+ return con;
+ }
+
+ /**
+ * 从连接池获取可用连接.可以指定客户程序能够等待的最长时间 参见前一个getConnection()方法.
+ *
+ * @param timeout
+ * 以毫秒计的等待时间限制
+ */
+ public synchronized Connection getConnection(long timeout) throws DaoException {
+ long startTime = new Date().getTime();
+ Connection con;
+ while ((con = getConnection()) == null) {
+ try {
+ wait(timeout);
+ } catch (InterruptedException e) {
+ }
+ if ((new Date().getTime() - startTime) >= timeout) {
+ // wait()返回的原因是超时
+ return null;
+ }
+ }
+ return con;
+ }
+
+ /**
+ * 关闭所有连接
+ */
+ public synchronized void release() {
+ Enumeration allConnections = freeConnections.elements();
+ while (allConnections.hasMoreElements()) {
+ Connection con = (Connection) allConnections.nextElement();
+ try {
+ con.close();
+ } catch (SQLException e) {
+ System.out.println("无法关闭连接池" + name + "中的连接,错误信息:"
+ + e.getMessage());
+ }
+ }
+ freeConnections.removeAllElements();
+ }
+
+ /**
+ * 创建新的连接
+ */
+ private Connection newConnection() throws DaoException {
+ Connection con = null;
+ try {
+ if (user == null) {
+ con = DriverManager.getConnection(URL);
+ } else {
+ con = DriverManager.getConnection(URL, user, password);
+ }
+ System.out.println("连接池" + name + "创建一个新的连接");
+ } catch (SQLException e) {
+ throw new DaoException("无法创建下列URL的连接:" + URL + "\n错误信息:"+ e.getMessage());
+ }
+ //freeConnections.addElement(con);
+ return con;
+ }
+ }
+
+}
diff --git a/src/main/java/cn/ac/iie/dao/KafkaDB.java b/src/main/java/cn/ac/iie/dao/KafkaDB.java
new file mode 100644
index 0000000..4d15449
--- /dev/null
+++ b/src/main/java/cn/ac/iie/dao/KafkaDB.java
@@ -0,0 +1,81 @@
+package cn.ac.iie.dao;
+
+import cn.ac.iie.bean.voipSipFromToLog.RouteRelationLog;
+import cn.ac.iie.bean.voipSipOrigin.SipOriginALL;
+import cn.ac.iie.common.RealtimeCountConfig;
+import com.alibaba.fastjson.JSONObject;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.log4j.Logger;
+
+import java.util.LinkedList;
+import java.util.Properties;
+
+/**
+ * Kafka helper that re-publishes SIP origin and route-relation logs,
+ * stamping each record with the current epoch second before sending.
+ */
+public class KafkaDB {
+    private static Logger logger = Logger.getLogger(KafkaDB.class);
+
+    /** Shared producer, created once by the singleton constructor. */
+    private static Producer<String, String> producer;
+
+    private static KafkaDB kafkaDB;
+
+    private KafkaDB() {
+        getProducer();
+    }
+
+    /**
+     * Returns the singleton instance, creating it (and the underlying
+     * producer) on first use. Synchronized: the original unsynchronized
+     * check could build two producers under concurrent first calls.
+     */
+    public static synchronized KafkaDB getInstance() {
+        if (kafkaDB == null) {
+            kafkaDB = new KafkaDB();
+        }
+        return kafkaDB;
+    }
+
+    /**
+     * Parses each JSON string as a SipOriginALL, overwrites its stat_time
+     * with "now" (epoch seconds) and sends it to the SIP complement topic.
+     * Malformed entries are printed and skipped; the batch is flushed at
+     * the end so all records are on the wire before returning.
+     *
+     * @param sipOriJsonS JSON-serialized SipOriginALL records
+     */
+    public void siporiLog2KafkaFromSipInsertBoltDC(LinkedList<String> sipOriJsonS) {
+        long time = System.currentTimeMillis() / 1000L;
+        for (String sipOriJson : sipOriJsonS) {
+            try {
+                SipOriginALL sipOriginLog = JSONObject.parseObject(sipOriJson, SipOriginALL.class);
+                sipOriginLog.setStat_time(time);
+                producer.send(new ProducerRecord<>(RealtimeCountConfig.KAFKA_SIP_COMPLEMENT_TOPIC, JSONObject.toJSONString(sipOriginLog)));
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        producer.flush();
+    }
+
+    /**
+     * Parses each JSON string as a RouteRelationLog, overwrites its
+     * timestamp with "now" (epoch seconds) and sends it to the
+     * route-relation topic. Malformed entries are printed and skipped;
+     * the batch is flushed at the end.
+     *
+     * @param routeJsonS JSON-serialized RouteRelationLog records
+     */
+    public void routeRelatLog2KafkaFromSipInsertBoltDC(LinkedList<String> routeJsonS) {
+        long time = System.currentTimeMillis() / 1000L;
+        for (String routeJson : routeJsonS) {
+            try {
+                RouteRelationLog routeRelationLog = JSONObject.parseObject(routeJson, RouteRelationLog.class);
+                routeRelationLog.setTimestamp(time);
+                producer.send(new ProducerRecord<>(RealtimeCountConfig.KAFKA_ROUTE_RELATION_TOPIC, JSONObject.toJSONString(routeRelationLog)));
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        producer.flush();
+    }
+
+    /**
+     * Initializes the shared Kafka producer from the configured brokers.
+     * Called exactly once, from the private constructor.
+     */
+    private void getProducer() {
+        Properties properties = new Properties();
+        properties.put("bootstrap.servers", RealtimeCountConfig.BOOTSTRAP_OUTPUT_SERVERS);
+        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        properties.put("acks", "1");
+        properties.put("linger.ms", "2");
+        properties.put("request.timeout.ms", 20000);
+        properties.put("batch.size", 262144);
+        properties.put("buffer.memory", 33554432);
+        producer = new KafkaProducer<String, String>(properties);
+    }
+
+}
diff --git a/src/main/java/cn/ac/iie/spout/CustomizedKafkaSpout.java b/src/main/java/cn/ac/iie/spout/CustomizedKafkaSpout.java
new file mode 100644
index 0000000..bddce06
--- /dev/null
+++ b/src/main/java/cn/ac/iie/spout/CustomizedKafkaSpout.java
@@ -0,0 +1,66 @@
+package cn.ac.iie.spout;
+
+import cn.ac.iie.common.RealtimeCountConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.log4j.Logger;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Storm spout that consumes raw string records from the configured Kafka
+ * topic and emits each record value as a single-field ("source") tuple.
+ */
+public class CustomizedKafkaSpout extends BaseRichSpout {
+    private static final long serialVersionUID = -3363788553406229592L;
+    private final static Logger logger = Logger.getLogger(CustomizedKafkaSpout.class);
+
+    private KafkaConsumer<String, String> consumer;
+    private SpoutOutputCollector collector = null;
+    private TopologyContext context = null;
+
+    /** Assembles the Kafka consumer settings from RealtimeCountConfig. */
+    private static Properties createConsumerConfig() {
+        Properties config = new Properties();
+        config.put("bootstrap.servers", RealtimeCountConfig.BOOTSTRAP_SERVERS);
+        config.put("group.id", RealtimeCountConfig.GROUP_ID_PREFIX + "-rc-" + RealtimeCountConfig.GROUP_ID);
+        config.put("auto.offset.reset", RealtimeCountConfig.AUTO_OFFSET_RESET);
+        config.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+        config.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+        return config;
+    }
+
+    /** Creates the consumer and subscribes it to the configured topic. */
+    @Override
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        this.collector = collector;
+        this.context = context;
+        this.consumer = new KafkaConsumer<>(createConsumerConfig());
+        this.consumer.subscribe(Arrays.asList(RealtimeCountConfig.KAFKA_TOPIC));
+    }
+
+    /** Releases the Kafka consumer when the spout shuts down. */
+    @Override
+    public void close() {
+        consumer.close();
+    }
+
+    /** Polls Kafka (blocking up to 10 s) and emits every record value. */
+    @Override
+    public void nextTuple() {
+        ConsumerRecords<String, String> batch = consumer.poll(10000L);
+        for (ConsumerRecord<String, String> rec : batch) {
+            this.collector.emit(new Values(rec.value()));
+        }
+    }
+
+    /** This spout emits exactly one field, named "source". */
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("source"));
+    }
+}
diff --git a/src/main/java/cn/ac/iie/spout/sip/SIP_ORIGIN_ALL_KafkaSpout.java b/src/main/java/cn/ac/iie/spout/sip/SIP_ORIGIN_ALL_KafkaSpout.java
new file mode 100644
index 0000000..c267533
--- /dev/null
+++ b/src/main/java/cn/ac/iie/spout/sip/SIP_ORIGIN_ALL_KafkaSpout.java
@@ -0,0 +1,79 @@
+package cn.ac.iie.spout.sip;
+
+import cn.ac.iie.bean.voipSipOrigin.SipOriginALL;
+import cn.ac.iie.common.RealtimeCountConfig;
+import com.alibaba.fastjson.JSONObject;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.log4j.Logger;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Properties;
+
+public class SIP_ORIGIN_ALL_KafkaSpout extends BaseRichSpout {
+ private static final long serialVersionUID = -3363788553406229592L;
+ private KafkaConsumer<String, String> consumer;
+ private SpoutOutputCollector collector = null;
+ private TopologyContext context = null;
+ private final static Logger logger = Logger.getLogger(SIP_ORIGIN_ALL_KafkaSpout.class);
+
+ private static Properties createConsumerConfig() {
+ Properties props = new Properties();
+ props.put("bootstrap.servers", RealtimeCountConfig.BOOTSTRAP_SERVERS);
+ props.put("group.id", RealtimeCountConfig.GROUP_ID_PREFIX + "-sip-" + RealtimeCountConfig.GROUP_ID_SUFFIX);
+ props.put("fetch.max.bytes", RealtimeCountConfig.FETCH_MAX_BYTES);//默认52428800
+ props.put("max.partition.fetch.bytes", RealtimeCountConfig.MAX_PARTITION_FETCH_BYTES);//默认1048576
+ props.put("max.poll.interval.ms", RealtimeCountConfig.MAX_POLL_INTERVAL_MS);//默认300000
+ props.put("max.poll.records", RealtimeCountConfig.MAX_POLL_RECORDS);//默认500
+ props.put("session.timeout.ms", RealtimeCountConfig.SESSION_TIMEOUT_MS);//默认10000
+ props.put("auto.offset.reset", RealtimeCountConfig.AUTO_OFFSET_RESET);
+ props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+ props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+ return props;
+ }
+
+ @Override
+ public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+ // TODO Auto-generated method stub
+ this.collector = collector;
+ this.context = context;
+ Properties prop = createConsumerConfig();
+ this.consumer = new KafkaConsumer<>(prop);
+ this.consumer.subscribe(Arrays.asList(RealtimeCountConfig.KAFKA_SIP_ORIGIN_TOPIC));
+ }
+
+ @Override
+ public void close() {
+ consumer.close();
+ }
+
+ @Override
+ public void nextTuple() {
+ // TODO Auto-generated method stub
+ try {
+ ConsumerRecords<String, String> records = consumer.poll(10000L);
+ for (ConsumerRecord<String, String> record : records) {
+ SipOriginALL sipOriginLogDisable = JSONObject.parseObject(record.value(), SipOriginALL.class);
+ if (!("OPTIONS".equals(sipOriginLogDisable.getMethod()))) {
+ this.collector.emit(new Values(record.value(), RealtimeCountConfig.KAFKA_SIP_ORIGIN_TOPIC));
+ }
+ }
+ } catch (Exception e) {
+ logger.error("SIP_ORIGIN_ALL_KafkaSpout-->filter OPTIONS error ---> " + e + " <---");
+ }
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ // TODO Auto-generated method stub
+ declarer.declare(new Fields("source", "logtype"));
+ }
+}
diff --git a/src/main/java/cn/ac/iie/topology/LogRealtimeCountTopology.java b/src/main/java/cn/ac/iie/topology/LogRealtimeCountTopology.java
new file mode 100644
index 0000000..73c7888
--- /dev/null
+++ b/src/main/java/cn/ac/iie/topology/LogRealtimeCountTopology.java
@@ -0,0 +1,94 @@
+package cn.ac.iie.topology;
+
+import cn.ac.iie.bolt.*;
+import cn.ac.iie.common.RealtimeCountConfig;
+import cn.ac.iie.spout.sip.SIP_ORIGIN_ALL_KafkaSpout;
+import org.apache.log4j.Logger;
+import org.apache.storm.Config;
+import org.apache.storm.generated.AlreadyAliveException;
+import org.apache.storm.generated.AuthorizationException;
+import org.apache.storm.generated.InvalidTopologyException;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.tuple.Fields;
+
+public class LogRealtimeCountTopology {
+ private static Logger logger = Logger.getLogger(LogRealtimeCountTopology.class);
+ private final String topologyName;
+ private final Config topologyConfig;
+ private TopologyBuilder builder;
+
+ public LogRealtimeCountTopology() {
+ this(LogRealtimeCountTopology.class.getSimpleName());
+ }
+
+ public LogRealtimeCountTopology(String topologyName) {
+ this.topologyName = topologyName;
+ topologyConfig = createTopologConfig();
+ }
+
+ private Config createTopologConfig() {
+ Config conf = new Config();
+ conf.setDebug(false);
+ conf.setMessageTimeoutSecs(120);
+ conf.setMaxSpoutPending(RealtimeCountConfig.TOPOLOGY_CONFIG_MAX_SPOUT_PENDING);
+ if (RealtimeCountConfig.TOPOLOGY_NUM_ACKS == 0) {
+ conf.setNumAckers(0);
+ }
+ return conf;
+ }
+
+ public void runLocally() throws InterruptedException {
+ topologyConfig.setMaxTaskParallelism(1);
+ StormRunner.runTopologyLocally(builder, topologyName, topologyConfig, 600);
+ }
+
+ public void runRemotely() throws AlreadyAliveException, InvalidTopologyException, AuthorizationException {
+ topologyConfig.setNumWorkers(RealtimeCountConfig.TOPOLOGY_WORKERS);
+ topologyConfig.put(Config.TOPOLOGY_TRANSFER_BUFFER_SIZE, 32);
+ topologyConfig.put(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE, 16384);//老版配置为8
+ topologyConfig.put(Config.TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE, 16384);
+
+ StormRunner.runTopologyRemotely(builder, topologyName, topologyConfig);
+ }
+
+ public void buildTopology() {
+ builder = new TopologyBuilder();
+ builder.setSpout("SIP_ORIGIN_ALL_KafkaSpout", new SIP_ORIGIN_ALL_KafkaSpout(), RealtimeCountConfig.SPOUT_PARALLELISM);
+
+ builder.setBolt("FromSpoutBufferBoltDC", new FromSpoutBufferBoltDC(), RealtimeCountConfig.BUFFER_BOLT_PARALLELISM).shuffleGrouping("SIP_ORIGIN_ALL_KafkaSpout");
+
+ builder.setBolt("GetSipOriBoltDC", new GetSipOriBoltDC(), RealtimeCountConfig.FORMAT_BOLT_PARALLELISM).shuffleGrouping("FromSpoutBufferBoltDC");
+
+ if (RealtimeCountConfig.GROUP_STRATEGY == 0) {
+ builder.setBolt("SipInsertBoltDC", new SipInsertBoltDC(RealtimeCountConfig.TOPOLOGY_TICK_TUPLE_FREQ_SECS), RealtimeCountConfig.DATABASE_BOLT_PARALLELISM).fieldsGrouping("GetSipOriBoltDC", new Fields("jsonLog"));
+ } else {
+ builder.setBolt("SipInsertBoltDC", new SipInsertBoltDC(RealtimeCountConfig.TOPOLOGY_TICK_TUPLE_FREQ_SECS), RealtimeCountConfig.DATABASE_BOLT_PARALLELISM).shuffleGrouping("GetSipOriBoltDC");
+ }
+
+ builder.setBolt("SipRealTimeCountBoltDC", new SipRealTimeCountBoltDC(RealtimeCountConfig.TOPOLOGY_TICK_TUPLE_COUNT_FREQ_SECS), RealtimeCountConfig.COUNT_BOLT_PARALLELISM).shuffleGrouping("SipInsertBoltDC");
+
+ builder.setBolt("SipRealTimeMergeBoltDC", new SipRealTimeMergeBoltDC(RealtimeCountConfig.TOPOLOGY_TICK_TUPLE_MERGE_FREQ_SECS), RealtimeCountConfig.MERGE_BOLT_PARALLELISM).fieldsGrouping("SipRealTimeCountBoltDC", new Fields("countType"));
+ }
+
+ public static void main(String[] args) throws Exception {
+ LogRealtimeCountTopology csst = null;
+ boolean runLocally = true;
+ if (args.length >= 2 && args[1].equalsIgnoreCase("remote")) {
+ runLocally = false;
+ csst = new LogRealtimeCountTopology(args[0]);
+ } else {
+ csst = new LogRealtimeCountTopology();
+ }
+
+ csst.buildTopology();
+ RealtimeCountConfig.configShow();
+
+ if (runLocally) {
+ logger.info("执行本地模式...");
+ csst.runLocally();
+ } else {
+ logger.info("执行远程部署模式...");
+ csst.runRemotely();
+ }
+ }
+}
diff --git a/src/main/java/cn/ac/iie/topology/StormRunner.java b/src/main/java/cn/ac/iie/topology/StormRunner.java
new file mode 100644
index 0000000..d2d4ab9
--- /dev/null
+++ b/src/main/java/cn/ac/iie/topology/StormRunner.java
@@ -0,0 +1,32 @@
+package cn.ac.iie.topology;
+
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.AlreadyAliveException;
+import org.apache.storm.generated.AuthorizationException;
+import org.apache.storm.generated.InvalidTopologyException;
+import org.apache.storm.topology.TopologyBuilder;
+
+public final class StormRunner{
+ private static final int MILLS_IN_SEC = 1000;
+
+ private StormRunner() {}
+
+ public static void runTopologyLocally(TopologyBuilder builder, String topologyName, Config conf, int runtimeInSeconds) throws InterruptedException {
+
+ LocalCluster localCluster = new LocalCluster();
+ localCluster.submitTopology(topologyName, conf, builder.createTopology());
+ Thread.sleep((long) runtimeInSeconds * MILLS_IN_SEC);
+ localCluster.shutdown();
+
+ }
+
+ public static void runTopologyRemotely(TopologyBuilder builder, String topologyName, Config conf ) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException {
+
+ StormSubmitter.submitTopologyWithProgressBar(topologyName, conf, builder.createTopology());
+ }
+
+
+}
diff --git a/src/main/java/cn/ac/iie/utils/CSVAlarm.java b/src/main/java/cn/ac/iie/utils/CSVAlarm.java
new file mode 100644
index 0000000..fd64b5f
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/CSVAlarm.java
@@ -0,0 +1,77 @@
+package cn.ac.iie.utils;
+
+import cn.ac.iie.common.HttpManager;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.util.EntityUtils;
+import org.apache.log4j.Logger;
+
+public class CSVAlarm {
+ //类初始化时,自动实例化,饿汉单例模式
+ private static final CSVAlarm csvAlarm = new CSVAlarm();
+ private static Logger logger = Logger.getLogger(CSVAlarm.class);
+
+ public static CSVAlarm getInfoLoadInstance(){
+ return csvAlarm;
+ }
+
+ //私有构造方法,防止外部构建
+ private CSVAlarm(){
+ }
+
+ public void csvDataLoad(String url, String topicNmane, String data) throws Exception {
+ // http client
+ DefaultHttpClient httpClient = new DefaultHttpClient();
+ String topic = topicNmane;
+ try {
+ // 需要发送的数据
+ String msg = data;//每条数据内部"\t"分割,数据间"\n"分割;//自己又换了分隔符:每条数据内部"#"分割,数据间"$"分割;
+ //开始结束时间
+// long start = System.currentTimeMillis();
+ HttpPost request = new HttpPost(url);
+ //用户名与密码,用于权限控制,传输时加密
+// request.addHeader("User", "LiMing");
+// request.addHeader("Password", "123");
+ //指定使用topic 自动绑定对应schema
+ request.addHeader("Topic", topic);
+ //Schema-Version可选,不填或为空默认使用最新版本的schema
+ request.addHeader("Schema-Version", "2");
+ //csv 或者 avro,大小写不敏感
+ request.addHeader("Format", "csv");
+ //行列分隔符默认为下值
+// request.addHeader("Row-Split", "\\n");
+// request.addHeader("Field-Split", "\\t");
+ request.addHeader("Row-Split", "\\n");
+ request.addHeader("Field-Split", ",");
+ StringEntity payload = new StringEntity(msg);
+ request.setEntity(payload);
+ HttpResponse response = httpClient.execute(request);
+ try {
+ int statuCode = response.getStatusLine().getStatusCode();
+ HttpEntity entity = response.getEntity();
+ if (statuCode == 200) {
+ logger.info("数据中心加载成功, 返回码: " + statuCode);
+ System.out.println("数据中心加载成功, 返回码: " + statuCode);
+ EntityUtils.consume(entity);
+ } else {
+ String ret = EntityUtils.toString(entity);
+ EntityUtils.consume(entity);
+ logger.info("数据中心加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + data);
+ System.out.println("数据中心加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + data);
+ logger.error("数据中心加载失败: " + ret + " --- code: " + statuCode);
+ System.out.println("数据中心加载失败: " + ret + " --- code: " + statuCode);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ } catch (Exception ex) {
+ // handle exception
+ ex.printStackTrace();
+ } finally {
+ httpClient.getConnectionManager().shutdown(); //Deprecated
+ }
+ }
+} \ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/utils/HiveDao/HdfsDataLoad_Avro.java b/src/main/java/cn/ac/iie/utils/HiveDao/HdfsDataLoad_Avro.java
new file mode 100644
index 0000000..152bc4d
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/HiveDao/HdfsDataLoad_Avro.java
@@ -0,0 +1,240 @@
+package cn.ac.iie.utils.HiveDao;
+
+import cn.ac.iie.bean.voipSipOrigin.SipOriginALL;
+import cn.ac.iie.common.RealtimeCountConfig;
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.fastjson.JSONObject;
+import com.zdjizhi.utils.StringUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
+
+import java.net.URI;
+import java.sql.Connection;
+import java.sql.Statement;
+import java.util.LinkedList;
+import java.util.UUID;
+
+/**
+ * HDFS-Hive-Avro格式载入类
+ *
+ * @author Colbert
+ */
/**
 * HDFS/Hive Avro-format loader: serializes batches of SIP origin logs to a
 * snappy-compressed Avro file on HDFS, then issues a Hive
 * "LOAD DATA INPATH" to attach that file to a partitioned table.
 *
 * NOTE(review): the lazy singleton and the per-call writer/stream instance
 * fields are not synchronized — concurrent calls to dataSipToHdfsAvro() on
 * the shared instance would clobber fileWriter/outputStream_avro. Confirm
 * callers are single-threaded.
 *
 * @author Colbert
 */
public class HdfsDataLoad_Avro {

    private static Logger logger = Logger.getLogger(HdfsDataLoad_Avro.class);
    // Shared Druid pool for the Hive LOAD statement.
    private static DruidDataSource ds = HiveDataSourceUtil.getHiveDataSource();
    private Connection con = null;
    private Statement stmt = null;
    // HDFS output stream for the current call; opened and closed per batch.
    private FSDataOutputStream outputStream_avro = null;

    // Avro writer wrapping outputStream_avro; also per-call state.
    private DataFileWriter<GenericRecord> fileWriter = null;

    private static HdfsDataLoad_Avro hdfsDataLoad_avro = null;
    private static FileSystem fileSystem = null;

    // Private constructor: establishes the HDFS connection once.
    private HdfsDataLoad_Avro() {
        getHdfsConnection();
    }

    // Lazy singleton accessor; rebuilds the instance if the FileSystem was lost.
    // NOTE(review): not thread-safe — harmless only under single-threaded use.
    public static HdfsDataLoad_Avro getHdfsInstance() {
        if (hdfsDataLoad_avro == null || fileSystem == null) {
            hdfsDataLoad_avro = new HdfsDataLoad_Avro();
        }
        return hdfsDataLoad_avro;
    }

    /**
     * Creates the HDFS connection (append-enabled) using the URL and user
     * from RealtimeCountConfig.
     */
    private void getHdfsConnection() {
        try {
            Configuration conf = new Configuration();
            conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
            conf.setBoolean("dfs.support.append", true);
            // Tolerate datanode loss during append instead of replacing nodes.
            conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
            conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);
            fileSystem = FileSystem.get(new URI(RealtimeCountConfig.HDFS_URL), conf, RealtimeCountConfig.HDFS_USER);
        } catch (Exception e) {
            logger.error("HdfsDataLoad_Avro getHdfsConnection method is error !!!--->{" + e + "}<---");
            e.printStackTrace();
        }
    }


    /**
     * Writes a batch of JSON log lines to a new Avro file under
     * .../&lt;topic&gt;/&lt;partition&gt;/ and then loads that file into Hive.
     *
     * @param partition time partition, e.g. 20190730
     * @param data      JSON strings, one SipOriginALL record each
     * @param topicName topic name; becomes the HDFS directory and file prefix
     * @param logType   record family; only "origin" is currently handled
     * @param timeSend  stat_time value stamped onto every record
     */
    public void dataSipToHdfsAvro(String partition, LinkedList<String> data, String topicName, String logType, Long timeSend) {
        long time = timeSend;
        String uuid = UUID.randomUUID().toString().replaceAll("-", "").toLowerCase();
        String hdfs_path = RealtimeCountConfig.HDFS_URL + RealtimeCountConfig.HDFS_PATH + topicName.toLowerCase() + "/" + partition + "/" + topicName.toLowerCase() + "-" + uuid + ".avro";// Format: hdfs://ns1/input/frag-media-expire-log/20190730/frag-media-expire-log-d84772e8257048f3be1ca82f8e35f215.avro
        try {
            Path path = new Path(hdfs_path);
            fileSystem.createNewFile(path);
            outputStream_avro = fileSystem.append(path);

            switch (logType) {
                case "origin":
                    // Inline Avro schema mirroring the SipOriginALL bean field-for-field.
                    String schemaJsonSip = "{\"type\": \"record\",\"name\": \"siplog\",\"fields\": [{\"name\": \"call_id\", \"type\": [\"string\", \"null\"]},{\"name\": \"clj_ip\", \"type\": [\"string\", \"null\"]},{\"name\": \"found_time\", \"type\": \"int\"},{\"name\": \"src_ip\", \"type\": [\"string\", \"null\"]},{\"name\": \"src_location_nation\", \"type\": [\"string\", \"null\"]},{\"name\": \"src_location_nation_code\", \"type\": [\"string\", \"null\"]},{\"name\": \"src_location_region\", \"type\": [\"string\", \"null\"]},{\"name\": \"src_port\", \"type\": \"int\"},{\"name\": \"dst_ip\", \"type\": [\"string\", \"null\"]},{\"name\": \"ip_type\", \"type\": [\"string\", \"null\"]},{\"name\": \"dst_location_nation\", \"type\": [\"string\", \"null\"]},{\"name\": \"dst_location_nation_code\", \"type\": [\"string\", \"null\"]},{\"name\": \"dst_location_region\", \"type\": [\"string\", \"null\"]},{\"name\": \"dst_port\", \"type\": \"int\"},{\"name\": \"method\", \"type\": [\"string\", \"null\"]},{\"name\": \"request_uri\", \"type\": [\"string\", \"null\"]},{\"name\": \"user_name\", \"type\": [\"string\", \"null\"]},{\"name\": \"service_domain\", \"type\": [\"string\", \"null\"]},{\"name\": \"service_domain_valid\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_stat\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_stat_format\", \"type\": [\"string\", \"null\"]},{\"name\": \"from\", \"type\": [\"string\", \"null\"]},{\"name\": \"from_nickname\", \"type\": [\"string\", \"null\"]},{\"name\": \"from_usr_name\", \"type\": [\"string\", \"null\"]},{\"name\": \"from_ser_domain\", \"type\": [\"string\", \"null\"]},{\"name\": \"from_ser_domain_valid\", \"type\": [\"string\", \"null\"]},{\"name\": \"from_tag\", \"type\": [\"string\", \"null\"]},{\"name\": \"to\", \"type\": [\"string\", \"null\"]},{\"name\": \"to_nickname\", \"type\": [\"string\", \"null\"]},{\"name\": \"to_usr_name\", \"type\": [\"string\", \"null\"]},{\"name\": \"to_ser_domain\", \"type\": [\"string\", \"null\"]},{\"name\": \"to_ser_domain_valid\", \"type\": [\"string\", \"null\"]},{\"name\": \"to_tag\", \"type\": [\"string\", \"null\"]},{\"name\": \"cseq\", \"type\": [\"string\", \"null\"]},{\"name\": \"cseq_method\", \"type\": [\"string\", \"null\"]},{\"name\": \"user_agent\", \"type\": [\"string\", \"null\"]},{\"name\": \"device_type\", \"type\": [\"string\", \"null\"]},{\"name\": \"max_forwards\", \"type\": [\"string\", \"null\"]},{\"name\": \"server\", \"type\": [\"string\", \"null\"]},{\"name\": \"server_type\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_via_json\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_contact\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_contact_nickname\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_contact_usr_name\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_contact_ser_domain\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_ser_domain_valid\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_record_route_json\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_route_json\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_expires\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_others\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_content_type\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_content\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_via_json\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_contact\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_contact_nickname\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_contact_usr_name\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_contact_ser_domain\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_ser_domain_valid\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_record_route_json\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_route_json\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_expires\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_others\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_content_type\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_content\", \"type\": [\"string\", \"null\"]},{\"name\": \"req_coding\", \"type\": [\"string\", \"null\"]},{\"name\": \"res_coding\", \"type\": [\"string\", \"null\"]},{\"name\": \"stat_time\", \"type\": \"int\"}]}";
                    Schema schemaSip = new Schema.Parser().parse(schemaJsonSip);
                    fileWriter = new DataFileWriter<GenericRecord>(new GenericDatumWriter<GenericRecord>(schemaSip)).setSyncInterval(100);
                    fileWriter.setCodec(CodecFactory.snappyCodec());
                    fileWriter.create(schemaSip, outputStream_avro);
                    for (String dataJson : data) {
                        SipOriginALL sipOriginAllLog = JSONObject.parseObject(dataJson, SipOriginALL.class);
                        // Stamp the batch send time onto every record.
                        sipOriginAllLog.setStat_time(time);
                        GenericRecord recordSip = new GenericData.Record(schemaSip);

                        // Field-by-field copy from the bean into the Avro record.
                        recordSip.put("call_id", sipOriginAllLog.getCall_ID());
                        recordSip.put("clj_ip", sipOriginAllLog.getCLJ_IP());
                        recordSip.put("found_time", sipOriginAllLog.getFound_Time());
                        recordSip.put("src_ip", sipOriginAllLog.getSRC_IP());
                        recordSip.put("src_location_nation", sipOriginAllLog.getSRC_LOCATION_NATION());
                        recordSip.put("src_location_nation_code", sipOriginAllLog.getSRC_LOCATION_NATION_CODE());
                        recordSip.put("src_location_region", sipOriginAllLog.getSRC_LOCATION_REGION());
                        recordSip.put("src_port", sipOriginAllLog.getSRC_PORT());
                        recordSip.put("dst_ip", sipOriginAllLog.getDST_IP());
                        recordSip.put("ip_type", sipOriginAllLog.getIP_TYPE());
                        recordSip.put("dst_location_nation", sipOriginAllLog.getDST_LOCATION_NATION());
                        recordSip.put("dst_location_nation_code", sipOriginAllLog.getDST_LOCATION_NATION_CODE());
                        recordSip.put("dst_location_region", sipOriginAllLog.getDST_LOCATION_REGION());
                        recordSip.put("dst_port", sipOriginAllLog.getDST_PORT());
                        recordSip.put("method", sipOriginAllLog.getMethod());
                        recordSip.put("request_uri", sipOriginAllLog.getRequest_URI());
                        recordSip.put("user_name", sipOriginAllLog.getUser_name());
                        recordSip.put("service_domain", sipOriginAllLog.getService_domain());
                        recordSip.put("service_domain_valid", sipOriginAllLog.getService_domain_valid());
                        recordSip.put("res_stat", sipOriginAllLog.getRes_stat());
                        recordSip.put("res_stat_format", sipOriginAllLog.getRes_stat_format());
                        recordSip.put("from", sipOriginAllLog.getFrom());
                        recordSip.put("from_nickname", sipOriginAllLog.getFrom_Nickname());
                        recordSip.put("from_usr_name", sipOriginAllLog.getFrom_usr_name());
                        recordSip.put("from_ser_domain", sipOriginAllLog.getFrom_ser_domain());
                        recordSip.put("from_ser_domain_valid", sipOriginAllLog.getFrom_ser_domain_valid());
                        recordSip.put("from_tag", sipOriginAllLog.getFrom_tag());
                        recordSip.put("to", sipOriginAllLog.getTo());
                        recordSip.put("to_nickname", sipOriginAllLog.getTo_Nickname());
                        recordSip.put("to_usr_name", sipOriginAllLog.getTo_usr_name());
                        recordSip.put("to_ser_domain", sipOriginAllLog.getTo_ser_domain());
                        recordSip.put("to_ser_domain_valid", sipOriginAllLog.getTo_ser_domain_valid());
                        recordSip.put("to_tag", sipOriginAllLog.getTo_tag());
                        recordSip.put("cseq", sipOriginAllLog.getCseq());
                        recordSip.put("cseq_method", sipOriginAllLog.getCseq_method());
                        recordSip.put("user_agent", sipOriginAllLog.getUser_Agent());
                        recordSip.put("device_type", sipOriginAllLog.getDevice_type());
                        recordSip.put("max_forwards", sipOriginAllLog.getMax_Forwards());
                        recordSip.put("server", sipOriginAllLog.getServer());
                        recordSip.put("server_type", sipOriginAllLog.getServer_type());
                        recordSip.put("req_via_json", sipOriginAllLog.getReq_Via_Json());
                        recordSip.put("req_contact", sipOriginAllLog.getReq_Contact());
                        recordSip.put("req_contact_nickname", sipOriginAllLog.getReq_Contact_Nickname());
                        recordSip.put("req_contact_usr_name", sipOriginAllLog.getReq_Contact_usr_name());
                        recordSip.put("req_contact_ser_domain", sipOriginAllLog.getReq_Contact_ser_domain());
                        recordSip.put("req_ser_domain_valid", sipOriginAllLog.getReq_ser_domain_valid());
                        recordSip.put("req_record_route_json", sipOriginAllLog.getReq_Record_Route_Json());
                        recordSip.put("req_route_json", sipOriginAllLog.getReq_Route_Json());
                        recordSip.put("req_expires", sipOriginAllLog.getReq_Expires());
                        recordSip.put("req_others", sipOriginAllLog.getReq_Others());
                        recordSip.put("req_content_type", sipOriginAllLog.getReq_Content_Type());
                        recordSip.put("req_content", sipOriginAllLog.getReq_Content());
                        recordSip.put("res_via_json", sipOriginAllLog.getRes_Via_Json());
                        recordSip.put("res_contact", sipOriginAllLog.getRes_Contact());
                        recordSip.put("res_contact_nickname", sipOriginAllLog.getRes_Contact_Nickname());
                        recordSip.put("res_contact_usr_name", sipOriginAllLog.getRes_Contact_usr_name());
                        recordSip.put("res_contact_ser_domain", sipOriginAllLog.getRes_Contact_ser_domain());
                        recordSip.put("res_ser_domain_valid", sipOriginAllLog.getRes_ser_domain_valid());
                        recordSip.put("res_record_route_json", sipOriginAllLog.getRes_Record_Route_Json());
                        recordSip.put("res_route_json", sipOriginAllLog.getRes_Route_Json());
                        recordSip.put("res_expires", sipOriginAllLog.getRes_Expires());
                        recordSip.put("res_others", sipOriginAllLog.getRes_Others());
                        recordSip.put("res_content_type", sipOriginAllLog.getRes_Content_Type());
                        recordSip.put("res_content", sipOriginAllLog.getRes_Content());
                        recordSip.put("req_coding", sipOriginAllLog.getReq_coding());
                        recordSip.put("res_coding", sipOriginAllLog.getRes_coding());
                        recordSip.put("stat_time", sipOriginAllLog.getStat_time());

                        // Flush per record so a crash loses at most one record.
                        fileWriter.append(recordSip);
                        fileWriter.flush();
                    }
                    break;
                default:
                    logger.error("HdfsDataLoad_Avro toHdfs logType is error !!!This logType is--->{" + logType + "}<---");
                    break;
            }

            logger.warn("HdfsDataLoad_Avro data to HDFS Successful,hdfs_path is -->{" + hdfs_path + "}<---");

            // NOTE(review): these closes are not in a finally block, so an
            // exception above leaks the writer/stream until the next call.
            if (fileWriter != null) {
                fileWriter.close();
                fileWriter = null;
            }
            if (outputStream_avro != null) {
                outputStream_avro.close();
                outputStream_avro = null;
            }

            // Second dispatch: attach the newly written file to the Hive table.
            switch (logType) {
                case "origin":
                    String tablenameSip = RealtimeCountConfig.HIVE_SIP_CLEAN_TABLE;
                    loadDataToHiveAvro(tablenameSip, partition, hdfs_path);
                    break;
//                case "route":
//                    String tablenameFrag = RealtimeCountConfig.HIVE_SIP_ROUTE_TABLE;
//                    loadDataToHiveAvro(tablenameFrag, partition, hdfs_path);
//                    break;
                default:
                    logger.error("HdfsDataLoad_Avro toHive logType is error !!!This logType is--->{" + logType + "}<---");
                    break;
            }

        } catch (Exception e) {
            logger.error("HdfsDataLoad_Avro dataToHdfs method is error !!!--->{" + e + "}<---");
            e.printStackTrace();
        }
    }

    /**
     * Runs "LOAD DATA INPATH ... INTO TABLE ... PARTITION(time_partition=...)"
     * against Hive for the given Avro file; JDBC resources are released in
     * the finally block.
     */
    public void loadDataToHiveAvro(String tablename, String partition, String hdfs_path) {
        String dataUrl = hdfs_path;
        StringBuffer sb = new StringBuffer();
        try {
            con = ds.getConnection();
            stmt = con.createStatement();
            sb.append("load data inpath ").append("'").append(dataUrl).append("'")
                    .append(" into table ").append(tablename).append(" partition( time_partition=").append(partition).append(")");
            stmt.execute(sb.toString());

            logger.warn("HdfsDataLoad_Avro data to Hive Successful,dataUrl is -->{" + dataUrl + "}<---");

        } catch (Exception e) {
            logger.error("HdfsDataLoad_Avro loadDataToHive method is error !!!--->{" + e + "}<---");
            e.printStackTrace();
        } finally {
            try {
                if (stmt != null) {
                    stmt.close();
                    stmt = null;
                }
                if (con != null) {
                    con.close();
                    con = null;
                }
            } catch (Exception e) {
                logger.error("HdfsDataLoad_Avro loadDataToHive when close is error !!!--->{" + e + "}<---");
                e.printStackTrace();
            }
        }
    }

    // Returns the "$#$" placeholder for blank strings.
    // NOTE(review): the null/empty/length checks after isBlank look redundant
    // — presumably StringUtil.isBlank already covers them; confirm.
    private String strDefaultValue(String str) {
        return (StringUtil.isBlank(str)
                || str == null
                || str.equals("")
                || str.length() == 0) ? "$#$" : str;
    }
}
diff --git a/src/main/java/cn/ac/iie/utils/HiveDao/HiveDataSourceUtil.java b/src/main/java/cn/ac/iie/utils/HiveDao/HiveDataSourceUtil.java
new file mode 100644
index 0000000..9223f92
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/HiveDao/HiveDataSourceUtil.java
@@ -0,0 +1,187 @@
+package cn.ac.iie.utils.HiveDao;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.fastjson.JSONException;
+import com.alibaba.fastjson.JSONObject;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import net.sf.json.JSONArray;
+import org.apache.log4j.Logger;
+
+import java.sql.*;
+import java.util.Properties;
+
+/**
+ * Hive-JDBC连接池
+ *
+ * @author Colbert
+ */
+public class HiveDataSourceUtil {
+ private static DruidDataSource hiveDataSource = new DruidDataSource();
+ public static Connection conn = null;
+ private static final Logger logger = Logger.getLogger(HiveDataSourceUtil.class);
+
+ public static DruidDataSource getHiveDataSource() {
+ if (hiveDataSource.isInited()) {
+ return hiveDataSource;
+ }
+
+ try {
+ Properties dsProp = new Properties();
+ dsProp.load(HiveDataSourceUtil.class.getClassLoader().getResourceAsStream("hive.properties"));
+ hiveDataSource.setDriverClassName(dsProp.getProperty("hive_jdbc_drivers"));
+ //基本属性 url、user、password
+ hiveDataSource.setUrl(dsProp.getProperty("hive_jdbc_url"));
+ hiveDataSource.setUsername(dsProp.getProperty("hive_jdbc_username"));
+ hiveDataSource.setPassword(dsProp.getProperty("hive_jdbc_password"));
+
+ //配置初始化大小、最小、最大
+ hiveDataSource.setInitialSize(Integer.parseInt(dsProp.getProperty("hive_initialSize")));
+ hiveDataSource.setMinIdle(Integer.parseInt(dsProp.getProperty("hive_minIdle")));
+ hiveDataSource.setMaxActive(Integer.parseInt(dsProp.getProperty("hive_maxActive")));
+
+ //配置获取连接等待超时的时间
+ hiveDataSource.setMaxWait(Integer.parseInt(dsProp.getProperty("hive_maxWait")));
+
+ //配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
+ hiveDataSource.setTimeBetweenEvictionRunsMillis(60000);
+
+ //配置一个连接在池中最小生存的时间,单位是毫秒
+ hiveDataSource.setMinEvictableIdleTimeMillis(300000);
+
+ hiveDataSource.setValidationQuery("SELECT 1");
+ hiveDataSource.setTestWhileIdle(true);
+ hiveDataSource.setTestOnBorrow(true);
+// hiveDataSource.setKeepAlive(true);
+
+ //打开PSCache,并且指定每个连接上PSCache的大小
+ hiveDataSource.setPoolPreparedStatements(true);
+ hiveDataSource.setMaxPoolPreparedStatementPerConnectionSize(20);
+
+ //配置监控统计拦截的filters
+// hiveDataSource.setFilters("stat");
+
+ hiveDataSource.init();
+ } catch (Exception e) {
+ e.printStackTrace();
+ closeHiveDataSource();
+ }
+ return hiveDataSource;
+ }
+
+ /**
+ * @Description:关闭Hive连接池
+ */
+ public static void closeHiveDataSource() {
+ if (hiveDataSource != null) {
+ hiveDataSource.close();
+ }
+ }
+
+ /**
+ * @return
+ * @Description:获取Hive连接
+ */
+ public static Connection getHiveConn() {
+ try {
+ hiveDataSource = getHiveDataSource();
+ conn = hiveDataSource.getConnection();
+ } catch (Exception e) {
+ logger.error("HiveDataSourceUtil--" + e + ":获取Hive连接失败!");
+ }
+ return conn;
+ }
+
+ /**
+ * @Description:关闭Hive数据连接
+ */
+ public static void closeConn() {
+ try {
+ if (conn != null) {
+ conn.close();
+ }
+ } catch (Exception e) {
+ logger.error("HiveDataSourceUtil--" + e + ":关闭Hive-conn连接失败!");
+ }
+ }
+
+
+ public static void main(String[] args) throws Exception {
+ DruidDataSource ds = HiveDataSourceUtil.getHiveDataSource();
+ Connection conn = ds.getConnection();
+ Statement stmt = null;
+ if (conn == null) {
+ System.out.println("null");
+ } else {
+ System.out.println("conn");
+ stmt = conn.createStatement();
+ ResultSet res = stmt.executeQuery("select * from test.frag_media_expire_log limit 10");
+ int i = 0;
+ while (res.next()) {
+ if (i < 10) {
+ System.out.println(res.getString(2));
+ i++;
+ }
+ }
+// String s = resultSetToJson(res);
+// String s = ResultSetToJsonString(res);
+// System.out.println(s);
+ }
+
+ stmt.close();
+ conn.close();
+ }
+
+ public static String resultSetToJson(ResultSet rs) throws SQLException, JSONException {
+ // json数组
+ JSONArray array = new JSONArray();
+
+ // 获取列数
+ ResultSetMetaData metaData = rs.getMetaData();
+ int columnCount = metaData.getColumnCount();
+
+ // 遍历ResultSet中的每条数据
+ while (rs.next()) {
+ JSONObject jsonObj = new JSONObject();
+
+ // 遍历每一列
+ for (int i = 1; i <= columnCount; i++) {
+ String columnName = metaData.getColumnLabel(i);
+ String value = rs.getString(columnName);
+ jsonObj.put(columnName, value);
+ }
+// array.put(jsonObj);
+ array.add(jsonObj);
+ }
+
+ return array.toString();
+ }
+
+ public static final JsonObject ResultSetToJsonObject(ResultSet rs) {
+ JsonObject element = null;
+ JsonArray ja = new JsonArray();
+ JsonObject jo = new JsonObject();
+ ResultSetMetaData rsmd = null;
+ String columnName, columnValue = null;
+ try {
+ rsmd = rs.getMetaData();
+ while (rs.next()) {
+ element = new JsonObject();
+ for (int i = 0; i < rsmd.getColumnCount(); i++) {
+ columnName = rsmd.getColumnName(i + 1);
+ columnValue = rs.getString(columnName);
+ element.addProperty(columnName, columnValue);
+ }
+ ja.add(element);
+ }
+ jo.add("result", ja);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ return jo;
+ }
+
+ public static final String ResultSetToJsonString(ResultSet rs) {
+ return ResultSetToJsonObject(rs).toString();
+ }
+}
diff --git a/src/main/java/cn/ac/iie/utils/IPIPLibrary/Ipip.java b/src/main/java/cn/ac/iie/utils/IPIPLibrary/Ipip.java
new file mode 100644
index 0000000..ec1febe
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/IPIPLibrary/Ipip.java
@@ -0,0 +1,189 @@
+package cn.ac.iie.utils.IPIPLibrary;
+
+/**
+ * Created by lizhao on 2017-9-15.
+ */
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Arrays;
+import java.nio.charset.Charset;
+import java.util.Random;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
/**
 * Offline IP geolocation lookup over the 17mon / ipip.net "dat" database.
 * The data file is read fully into memory; {@link #find(String)} answers
 * lookups from the in-memory prefix index. When {@code enableFileWatch} is
 * set before {@link #load(String)}, a background task reloads the file on
 * modification.
 *
 * <p>All state is static; data reloads and record reads are serialised with
 * {@code lock}.
 */
public class Ipip {

    /** Builds a pseudo-random dotted-quad whose last octet is always 0 (demo helper). */
    public static String randomIp() {
        Random r = new Random();
        StringBuffer str = new StringBuffer();
        str.append(r.nextInt(1000000) % 255);
        str.append(".");
        str.append(r.nextInt(1000000) % 255);
        str.append(".");
        str.append(r.nextInt(1000000) % 255);
        str.append(".");
        str.append(0);

        return str.toString();
    }

    /** Ad-hoc smoke test: loads a local database file and times 100 random lookups. */
    public static void main(String[] args){
        Ipip.load("E:\\ipip.dat");

        // primitive long timing locals — the boxed Long originals only added autoboxing
        long st = System.nanoTime();
        for (int i = 0; i < 100; i++)
        {
            System.out.println(Arrays.toString(Ipip.find(randomIp())));
        }
        long et = System.nanoTime();
        System.out.println((et - st) / 1000 / 1000);
    }

    /** Set to true BEFORE calling load() to have the data file re-read when it changes. */
    public static boolean enableFileWatch = false;

    private static int offset;
    private static int[] index = new int[256];
    private static ByteBuffer dataBuffer;
    private static ByteBuffer indexBuffer;
    // volatile primitive: written by the watcher thread, read on reload checks
    private static volatile long lastModifyTime = 0L;
    private static File ipFile ;
    private static ReentrantLock lock = new ReentrantLock();

    /** Loads the database file and, if enabled, starts the change watcher. */
    public static void load(String filename) {
        ipFile = new File(filename);
        load();
        if (enableFileWatch) {
            watch();
        }
    }

    /**
     * Loads the database file, optionally validating a minimum size first.
     *
     * @param filename path of the .dat database file
     * @param strict   when true, reject files smaller than 512 KiB
     * @throws Exception if strict validation fails
     */
    public static void load(String filename, boolean strict) throws Exception {
        ipFile = new File(filename);
        if (strict) {
            int contentLength = Long.valueOf(ipFile.length()).intValue();
            if (contentLength < 512 * 1024) {
                throw new Exception("ip data file error.");
            }
        }
        load();
        if (enableFileWatch) {
            watch();
        }
    }

    /**
     * Looks up an IPv4 address and returns its tab-separated location fields.
     *
     * @param ip dotted-quad IPv4 string; must contain at least one '.'
     * @return location fields exactly as stored in the database record
     */
    public static String[] find(String ip) {
        // First octet selects one of the 256 prefix-index buckets.
        // Integer.parseInt replaces the deprecated new Integer(String) constructor.
        int ip_prefix_value = Integer.parseInt(ip.substring(0, ip.indexOf(".")));
        long ip2long_value = ip2long(ip);
        int start = index[ip_prefix_value];
        int max_comp_len = offset - 1028;
        long index_offset = -1;
        int index_length = -1;
        byte b = 0;
        for (start = start * 8 + 1024; start < max_comp_len; start += 8) {
            if (int2long(indexBuffer.getInt(start)) >= ip2long_value) {
                // Entry layout: 3-byte little-endian data offset + 1-byte record length.
                index_offset = bytesToLong(b, indexBuffer.get(start + 6), indexBuffer.get(start + 5), indexBuffer.get(start + 4));
                index_length = 0xFF & indexBuffer.get(start + 7);
                break;
            }
        }

        // NOTE(review): if no index entry matches, index_length stays -1 and the
        // allocation below throws — callers are expected to pass routable IPv4 only.
        byte[] areaBytes;

        lock.lock();
        try {
            dataBuffer.position(offset + (int) index_offset - 1024);
            areaBytes = new byte[index_length];
            dataBuffer.get(areaBytes, 0, index_length);
        } finally {
            lock.unlock();
        }

        return new String(areaBytes, Charset.forName("UTF-8")).split("\t", -1);
    }

    /** Polls the file's mtime every 5 seconds and reloads when it advances. */
    private static void watch() {
        Executors.newScheduledThreadPool(1).scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                long time = ipFile.lastModified();
                if (time > lastModifyTime) {
                    lastModifyTime = time;
                    load();
                }
            }
        }, 1000L, 5000L, TimeUnit.MILLISECONDS);
    }

    /** Reads the whole file into dataBuffer and rebuilds the 256-entry prefix index. */
    private static void load() {
        lastModifyTime = ipFile.lastModified();
        FileInputStream fin = null;
        lock.lock();
        try {
            dataBuffer = ByteBuffer.allocate(Long.valueOf(ipFile.length()).intValue());
            fin = new FileInputStream(ipFile);
            int readBytesLength;
            byte[] chunk = new byte[4096];
            while (fin.available() > 0) {
                readBytesLength = fin.read(chunk);
                dataBuffer.put(chunk, 0, readBytesLength);
            }
            dataBuffer.position(0);
            int indexLength = dataBuffer.getInt();
            byte[] indexBytes = new byte[indexLength];
            // per the dat format the 4-byte length header is not part of the index payload
            dataBuffer.get(indexBytes, 0, indexLength - 4);
            indexBuffer = ByteBuffer.wrap(indexBytes);
            indexBuffer.order(ByteOrder.LITTLE_ENDIAN);
            offset = indexLength;

            int loop = 0;
            while (loop++ < 256) {
                index[loop - 1] = indexBuffer.getInt();
            }
            indexBuffer.order(ByteOrder.BIG_ENDIAN);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        } finally {
            try {
                if (fin != null) {
                    fin.close();
                }
            } catch (IOException e){
                e.printStackTrace();
            }
            lock.unlock();
        }
    }

    /** Reassembles four bytes (given high-to-low) into an unsigned 32-bit value as long. */
    private static long bytesToLong(byte a, byte b, byte c, byte d) {
        return int2long((((a & 0xff) << 24) | ((b & 0xff) << 16) | ((c & 0xff) << 8) | (d & 0xff)));
    }

    /** Packs a dotted-quad string into a signed 32-bit int (network byte order). */
    private static int str2Ip(String ip) {
        String[] ss = ip.split("\\.");
        int a, b, c, d;
        a = Integer.parseInt(ss[0]);
        b = Integer.parseInt(ss[1]);
        c = Integer.parseInt(ss[2]);
        d = Integer.parseInt(ss[3]);
        return (a << 24) | (b << 16) | (c << 8) | d;
    }

    private static long ip2long(String ip) {
        return int2long(str2Ip(ip));
    }

    /** Reinterprets a signed int as an unsigned 32-bit value carried in a long. */
    private static long int2long(int i) {
        long l = i & 0x7fffffffL;
        if (i < 0) {
            l |= 0x080000000L;
        }
        return l;
    }
}
diff --git a/src/main/java/cn/ac/iie/utils/RealtimeCountConfigurations.java b/src/main/java/cn/ac/iie/utils/RealtimeCountConfigurations.java
new file mode 100644
index 0000000..2a21798
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/RealtimeCountConfigurations.java
@@ -0,0 +1,67 @@
+package cn.ac.iie.utils;
+
+import java.util.Properties;
+
+//import com.nis.util.StringUtil;
/**
 * Static accessor over two classpath property files. The {@code type}
 * selector picks the file: 0 = realtime_config.properties (common),
 * 1 = storm_config.properties (service); any other value yields null.
 */
public final class RealtimeCountConfigurations {

    private static Properties propCommon = new Properties();//0
    private static Properties propService = new Properties();//1

    /** Raw string value from the selected file, or null for an unknown type. */
    public static String getStringProperty(Integer type, String key) {
        switch (type) {
            case 0:
                return propCommon.getProperty(key);
            case 1:
                return propService.getProperty(key);
            default:
                return null;
        }
    }


    /** Value parsed as an int, or null for an unknown type. */
    public static Integer getIntProperty(Integer type, String key) {
        switch (type) {
            case 0:
                return Integer.parseInt(propCommon.getProperty(key));
            case 1:
                return Integer.parseInt(propService.getProperty(key));
            default:
                return null;
        }
    }

    /** Value parsed as a long, or null for an unknown type. */
    public static Long getLongProperty(Integer type, String key) {
        switch (type) {
            case 0:
                return Long.parseLong(propCommon.getProperty(key));
            case 1:
                return Long.parseLong(propService.getProperty(key));
            default:
                return null;
        }
    }

    /** True iff the trimmed, lower-cased value equals "true"; null for an unknown type. */
    public static Boolean getBooleanProperty(Integer type, String key) {
        switch (type) {
            case 0:
                return propCommon.getProperty(key).toLowerCase().trim().equals("true");
            case 1:
                return propService.getProperty(key).toLowerCase().trim().equals("true");
            default:
                return null;
        }
    }

    static {
        // Both property files are loaded from the classpath once, at class init.
        try {
            propCommon.load(RealtimeCountConfigurations.class.getClassLoader().getResourceAsStream("realtime_config.properties"));//0
            propService.load(RealtimeCountConfigurations.class.getClassLoader().getResourceAsStream("storm_config.properties"));//1
            System.out.println("realtime_config.properties加载成功");
            System.out.println("storm_config.properties加载成功");

        } catch (Exception e) {
            // On any failure both handles are nulled; subsequent getters will NPE.
            propCommon = null;
            propService = null;
            System.err.println("RealtimeCountConfigurations配置文件加载失败");
        }
    }
}
diff --git a/src/main/java/cn/ac/iie/utils/TupleUtils.java b/src/main/java/cn/ac/iie/utils/TupleUtils.java
new file mode 100644
index 0000000..1a5889a
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/TupleUtils.java
@@ -0,0 +1,13 @@
+package cn.ac.iie.utils;
+
+import org.apache.storm.Constants;
+import org.apache.storm.tuple.Tuple;
+
+public final class TupleUtils {
+ //判断是否系统自动发送的Tuple
+ public static boolean isTick(Tuple tuple) {
+ return tuple != null
+ && Constants.SYSTEM_COMPONENT_ID.equals(tuple.getSourceComponent())
+ && Constants.SYSTEM_TICK_STREAM_ID.equals(tuple.getSourceStreamId());
+ }
+}
diff --git a/src/main/java/cn/ac/iie/utils/dao/ClickHouseUtils.java b/src/main/java/cn/ac/iie/utils/dao/ClickHouseUtils.java
new file mode 100644
index 0000000..3dced6d
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/dao/ClickHouseUtils.java
@@ -0,0 +1,47 @@
+package cn.ac.iie.utils.dao;
+
+
+import com.zdjizhi.utils.StringUtil;
+
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+
+/**
+ * ClickHouse 入库类型转换类
+ *
+ * @author Administrator
+ */
+public class ClickHouseUtils {
+
+ public static void setInt(PreparedStatement pstms, int index, String str) {
+ try {
+ int num = 0;
+ if (str != null) {
+ num = Integer.parseInt(str);
+ }
+ pstms.setInt(index, num);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void setString(PreparedStatement pstmts, int index, String str) throws Exception {
+ if (StringUtil.isNotBlank(str)) {
+ pstmts.setString(index, str);
+ } else {
+ str = "";
+ pstmts.setString(index, str);
+ }
+ }
+
+ public static void setTimeStamp(PreparedStatement pstmts, int index, String str) throws Exception {
+ pstmts.setTimestamp(index, new Timestamp(Long.parseLong(str + "000")));
+ }
+
+ public static void setLong(PreparedStatement pstmts, int index, String str) throws Exception {
+ pstmts.setLong(index, Long.parseLong(str));
+ }
+
+
+}
diff --git a/src/main/java/cn/ac/iie/utils/getjson/GetJsonToKafkaUtils.java b/src/main/java/cn/ac/iie/utils/getjson/GetJsonToKafkaUtils.java
new file mode 100644
index 0000000..a73aa7d
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/getjson/GetJsonToKafkaUtils.java
@@ -0,0 +1,835 @@
+//package cn.ac.iie.utils.getjson;
+//
+//import cn.ac.iie.bean.dk.DK_BEHAVIOR_LOG;
+//import cn.ac.iie.bean.mm.MM_AV_IP_LOG;
+//import cn.ac.iie.bean.mm.MM_VOIP_IP_LOG;
+//import cn.ac.iie.bean.ntc.NTC_CONN_RECORD_LOG;
+//import cn.ac.iie.bean.pxy.PXY_HTTP_LOG;
+//import cn.ac.iie.common.FlowWriteConfig;
+//import cn.ac.iie.common.RealtimeCountConfig;
+//import cn.ac.iie.utils.ordinary.DecodeUtils;
+//import cn.ac.iie.utils.ordinary.MD5Utils;
+//import cn.ac.iie.utils.ordinary.TransFormUtils;
+//import com.alibaba.fastjson.JSONObject;
+//import com.alibaba.fastjson.parser.Feature;
+//import com.zdjizhi.utils.IpLookup;
+//import com.zdjizhi.utils.StringUtil;
+//import org.apache.log4j.Logger;
+//
+///**
+// * @author antlee
+// * @date 2018/7/19
+// */
+//public class GetJsonToKafkaUtils {
+// private static Logger logger = Logger.getLogger(GetJsonToKafkaUtils.class);
+// private static IpLookup ipLookup = new IpLookup.Builder(false)
+// .loadDataFileV4(RealtimeCountConfig.IP_LIBRARY)
+// .loadDataFileV6(RealtimeCountConfig.IP_LIBRARY)
+// .build();
+//
+//
+// /**
+// * NTC topic对准类
+// *
+// * @param message 日志
+// * @param topic topic名称
+// * @return 补全日志
+// */
+// public static String getNTCData(String message, String topic) {
+// switch (topic) {
+// case "NTC-HTTP-LOG":
+// return httpReplenish(message);
+// case "NTC-MAIL-LOG":
+// return mailReplenish(message);
+// case "NTC-IP-LOG":
+// return ipReplenish(message);
+// case "NTC-APP-LOG":
+// return appReplenish(message);
+// case "NTC-SSL-LOG":
+// return sslReplenish(message);
+// case "NTC-DDOS-LOG":
+// return ddosReplenish(message);
+// case "NTC-DNS-LOG":
+// return dnsReplenish(message);
+// case "NTC-COLLECT-MAIL-LOG":
+// return mailReplenish(message);
+// case "NTC-OPENVPN-LOG":
+// return openVpnLog(message);
+// case "NTC-P2P-LOG":
+// return p2pReplenish(message);
+// case "PXY-HTTP-LOG":
+// return pxyHttpReplenish(message);
+// case "NTC-BGP-LOG":
+// return bgpReplenish(message);
+// case "NTC-FTP-LOG":
+// return ftpReplenish(message);
+// case "NTC-STREAMING-MEDIA-LOG":
+// return streamMediaReplenish(message);
+// case "NTC-VOIP-LOG":
+// return voipReplenish(message);
+//
+//// case "NTC-COLLECT-HTTP-LOG":
+//// return message;
+//// case "NTC-CONN-RECORD-LOG":
+//// return message;
+//// case "NTC-COLLECT-RADIUS-LOG":
+//// return message;
+//// case "NTC-KEYWORDS-URL-LOG":
+//// return message;
+//
+//// case "NTC-IPSEC-LOG":
+//// return ipsecLog(message);
+//// case "NTC-L2TP-LOG":
+//// return l2tpLog(message);
+//// case "NTC-SSH-LOG":
+//// return sshLog(message);
+//// case "NTC-PPTP-LOG":
+//// return pptpReplenish(message);
+// default:
+// logger.error("There is no corresponding topic! topic name is :" + topic);
+// break;
+// }
+// return null;
+// }
+//
+//
+// /**
+// * HTTP Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String httpReplenish(String message) {
+// try {
+// NTC_HTTP_LOG ntcHttpLog = JSONObject.parseObject(message, NTC_HTTP_LOG.class);
+// String sIp = ntcHttpLog.getS_ip();
+// String dIp = ntcHttpLog.getD_ip();
+// ntcHttpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcHttpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcHttpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcHttpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+// ntcHttpLog.setWebsite(StringUtil.getDomain(ntcHttpLog.getUrl()));
+//// ntcHttpLog.setD_subscribe_id(RedisPollUtils.getJedisCluster().get(dIP));
+//// ntcHttpLog.setS_subscribe_id(RedisPollUtils.getJedisCluster().get(sIp));
+// TransFormUtils.setHTTPFile(ntcHttpLog);
+// return JSONObject.toJSONString(ntcHttpLog);
+// } catch (Exception e) {
+// logger.error(("HTTP 补全日志出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// /**
+// * HTTP 文件方法
+// *
+// * @param message
+// * @return
+// */
+//
+// public static String httpLogFile(String message) {
+// try {
+// NTC_HTTP_LOG ntcHttpLog = JSONObject.parseObject(message, NTC_HTTP_LOG.class);
+// int cfgId = ntcHttpLog.getCfg_id();
+// if (StringUtil.isBlank(ntcHttpLog.getUrl())) {
+// TransFormUtils.getUniFlow(ntcHttpLog);
+// }
+// String url = ntcHttpLog.getUrl();
+// String reqBody = ntcHttpLog.getReq_body_file();
+// if (StringUtil.isNotBlank(reqBody)) {
+// ntcHttpLog.setReq_body_key(MD5Utils.md5Encode(cfgId + url + ntcHttpLog.getReq_body_file() + ntcHttpLog.getS_ip() + ntcHttpLog.getFound_time()));
+// }
+// ntcHttpLog.setRes_body_key(TransFormUtils.getHttpKey(cfgId, url, ntcHttpLog.getRes_body_file()));
+// ntcHttpLog.setReq_hdr_key(TransFormUtils.getHttpKey(cfgId, url, ntcHttpLog.getReq_hdr_file()));
+// ntcHttpLog.setRes_hdr_key(TransFormUtils.getHttpKey(cfgId, url, ntcHttpLog.getRes_hdr_file()));
+// return JSONObject.toJSONString(ntcHttpLog);
+// } catch (Exception e) {
+// logger.error(("HTPP 文件补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * IP日志不全操作方法
+// *
+// * @param message
+// * @return
+// */
+// private static String ipReplenish(String message) {
+// try {
+// NTC_IP_LOG ntcIpLog = JSONObject.parseObject(message, NTC_IP_LOG.class);
+// String sIp = ntcIpLog.getS_ip();
+// String dIp = ntcIpLog.getD_ip();
+// ntcIpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcIpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcIpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcIpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcIpLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+//// ntcIpLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+// return JSONObject.toJSONString(ntcIpLog);
+// } catch (Exception e) {
+// logger.error(("IP 日志补全操作出现异常!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * MAIL文件传输方法
+// *
+// * @param message
+// * @return
+// */
+// public static String emailFile(String message) {
+// try {
+// NTC_MAIL_LOG ntcMailLog = JSONObject.parseObject(message, NTC_MAIL_LOG.class);
+// ntcMailLog.setEml_key(TransFormUtils.getEmlKey(ntcMailLog.getFound_time(), ntcMailLog.getMail_from(),
+// ntcMailLog.getMail_to(), ntcMailLog.getSubject(), ntcMailLog.getEml_file()));
+// if (StringUtil.isNotBlank(ntcMailLog.getSubject())) {
+// String subjectCharset = JSONObject.parseObject(message).getString("subject_charset");
+// ntcMailLog.setSubject(DecodeUtils.base64Str(ntcMailLog.getSubject(), subjectCharset));
+// }
+// return JSONObject.toJSONString(ntcMailLog);
+// } catch (Exception e) {
+// logger.error(("MAIL 文件传输出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * MAIL 日志详细信息补全方法
+// *
+// * @param message
+// * @return
+// */
+// private static String mailReplenish(String message) {
+// try {
+// NTC_MAIL_LOG ntcMailLog = JSONObject.parseObject(message, NTC_MAIL_LOG.class);
+// String sIp = ntcMailLog.getS_ip();
+// String dIp = ntcMailLog.getD_ip();
+// ntcMailLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcMailLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcMailLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcMailLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcMailLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+//// ntcMailLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+// TransFormUtils.setMailFile(ntcMailLog);
+// return JSONObject.toJSONString(ntcMailLog);
+// } catch (Exception e) {
+// logger.error(("MAIL 日志补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * APP Log Replenish.
+// *
+// * @param message
+// * @return
+// */
+// private static String appReplenish(String message) {
+// try {
+// NTC_APP_LOG ntcAppLog = JSONObject.parseObject(message, NTC_APP_LOG.class);
+// String sIp = ntcAppLog.getS_ip();
+// String dIp = ntcAppLog.getD_ip();
+// ntcAppLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcAppLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcAppLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcAppLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcAppLog.setD_subscribe_id(RedisPollUtils.getJedisCluster().get(dIP));
+//// ntcAppLog.setS_subscribe_id(RedisPollUtils.getJedisCluster().get(sIp));
+// return JSONObject.toJSONString(ntcAppLog);
+// } catch (Exception e) {
+// logger.error(("APP 日志补全出现错误!!! ") + e);
+// return "";
+// }
+// }
+//
+//
+// /**
+// * DDOS Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String ddosReplenish(String message) {
+// try {
+// NTC_DDOS_LOG ntcDdosLog = JSONObject.parseObject(message, NTC_DDOS_LOG.class);
+// String sIp = ntcDdosLog.getS_ip();
+// String dIp = ntcDdosLog.getD_ip();
+// ntcDdosLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcDdosLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcDdosLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcDdosLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcDdosLog.setD_subscribe_id(RedisPollUtils.getJedisCluster().get(dIP));
+//// ntcDdosLog.setS_subscribe_id(RedisPollUtils.getJedisCluster().get(sIp));
+// return JSONObject.toJSONString(ntcDdosLog);
+// } catch (Exception e) {
+// logger.error(("DDOS 日志补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * SSL Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String sslReplenish(String message) {
+// try {
+// NTC_SSL_LOG ntcSslLog = JSONObject.parseObject(message, NTC_SSL_LOG.class);
+// String sIp = ntcSslLog.getS_ip();
+// String dIp = ntcSslLog.getD_ip();
+// ntcSslLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcSslLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcSslLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcSslLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcSslLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+//// ntcSslLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+// return JSONObject.toJSONString(ntcSslLog);
+// } catch (Exception e) {
+// logger.error("SSL 日志补全出现错误!!!" + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * DNS Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String dnsReplenish(String message) {
+// try {
+// NTC_DNS_LOG ntcDnsLog = JSONObject.parseObject(message, NTC_DNS_LOG.class);
+// String sIp = ntcDnsLog.getS_ip();
+// String dIp = ntcDnsLog.getD_ip();
+// ntcDnsLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcDnsLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcDnsLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcDnsLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcDnsLog.setD_subscribe_id(RedisPollUtils.getJedisCluster().get(dIP));
+//// ntcDnsLog.setS_subscribe_id(RedisPollUtils.getJedisCluster().get(sIp));
+// return JSONObject.toJSONString(ntcDnsLog);
+// } catch (Exception e) {
+// logger.error(("DNS 日志补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * P2P Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String p2pReplenish(String message) {
+// try {
+// NTC_P2P_LOG ntcP2PLog = JSONObject.parseObject(message, NTC_P2P_LOG.class);
+// String sIp = ntcP2PLog.getS_ip();
+// String dIp = ntcP2PLog.getD_ip();
+// ntcP2PLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcP2PLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcP2PLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcP2PLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcP2PLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+//// ntcP2PLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+// return JSONObject.toJSONString(ntcP2PLog);
+// } catch (Exception e) {
+// logger.error(("P2P 日志补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * pxy 文件方法
+// *
+// * @param message
+// * @return
+// */
+//
+// public static String pxyLogFile(String message) {
+// try {
+// PXY_HTTP_LOG pxyHttpLog = JSONObject.parseObject(message, PXY_HTTP_LOG.class);
+// int cfgId = pxyHttpLog.getCfg_id();
+// String url = pxyHttpLog.getUrl();
+// pxyHttpLog.setReq_body_key(MD5Utils.md5Encode(cfgId + url + pxyHttpLog.getReq_body() + pxyHttpLog.getS_ip() + pxyHttpLog.getFound_time()));
+// pxyHttpLog.setResp_body_key(TransFormUtils.getHttpKey(cfgId, url, pxyHttpLog.getResp_body()));
+// pxyHttpLog.setWebsite(StringUtil.getDomain(pxyHttpLog.getUrl()));
+// return JSONObject.toJSONString(pxyHttpLog);
+// } catch (Exception e) {
+// logger.error(("PXY 文件补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// /**
+// * PXYHTTP Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String pxyHttpReplenish(String message) {
+// try {
+// PXY_HTTP_LOG pxyHttpLog = JSONObject.parseObject(message, PXY_HTTP_LOG.class);
+// String sIp = pxyHttpLog.getS_ip();
+// String dIp = pxyHttpLog.getD_ip();
+// pxyHttpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// pxyHttpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// pxyHttpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// pxyHttpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+// if (StringUtil.isNotBlank(pxyHttpLog.getReq_body())) {
+// pxyHttpLog.setReq_body("http://" + FlowWriteConfig.ASTANA_OSS_ADDRS + "/download/" + pxyHttpLog.getReq_body_key() + "." + "html");
+// }
+// if (StringUtil.isNotBlank(pxyHttpLog.getResp_body())) {
+// pxyHttpLog.setResp_body("http://" + FlowWriteConfig.ASTANA_OSS_ADDRS + "/download/" + pxyHttpLog.getResp_body_key() + "." + "html");
+// }
+// return JSONObject.toJSONString(pxyHttpLog);
+// } catch (Exception e) {
+// logger.error("PXY 日志补全出现错误!!!" + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * BGP Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String bgpReplenish(String message) {
+// try {
+// NTC_BGP_LOG ntcBgpLog = JSONObject.parseObject(message, NTC_BGP_LOG.class);
+// String sIp = ntcBgpLog.getS_ip();
+// String dIp = ntcBgpLog.getD_ip();
+// ntcBgpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcBgpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcBgpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcBgpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcBgpLog.setD_subscribe_id(RedisPollUtils.getJedisCluster().get(dIP));
+//// ntcBgpLog.setS_subscribe_id(RedisPollUtils.getJedisCluster().get(sIp));
+// return JSONObject.toJSONString(ntcBgpLog);
+// } catch (Exception e) {
+// logger.error(("BGP 日志补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * FTP Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String ftpReplenish(String message) {
+// try {
+// NTC_FTP_LOG ntcFtpLog = JSONObject.parseObject(message, NTC_FTP_LOG.class);
+// String sIp = ntcFtpLog.getS_ip();
+// String dIp = ntcFtpLog.getD_ip();
+// ntcFtpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcFtpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcFtpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcFtpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcFtpLog.setD_subscribe_id(RedisPollUtils.getJedisCluster().get(dIP));
+//// ntcFtpLog.setS_subscribe_id(RedisPollUtils.getJedisCluster().get(sIp));
+// return JSONObject.toJSONString(ntcFtpLog);
+// } catch (Exception e) {
+// logger.error(("FTP 日志补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * Stream Media Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String streamMediaReplenish(String message) {
+// try {
+// NTC_STREAMING_MEDIA_LOG ntcStreamingMediaLog = JSONObject.parseObject(message, NTC_STREAMING_MEDIA_LOG.class);
+// String sIp = ntcStreamingMediaLog.getS_ip();
+// String dIp = ntcStreamingMediaLog.getD_ip();
+// ntcStreamingMediaLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcStreamingMediaLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcStreamingMediaLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcStreamingMediaLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcStreamingMediaLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+//// ntcStreamingMediaLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+// return JSONObject.toJSONString(ntcStreamingMediaLog);
+// } catch (Exception e) {
+// logger.error(("Stream 日志补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// /**
+// * Voip Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String voipReplenish(String message) {
+// try {
+// NTC_VOIP_LOG ntcVoipLog = JSONObject.parseObject(message, NTC_VOIP_LOG.class);
+// String sIp = ntcVoipLog.getS_ip();
+// String dIp = ntcVoipLog.getD_ip();
+// ntcVoipLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcVoipLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcVoipLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcVoipLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcVoipLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+//// ntcVoipLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+// return JSONObject.toJSONString(ntcVoipLog);
+// } catch (Exception e) {
+// logger.error(("VOIP 日志补全出现错误!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// private static String openVpnLog(String message) {
+// try {
+// NTC_OPENVPN_LOG ntcOpenvpnLog = JSONObject.parseObject(message, NTC_OPENVPN_LOG.class);
+// String sIp = ntcOpenvpnLog.getS_ip();
+// String dIp = ntcOpenvpnLog.getD_ip();
+// ntcOpenvpnLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcOpenvpnLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcOpenvpnLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcOpenvpnLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcOpenvpnLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcOpenvpnLog.getS_ip()));
+//// ntcOpenvpnLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcOpenvpnLog.getD_ip()));
+// return JSONObject.toJSONString(ntcOpenvpnLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// /**
+// * 音视频topic对准类
+// *
+// * @param message 日志
+// * @param topic topic名
+// * @return 补全后日志
+// */
+// public static String getMMData(String message, String topic) {
+// switch (topic) {
+// case "NTC-COLLECT-VOIP-LOG":
+// return collectVoipLog(message);
+// case "MM-PORN-AUDIO-LEVEL-LOG":
+// return avIpLog(message);
+// case "MM-PORN-VIDEO-LEVEL-LOG":
+// return avIpLog(message);
+// case "MM-SAMPLE-AUDIO-LOG":
+// return avIpLog(message);
+// case "MM-SAMPLE-VIDEO-LOG":
+// return avIpLog(message);
+// case "MM-SAMPLE-PIC-LOG":
+// return avIpLog(message);
+// case "MM-SAMPLE-VOIP-LOG":
+// return voipIpLog(message);
+// case "MM-FILE-DIGEST-LOG":
+// return avIpLog(message);
+// case "MM-AV-IP-LOG":
+// return avIpLog(message);
+// case "MM-SPEAKER-RECOGNIZATION-LOG":
+// return avIpLog(message);
+// case "MM-LOGO-DETECTION-LOG":
+// return avIpLog(message);
+// case "MM-FACE-RECOGNIZATION-LOG":
+// return avIpLog(message);
+// case "MM-AV-URL-LOG":
+// return avIpLog(message);
+// case "MM-PIC-IP-LOG":
+// return avIpLog(message);
+// case "MM-PIC-URL-LOG":
+// return avIpLog(message);
+// case "MM-VOIP-IP-LOG":
+// return voipIpLog(message);
+// case "MM-VOIP-ACCOUNT-LOG":
+// return voipIpLog(message);
+// default:
+// logger.error("There is no corresponding topic! topic name is :" + topic);
+// break;
+// }
+// return null;
+// }
+//
+//
+// private static String avIpLog(String message) {
+// try {
+// MM_AV_IP_LOG mmAvIpLog = JSONObject.parseObject(message, MM_AV_IP_LOG.class);
+// String sIp = mmAvIpLog.getS_ip();
+// String dIp = mmAvIpLog.getD_ip();
+// mmAvIpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// mmAvIpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// mmAvIpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// mmAvIpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// mmAvIpLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+//// mmAvIpLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+// String url = mmAvIpLog.getLog_uri();
+// if (StringUtil.isNotBlank(url)) {
+// String key = MD5Utils.md5Encode(url);
+// String end = StringUtil.getFileExtendName(url).toLowerCase();
+// mmAvIpLog.setLog_uri("http://" + FlowWriteConfig.ALMATY_OSS_ADDRS + "/download/" + key + "." + end);
+// }
+// return JSONObject.toJSONString(mmAvIpLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// private static String voipIpLog(String message) {
+// try {
+// MM_VOIP_IP_LOG mmVoipIpLog = JSONObject.parseObject(message, MM_VOIP_IP_LOG.class);
+// String sIp = mmVoipIpLog.getS_ip();
+// String dIp = mmVoipIpLog.getD_ip();
+// mmVoipIpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// mmVoipIpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// mmVoipIpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// mmVoipIpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// mmVoipIpLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+//// mmVoipIpLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+// String url = mmVoipIpLog.getLog_uri();
+// if (StringUtil.isNotBlank(url)) {
+// String key = MD5Utils.md5Encode(url);
+// String end = StringUtil.getFileExtendName(url).toLowerCase();
+// mmVoipIpLog.setLog_uri("http://" + FlowWriteConfig.ALMATY_OSS_ADDRS + "/download/" + key + "." + end);
+// }
+// return JSONObject.toJSONString(mmVoipIpLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// private static String collectVoipLog(String message) {
+// try {
+// NTC_COLLECT_VOIP_LOG ntcCollectVoipLog = JSONObject.parseObject(message, NTC_COLLECT_VOIP_LOG.class);
+// if (StringUtil.isNotBlank(ntcCollectVoipLog.getTo_from_store_url())) {
+// String key = MD5Utils.md5Encode(ntcCollectVoipLog.getPid() + ntcCollectVoipLog.getTo_from_store_url() + ntcCollectVoipLog.getFound_time());
+// String url = ntcCollectVoipLog.getTo_from_store_url();
+// String end = StringUtil.getFileExtendName(url).toLowerCase();
+// ntcCollectVoipLog.setTo_from_store_url("http://" + FlowWriteConfig.ALMATY_OSS_ADDRS + "/download/" + key + "." + end);
+// }
+// if (StringUtil.isNotBlank(ntcCollectVoipLog.getFrom_to_store_url())) {
+// String key = MD5Utils.md5Encode(ntcCollectVoipLog.getPid() + ntcCollectVoipLog.getFrom_to_store_url() + ntcCollectVoipLog.getFound_time());
+// String url = ntcCollectVoipLog.getFrom_to_store_url();
+// String end = StringUtil.getFileExtendName(url).toLowerCase();
+// ntcCollectVoipLog.setFrom_to_store_url("http://" + FlowWriteConfig.ALMATY_OSS_ADDRS + "/download/" + key + "." + end);
+// }
+// return JSONObject.toJSONString(ntcCollectVoipLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// /**
+// * 其他topic对准类
+// *
+// * @param message 日志
+// * @return 补全后日志
+// */
+// public static String getOtherData(String message, String topic) {
+// switch (topic) {
+// case "NTC-HTTP-LOG":
+// return otherHttpLog(message);
+// case "NTC-CONN-RECORD-LOG":
+// NTC_CONN_RECORD_LOG ntcConnRecordLog = JSONObject.parseObject(message, NTC_CONN_RECORD_LOG.class, Feature.OrderedField);
+// ntcConnRecordLog.setD_asn(ipLookup.asnLookup(ntcConnRecordLog.getD_ip(), true));
+// ntcConnRecordLog.setS_asn(ipLookup.asnLookup(ntcConnRecordLog.getS_ip(), true));
+// return JSONObject.toJSONString(ntcConnRecordLog);
+// default:
+// logger.error("There is no corresponding topic! topic name is :" + topic);
+// break;
+// }
+// return null;
+// }
+//
+//
+// private static String otherHttpLog(String message) {
+// NTC_HTTP_LOG ntcHttpLog = JSONObject.parseObject(message, NTC_HTTP_LOG.class);
+// if (ntcHttpLog.getService() == 152) {
+// if (StringUtil.isBlank(ntcHttpLog.getUrl())) {
+// TransFormUtils.getUniFlow(ntcHttpLog);
+// }
+// String sIp = ntcHttpLog.getS_ip();
+// String dIp = ntcHttpLog.getD_ip();
+// ntcHttpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcHttpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcHttpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcHttpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+// ntcHttpLog.setWebsite(StringUtil.getDomain(ntcHttpLog.getUrl()));
+//// ntcHttpLog.setD_subscribe_id(RedisPollUtils.getJedisCluster().get(dIP));
+//// ntcHttpLog.setS_subscribe_id(RedisPollUtils.getJedisCluster().get(sIp));
+// return JSONObject.toJSONString(ntcHttpLog);
+// }
+// return "";
+// }
+//
+//
+// /**
+// * collect Mail to Mail
+// *
+// * @param message
+// * @return
+// */
+// public static String collectMailToMailLog(String message) {
+// try {
+// NTC_MAIL_LOG ntcMailLog = JSONObject.parseObject(message, NTC_MAIL_LOG.class);
+// String sIp = ntcMailLog.getS_ip();
+// String dIp = ntcMailLog.getD_ip();
+// ntcMailLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcMailLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcMailLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcMailLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcMailLog.setS_subscribe_id(TransFormUtils.getSubscribe(sIp));
+//// ntcMailLog.setD_subscribe_id(TransFormUtils.getSubscribe(dIP));
+// ntcMailLog.setEml_key(TransFormUtils.getEmlKey(ntcMailLog.getFound_time(), ntcMailLog.getMail_from(),
+// ntcMailLog.getMail_to(), ntcMailLog.getSubject(), ntcMailLog.getEml_file()));
+// if (StringUtil.isNotBlank(ntcMailLog.getSubject())) {
+// String subjectCharset = JSONObject.parseObject(message).getString("subject_charset");
+// ntcMailLog.setSubject(DecodeUtils.base64Str(ntcMailLog.getSubject(), subjectCharset));
+// }
+// TransFormUtils.setMailFile(ntcMailLog);
+// return JSONObject.toJSONString(ntcMailLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// /**
+// * ---------------------------------删除的topic------------------------------------------------------------
+// **/
+//
+// private static String behaviorLog(String message) {
+// try {
+// DK_BEHAVIOR_LOG dkBehaviorLog = JSONObject.parseObject(message, DK_BEHAVIOR_LOG.class, Feature.OrderedField);
+// dkBehaviorLog.setServer_locate(ipLookup.countryLookup(dkBehaviorLog.getD_ip()));
+// dkBehaviorLog.setClient_locate(ipLookup.cityLookupDetail(dkBehaviorLog.getS_ip()));
+// dkBehaviorLog.setD_asn(ipLookup.asnLookup(dkBehaviorLog.getD_ip()).trim());
+// dkBehaviorLog.setS_asn(ipLookup.asnLookup(dkBehaviorLog.getS_ip()).trim());
+//// dkBehaviorLog.setS_subscribe_id(TransFormUtils.getSubscribe(dkBehaviorLog.getS_ip()));
+//// dkBehaviorLog.setD_subscribe_id(TransFormUtils.getSubscribe(dkBehaviorLog.getD_ip()));
+// return JSONObject.toJSONString(dkBehaviorLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// private static String ipsecLog(String message) {
+// try {
+// NTC_IPSEC_LOG ntcIpsecLog = JSONObject.parseObject(message, NTC_IPSEC_LOG.class, Feature.OrderedField);
+// ntcIpsecLog.setServer_locate(ipLookup.countryLookup(ntcIpsecLog.getD_ip()));
+// ntcIpsecLog.setClient_locate(ipLookup.cityLookupDetail(ntcIpsecLog.getS_ip()));
+// ntcIpsecLog.setD_asn(ipLookup.asnLookup(ntcIpsecLog.getD_ip()).trim());
+// ntcIpsecLog.setS_asn(ipLookup.asnLookup(ntcIpsecLog.getS_ip()).trim());
+//// ntcIpsecLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcIpsecLog.getS_ip()));
+//// ntcIpsecLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcIpsecLog.getD_ip()));
+// return JSONObject.toJSONString(ntcIpsecLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// private static String l2tpLog(String message) {
+// try {
+// NTC_L2TP_LOG ntcL2TPLog = JSONObject.parseObject(message, NTC_L2TP_LOG.class, Feature.OrderedField);
+// ntcL2TPLog.setServer_locate(ipLookup.countryLookup(ntcL2TPLog.getD_ip()));
+// ntcL2TPLog.setClient_locate(ipLookup.cityLookupDetail(ntcL2TPLog.getS_ip()));
+// ntcL2TPLog.setD_asn(ipLookup.asnLookup(ntcL2TPLog.getD_ip()).trim());
+// ntcL2TPLog.setS_asn(ipLookup.asnLookup(ntcL2TPLog.getS_ip()).trim());
+//// ntcL2TPLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcL2TPLog.getS_ip()));
+//// ntcL2TPLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcL2TPLog.getD_ip()));
+// return JSONObject.toJSONString(ntcL2TPLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// private static String sshLog(String message) {
+// try {
+// NTC_SSH_LOG ntcSshLog = JSONObject.parseObject(message, NTC_SSH_LOG.class, Feature.OrderedField);
+// ntcSshLog.setServer_locate(ipLookup.countryLookup(ntcSshLog.getD_ip()));
+// ntcSshLog.setClient_locate(ipLookup.cityLookupDetail(ntcSshLog.getS_ip()));
+// ntcSshLog.setD_asn(ipLookup.asnLookup(ntcSshLog.getD_ip()).trim());
+// ntcSshLog.setS_asn(ipLookup.asnLookup(ntcSshLog.getS_ip()).trim());
+// try {
+// ntcSshLog.setVersion(ntcSshLog.getVersion().replaceAll("\n", "\\\\n"));
+// ntcSshLog.setVersion(ntcSshLog.getVersion().replaceAll("\r", "\\\\r"));
+// } catch (Exception e) {
+// ntcSshLog.setVersion("");
+// }
+//// ntcSshLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcSshLog.getS_ip()));
+//// ntcSshLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcSshLog.getD_ip()));
+// return JSONObject.toJSONString(ntcSshLog);
+// } catch (Exception e) {
+// logger.error("Log parsing error!!! " + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// /**
+// * PPTP Log Replenish
+// *
+// * @param message
+// * @return
+// */
+// private static String pptpReplenish(String message) {
+// try {
+// NTC_PPTP_LOG ntcPptpLog = JSONObject.parseObject(message, NTC_PPTP_LOG.class);
+// ntcPptpLog.setServer_locate(ipLookup.countryLookup(ntcPptpLog.getD_ip()));
+// ntcPptpLog.setClient_locate(ipLookup.cityLookupDetail(ntcPptpLog.getS_ip()));
+// ntcPptpLog.setD_asn(ipLookup.asnLookup(ntcPptpLog.getD_ip()).trim());
+// ntcPptpLog.setS_asn(ipLookup.asnLookup(ntcPptpLog.getS_ip()).trim());
+//// ntcPptpLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcPptpLog.getS_ip()));
+//// ntcPptpLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcPptpLog.getD_ip()));
+// return JSONObject.toJSONString(ntcPptpLog);
+// } catch (Exception e) {
+// logger.error(("Log parsing error!!! ") + e);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// /**---------------------------------删除的topic------------------------------------------------------------**/
+//
+//}
diff --git a/src/main/java/cn/ac/iie/utils/getjson/GetStrToClickHouseUtils.java b/src/main/java/cn/ac/iie/utils/getjson/GetStrToClickHouseUtils.java
new file mode 100644
index 0000000..3ac5d1e
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/getjson/GetStrToClickHouseUtils.java
@@ -0,0 +1,585 @@
+//package cn.ac.iie.utils.getjson;
+//
+//import cn.ac.iie.bean.dk.DK_BEHAVIOR_LOG;
+//import cn.ac.iie.bean.mm.MM_AV_IP_LOG;
+//import cn.ac.iie.bean.mm.MM_VOIP_IP_LOG;
+//import cn.ac.iie.bean.ntc.NTC_CONN_RECORD_LOG;
+//import cn.ac.iie.bean.ntcwidely.NTC_COLLECT_SSL_LOG;
+//import cn.ac.iie.bean.pxy.PXY_HTTP_LOG;
+//import cn.ac.iie.common.FlowWriteConfig;
+//import cn.ac.iie.common.RealtimeCountConfig;
+//import cn.ac.iie.utils.ordinary.DecodeUtils;
+//import com.alibaba.fastjson.JSONObject;
+//import com.alibaba.fastjson.parser.Feature;
+//import com.zdjizhi.utils.IpLookup;
+//import com.zdjizhi.utils.StringUtil;
+//import org.apache.log4j.Logger;
+//
+///**
+// * @author antlee
+// * @date 2018/7/19
+// */
+//public class GetStrToClickHouseUtils {
+// private static Logger logger = Logger.getLogger(GetStrToClickHouseUtils.class);
+// private static IpLookup ipLookup = new IpLookup.Builder(false)
+// .loadDataFileV4(RealtimeCountConfig.IP_LIBRARY)
+// .loadDataFileV6(RealtimeCountConfig.IP_LIBRARY)
+// .build();
+//
+// /**
+// * NTC topic对准类
+// *
+// * @param message 日志
+// * @param topic topic名称
+// * @return 补全日志
+// */
+// public static String getData(String message, String topic) {
+// switch (topic) {
+// case "NTC-CONN-RECORD-LOG":
+// return ntcConnLog(message);
+// case "NTC-COLLECT-MAIL-LOG":
+// return mailLog(message);
+// case "NTC-COLLECT-SSL-LOG":
+// return sslLog(message);
+// case "NTC-SSL-LOG":
+// return sslLog(message);
+// case "NTC-APP-LOG":
+// return appLog(message);
+// case "NTC-HTTP-LOG":
+// return httpLog(message);
+// case "NTC-IP-LOG":
+// return ipLog(message);
+// case "PXY-HTTP-LOG":
+// return pHttpLog(message);
+// case "NTC-DNS-LOG":
+// return dnsLog(message);
+// case "NTC-BGP-LOG":
+// return bgpLog(message);
+// case "NTC-DDOS-LOG":
+// return ddosLog(message);
+// case "NTC-FTP-LOG":
+// return ftpLog(message);
+// case "NTC-MAIL-LOG":
+// return mailLog(message);
+// case "NTC-OPENVPN-LOG":
+// return openVpnLog(message);
+// case "NTC-P2P-LOG":
+// return p2pLog(message);
+// case "NTC-STREAMING-MEDIA-LOG":
+// return streamMediaLog(message);
+// case "NTC-VOIP-LOG":
+// return voipLog(message);
+// case "NTC-KEYWORDS-URL-LOG":
+// return message;
+// case "MM-SAMPLE-AUDIO-LOG":
+// return avIpLog(message);
+// case "MM-SAMPLE-VIDEO-LOG":
+// return avIpLog(message);
+// case "MM-PORN-AUDIO-LEVEL-LOG":
+// return avIpLog(message);
+// case "MM-PORN-VIDEO-LEVEL-LOG":
+// return avIpLog(message);
+// case "MM-SAMPLE-PIC-LOG":
+// return avIpLog(message);
+// case "MM-SAMPLE-VOIP-LOG":
+// return voipIpLog(message);
+// case "MM-FILE-DIGEST-LOG":
+// return avIpLog(message);
+// case "NTC-COLLECT-VOIP-LOG":
+// return collectVoipLog(message);
+//
+//// case "MM-AV-IP-LOG":
+//// return avIpLog(message);
+//// case "MM-AV-URL-LOG":
+//// return avIpLog(message);
+//// case "MM-PIC-IP-LOG":
+//// return avIpLog(message);
+//// case "MM-PIC-URL-LOG":
+//// return avIpLog(message);
+//// case "MM-VOIP-IP-LOG":
+//// return voipIpLog(message);
+//// case "MM-SPEAKER-RECOGNIZATION-LOG":
+//// return avIpLog(message);
+//// case "MM-LOGO-DETECTION-LOG":
+//// return avIpLog(message);
+//// case "MM-FACE-RECOGNIZATION-LOG":
+//// return avIpLog(message);
+//// case "MM-VOIP-ACCOUNT-LOG":
+//// return voipIpLog(message);
+//// case "NTC-IPSEC-LOG":
+//// return ipsecLog(message);
+//// case "NTC-L2TP-LOG":
+//// return l2tpLog(message);
+//// case "NTC-PPTP-LOG":
+//// return pptpLog(message);
+//// case "NTC-SSH-LOG":
+//// return sshLog(message);
+//
+// default:
+// logger.error("There is no corresponding topic! topic name is :" + topic);
+// break;
+// }
+// return null;
+// }
+//
+//
+// private static String ntcConnLog(String message) {
+// try {
+// NTC_CONN_RECORD_LOG ntcConnRecordLog = JSONObject.parseObject(message, NTC_CONN_RECORD_LOG.class);
+// String appLabel = ntcConnRecordLog.getApp_label();
+// String sIp = ntcConnRecordLog.getS_ip();
+// String dIp = ntcConnRecordLog.getD_ip();
+// ntcConnRecordLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcConnRecordLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcConnRecordLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcConnRecordLog.setS_asn(ipLookup.asnLookup(sIp, true));
+// if (StringUtil.isNotBlank(appLabel)) {
+// String[] split = ntcConnRecordLog.getApp_label().split(";");
+// ntcConnRecordLog.setProto_id(Integer.parseInt(split[0].split("=")[1]));
+// ntcConnRecordLog.setApp_id(Integer.parseInt(split[1].split("=")[1]));
+// ntcConnRecordLog.setOs_id(Integer.parseInt(split[2].split("=")[1]));
+// ntcConnRecordLog.setBs_id(Integer.parseInt(split[3].split("=")[1]));
+// ntcConnRecordLog.setWeb_id(Integer.parseInt(split[4].split("=")[1]));
+// ntcConnRecordLog.setBehav_id(Integer.parseInt(split[5].split("=")[1]));
+// } else {
+// ntcConnRecordLog.setProto_id(0);
+// ntcConnRecordLog.setApp_id(0);
+// ntcConnRecordLog.setOs_id(0);
+// ntcConnRecordLog.setBs_id(0);
+// ntcConnRecordLog.setWeb_id(0);
+// ntcConnRecordLog.setBehav_id(0);
+// }
+// return JSONObject.toJSONString(ntcConnRecordLog);
+// } catch (Exception e) {
+// logger.error("Log parsing error!!! \n" + message);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+//
+// private static String avIpLog(String message) {
+// try {
+// MM_AV_IP_LOG mmAvIpLog = JSONObject.parseObject(message, MM_AV_IP_LOG.class);
+// String sIp = mmAvIpLog.getS_ip();
+// String dIp = mmAvIpLog.getD_ip();
+// mmAvIpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// mmAvIpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// mmAvIpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// mmAvIpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// mmAvIpLog.setS_subscribe_id(TransFormUtils.getSubscribe(mmAvIpLog.getS_ip()));
+//// mmAvIpLog.setD_subscribe_id(TransFormUtils.getSubscribe(mmAvIpLog.getD_ip()));
+// return JSONObject.toJSONString(mmAvIpLog);
+// } catch (Exception e) {
+// logger.error("Log parsing error!!! \n" + message);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// private static String voipIpLog(String message) {
+// try {
+// MM_VOIP_IP_LOG mmVoipIpLog = JSONObject.parseObject(message, MM_VOIP_IP_LOG.class);
+// String sIp = mmVoipIpLog.getS_ip();
+// String dIp = mmVoipIpLog.getD_ip();
+// mmVoipIpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// mmVoipIpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// mmVoipIpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// mmVoipIpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// mmVoipIpLog.setS_subscribe_id(TransFormUtils.getSubscribe(mmVoipIpLog.getS_ip()));
+//// mmVoipIpLog.setD_subscribe_id(TransFormUtils.getSubscribe(mmVoipIpLog.getD_ip()));
+// return JSONObject.toJSONString(mmVoipIpLog);
+// } catch (Exception e) {
+// logger.error("Log parsing error!!! \n" + message);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// private static String collectVoipLog(String message) {
+// try {
+// NTC_COLLECT_VOIP_LOG ntcCollectVoipLog = JSONObject.parseObject(message, NTC_COLLECT_VOIP_LOG.class);
+// return JSONObject.toJSONString(ntcCollectVoipLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+//
+// private static String appLog(String message) {
+// try {
+// NTC_APP_LOG ntcAppLog = JSONObject.parseObject(message, NTC_APP_LOG.class);
+// String sIp = ntcAppLog.getS_ip();
+// String dIp = ntcAppLog.getD_ip();
+// ntcAppLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcAppLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcAppLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcAppLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcAppLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcAppLog.getS_ip()));
+//// ntcAppLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcAppLog.getD_ip()));
+// return JSONObject.toJSONString(ntcAppLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String bgpLog(String message) {
+// try {
+// NTC_BGP_LOG ntcBgpLog = JSONObject.parseObject(message, NTC_BGP_LOG.class);
+// String sIp = ntcBgpLog.getS_ip();
+// String dIp = ntcBgpLog.getD_ip();
+// ntcBgpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcBgpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcBgpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcBgpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcBgpLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcBgpLog.getS_ip()));
+//// ntcBgpLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcBgpLog.getD_ip()));
+// return JSONObject.toJSONString(ntcBgpLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String ddosLog(String message) {
+// try {
+// NTC_DDOS_LOG ntcDdosLog = JSONObject.parseObject(message, NTC_DDOS_LOG.class);
+// String sIp = ntcDdosLog.getS_ip();
+// String dIp = ntcDdosLog.getD_ip();
+// ntcDdosLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcDdosLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcDdosLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcDdosLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcDdosLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcDdosLog.getS_ip()));
+//// ntcDdosLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcDdosLog.getD_ip()));
+// return JSONObject.toJSONString(ntcDdosLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String dnsLog(String message) {
+// try {
+// NTC_DNS_LOG ntcDnsLog = JSONObject.parseObject(message, NTC_DNS_LOG.class);
+// String sIp = ntcDnsLog.getS_ip();
+// String dIp = ntcDnsLog.getD_ip();
+// ntcDnsLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcDnsLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcDnsLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcDnsLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcDnsLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcDnsLog.getS_ip()));
+//// ntcDnsLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcDnsLog.getD_ip()));
+// return JSONObject.toJSONString(ntcDnsLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String ftpLog(String message) {
+// try {
+// NTC_FTP_LOG ntcFtpLog = JSONObject.parseObject(message, NTC_FTP_LOG.class);
+// String sIp = ntcFtpLog.getS_ip();
+// String dIp = ntcFtpLog.getD_ip();
+// ntcFtpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcFtpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcFtpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcFtpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcFtpLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcFtpLog.getS_ip()));
+//// ntcFtpLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcFtpLog.getD_ip()));
+// return JSONObject.toJSONString(ntcFtpLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String httpLog(String message) {
+// try {
+// NTC_HTTP_LOG ntcHttpLog = JSONObject.parseObject(message, NTC_HTTP_LOG.class);
+//// if (StringUtil.isBlank(ntcHttpLog.getUrl())) {
+//// TransFormUtils.getUniFlow(ntcHttpLog);
+//// }
+// String sIp = ntcHttpLog.getS_ip();
+// String dIp = ntcHttpLog.getD_ip();
+// ntcHttpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcHttpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcHttpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcHttpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcHttpLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcHttpLog.getS_ip()));
+//// ntcHttpLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcHttpLog.getD_ip()));
+// ntcHttpLog.setWebsite(StringUtil.getDomain(ntcHttpLog.getUrl()));
+// return JSONObject.toJSONString(ntcHttpLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String ipLog(String message) {
+// try {
+// NTC_IP_LOG ntcIpLog = JSONObject.parseObject(message, NTC_IP_LOG.class);
+// String sIp = ntcIpLog.getS_ip();
+// String dIp = ntcIpLog.getD_ip();
+// ntcIpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcIpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcIpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcIpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcIpLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcIpLog.getS_ip()));
+//// ntcIpLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcIpLog.getD_ip()));
+// return JSONObject.toJSONString(ntcIpLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+//
+// private static String mailLog(String message) {
+// try {
+// NTC_MAIL_LOG ntcMailLog = JSONObject.parseObject(message, NTC_MAIL_LOG.class);
+// String sIp = ntcMailLog.getS_ip();
+// String dIp = ntcMailLog.getD_ip();
+// ntcMailLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcMailLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcMailLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcMailLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcMailLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcMailLog.getS_ip()));
+//// ntcMailLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcMailLog.getD_ip()));
+// if (StringUtil.isNotBlank(ntcMailLog.getSubject())) {
+// String subjectCharset = JSONObject.parseObject(message).getString("subject_charset");
+// ntcMailLog.setSubject(DecodeUtils.base64Str(ntcMailLog.getSubject(), subjectCharset));
+// }
+// return JSONObject.toJSONString(ntcMailLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String openVpnLog(String message) {
+// try {
+// NTC_OPENVPN_LOG ntcOpenvpnLog = JSONObject.parseObject(message, NTC_OPENVPN_LOG.class);
+// String sIp = ntcOpenvpnLog.getS_ip();
+// String dIp = ntcOpenvpnLog.getD_ip();
+// ntcOpenvpnLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcOpenvpnLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcOpenvpnLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcOpenvpnLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcOpenvpnLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcOpenvpnLog.getS_ip()));
+//// ntcOpenvpnLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcOpenvpnLog.getD_ip()));
+// return JSONObject.toJSONString(ntcOpenvpnLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String p2pLog(String message) {
+// try {
+// NTC_P2P_LOG ntcP2PLog = JSONObject.parseObject(message, NTC_P2P_LOG.class);
+// String sIp = ntcP2PLog.getS_ip();
+// String dIp = ntcP2PLog.getD_ip();
+// ntcP2PLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcP2PLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcP2PLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcP2PLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcP2PLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcP2PLog.getS_ip()));
+//// ntcP2PLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcP2PLog.getD_ip()));
+// return JSONObject.toJSONString(ntcP2PLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+//
+// private static String streamMediaLog(String message) {
+// try {
+// NTC_STREAMING_MEDIA_LOG ntcStreamingMediaLog = JSONObject.parseObject(message, NTC_STREAMING_MEDIA_LOG.class);
+// String sIp = ntcStreamingMediaLog.getS_ip();
+// String dIp = ntcStreamingMediaLog.getD_ip();
+// ntcStreamingMediaLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcStreamingMediaLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcStreamingMediaLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcStreamingMediaLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcStreamingMediaLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcStreamingMediaLog.getS_ip()));
+//// ntcStreamingMediaLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcStreamingMediaLog.getD_ip()));
+// return JSONObject.toJSONString(ntcStreamingMediaLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String voipLog(String message) {
+// try {
+// NTC_VOIP_LOG ntcVoipLog = JSONObject.parseObject(message, NTC_VOIP_LOG.class);
+// String sIp = ntcVoipLog.getS_ip();
+// String dIp = ntcVoipLog.getD_ip();
+// ntcVoipLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcVoipLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcVoipLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcVoipLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcVoipLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcVoipLog.getS_ip()));
+//// ntcVoipLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcVoipLog.getD_ip()));
+// return JSONObject.toJSONString(ntcVoipLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+//
+// private static String sslLog(String message) {
+// try {
+// NTC_COLLECT_SSL_LOG ntcSslLog = JSONObject.parseObject(message, NTC_COLLECT_SSL_LOG.class);
+// String sIp = ntcSslLog.getS_ip();
+// String dIp = ntcSslLog.getD_ip();
+// ntcSslLog.setServer_locate(ipLookup.countryLookup(dIp));
+// ntcSslLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// ntcSslLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// ntcSslLog.setS_asn(ipLookup.asnLookup(sIp, true));
+//// ntcSslLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcSslLog.getS_ip()));
+//// ntcSslLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcSslLog.getD_ip()));
+// return JSONObject.toJSONString(ntcSslLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String pHttpLog(String message) {
+// try {
+// PXY_HTTP_LOG pxyHttpLog = JSONObject.parseObject(message, PXY_HTTP_LOG.class);
+// String sIp = pxyHttpLog.getS_ip();
+// String dIp = pxyHttpLog.getD_ip();
+// pxyHttpLog.setServer_locate(ipLookup.countryLookup(dIp));
+// pxyHttpLog.setClient_locate(ipLookup.cityLookupDetail(sIp));
+// pxyHttpLog.setD_asn(ipLookup.asnLookup(dIp, true));
+// pxyHttpLog.setS_asn(ipLookup.asnLookup(sIp, true));
+// pxyHttpLog.setWebsite(StringUtil.getDomain(pxyHttpLog.getUrl()));
+// return JSONObject.toJSONString(pxyHttpLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+//
+// /**
+// * ---------------------------------删除的topic------------------------------------------------------------
+// **/
+//
+//
+// private static String behaviorLog(String message) {
+// try {
+// DK_BEHAVIOR_LOG dkBehaviorLog = JSONObject.parseObject(message, DK_BEHAVIOR_LOG.class, Feature.OrderedField);
+// dkBehaviorLog.setServer_locate(ipLookup.countryLookup(dkBehaviorLog.getD_ip()));
+// dkBehaviorLog.setClient_locate(ipLookup.cityLookupDetail(dkBehaviorLog.getS_ip()));
+// dkBehaviorLog.setD_asn(ipLookup.asnLookup(dkBehaviorLog.getD_ip()).trim());
+// dkBehaviorLog.setS_asn(ipLookup.asnLookup(dkBehaviorLog.getS_ip()).trim());
+//// dkBehaviorLog.setS_subscribe_id(TransFormUtils.getSubscribe(dkBehaviorLog.getS_ip()));
+//// dkBehaviorLog.setD_subscribe_id(TransFormUtils.getSubscribe(dkBehaviorLog.getD_ip()));
+// return JSONObject.toJSONString(dkBehaviorLog);
+// } catch (Exception e) {
+// logger.error("Log parsing error!!! \n" + message);
+// e.printStackTrace();
+// return "";
+// }
+// }
+//
+// private static String sshLog(String message) {
+// try {
+// NTC_SSH_LOG ntcSshLog = JSONObject.parseObject(message, NTC_SSH_LOG.class, Feature.OrderedField);
+// ntcSshLog.setServer_locate(ipLookup.countryLookup(ntcSshLog.getD_ip()));
+// ntcSshLog.setClient_locate(ipLookup.cityLookupDetail(ntcSshLog.getS_ip()));
+// ntcSshLog.setD_asn(ipLookup.asnLookup(ntcSshLog.getD_ip()).trim());
+// ntcSshLog.setS_asn(ipLookup.asnLookup(ntcSshLog.getS_ip()).trim());
+// try {
+// ntcSshLog.setVersion(ntcSshLog.getVersion().replaceAll("\n", "\\\\n"));
+// ntcSshLog.setVersion(ntcSshLog.getVersion().replaceAll("\r", "\\\\r"));
+// } catch (Exception e) {
+// ntcSshLog.setVersion("");
+// }
+//// ntcSshLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcSshLog.getS_ip()));
+//// ntcSshLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcSshLog.getD_ip()));
+// return JSONObject.toJSONString(ntcSshLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String pptpLog(String message) {
+// try {
+// NTC_PPTP_LOG ntcPptpLog = JSONObject.parseObject(message, NTC_PPTP_LOG.class, Feature.OrderedField);
+// ntcPptpLog.setServer_locate(ipLookup.countryLookup(ntcPptpLog.getD_ip()));
+// ntcPptpLog.setClient_locate(ipLookup.cityLookupDetail(ntcPptpLog.getS_ip()));
+// ntcPptpLog.setD_asn(ipLookup.asnLookup(ntcPptpLog.getD_ip()).trim());
+// ntcPptpLog.setS_asn(ipLookup.asnLookup(ntcPptpLog.getS_ip()).trim());
+//// ntcPptpLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcPptpLog.getS_ip()));
+//// ntcPptpLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcPptpLog.getD_ip()));
+// return JSONObject.toJSONString(ntcPptpLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String l2tpLog(String message) {
+// try {
+// NTC_L2TP_LOG ntcL2TPLog = JSONObject.parseObject(message, NTC_L2TP_LOG.class, Feature.OrderedField);
+// ntcL2TPLog.setServer_locate(ipLookup.countryLookup(ntcL2TPLog.getD_ip()));
+// ntcL2TPLog.setClient_locate(ipLookup.cityLookupDetail(ntcL2TPLog.getS_ip()));
+// ntcL2TPLog.setD_asn(ipLookup.asnLookup(ntcL2TPLog.getD_ip()).trim());
+// ntcL2TPLog.setS_asn(ipLookup.asnLookup(ntcL2TPLog.getS_ip()).trim());
+//// ntcL2TPLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcL2TPLog.getS_ip()));
+//// ntcL2TPLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcL2TPLog.getD_ip()));
+// return JSONObject.toJSONString(ntcL2TPLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+// private static String ipsecLog(String message) {
+// try {
+// NTC_IPSEC_LOG ntcIpsecLog = JSONObject.parseObject(message, NTC_IPSEC_LOG.class, Feature.OrderedField);
+// ntcIpsecLog.setServer_locate(ipLookup.countryLookup(ntcIpsecLog.getD_ip()));
+// ntcIpsecLog.setClient_locate(ipLookup.cityLookupDetail(ntcIpsecLog.getS_ip()));
+// ntcIpsecLog.setD_asn(ipLookup.asnLookup(ntcIpsecLog.getD_ip()).trim());
+// ntcIpsecLog.setS_asn(ipLookup.asnLookup(ntcIpsecLog.getS_ip()).trim());
+//// ntcIpsecLog.setS_subscribe_id(TransFormUtils.getSubscribe(ntcIpsecLog.getS_ip()));
+//// ntcIpsecLog.setD_subscribe_id(TransFormUtils.getSubscribe(ntcIpsecLog.getD_ip()));
+// return JSONObject.toJSONString(ntcIpsecLog);
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("Log parsing error!!! \n" + message);
+// return "";
+// }
+// }
+//
+//
+//}
diff --git a/src/main/java/cn/ac/iie/utils/redis/RedisPollUtils.java b/src/main/java/cn/ac/iie/utils/redis/RedisPollUtils.java
new file mode 100644
index 0000000..23407dd
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/redis/RedisPollUtils.java
@@ -0,0 +1,61 @@
+package cn.ac.iie.utils.redis;
+
+import org.apache.log4j.Logger;
+import redis.clients.jedis.HostAndPort;
+import redis.clients.jedis.JedisCluster;
+import redis.clients.jedis.JedisPoolConfig;
+
+import java.io.IOException;
+import java.util.LinkedHashSet;
+import java.util.Properties;
+import java.util.Set;
+
+/**
+ * Redis连接池
+ *
+ * @author my
+ * @date 2018-07-04
+ */
+public final class RedisPollUtils {
+ private static final Logger logger = Logger.getLogger(RedisPollUtils.class);
+ private static JedisCluster jedisCluster;
+ private static Properties props = new Properties();
+
+ /**
+ * 不允许通过new创建该类的实例
+ */
+ private RedisPollUtils() {
+ }
+
+ /**
+ * 初始化Redis连接池
+ */
+ public static JedisCluster getJedisCluster() {
+ try {
+ String redisConfigFile = "redis_aaa_config.properties";
+ props.load(RedisPollUtils.class.getClassLoader().getResourceAsStream(redisConfigFile));
+ } catch (IOException e) {
+ logger.error("Properties Initialization Failed!!!!");
+ e.printStackTrace();
+ }
+ JedisPoolConfig poolConfig = new JedisPoolConfig();
+ poolConfig.setMaxTotal(Integer.valueOf(props.getProperty("jedis.pool.maxActive")));
+ poolConfig.setMaxIdle(Integer.valueOf(props.getProperty("jedis.pool.maxIdle")));
+ poolConfig.setMaxWaitMillis(Long.valueOf(props.getProperty("jedis.pool.maxWait")));
+ poolConfig.setTestOnReturn(Boolean.valueOf(props.getProperty("jedis.pool.testOnReturn")));
+ poolConfig.setTestOnBorrow(Boolean.valueOf(props.getProperty("jedis.pool.testOnBorrow")));
+ Set<HostAndPort> nodes = new LinkedHashSet<HostAndPort>();
+ for (String port : props.getProperty("redis.port").split(",")) {
+ nodes.add(new HostAndPort(props.getProperty("redis.ip1"), Integer.parseInt(port)));
+ nodes.add(new HostAndPort(props.getProperty("redis.ip2"), Integer.parseInt(port)));
+ nodes.add(new HostAndPort(props.getProperty("redis.ip3"), Integer.parseInt(port)));
+ nodes.add(new HostAndPort(props.getProperty("redis.ip4"), Integer.parseInt(port)));
+ nodes.add(new HostAndPort(props.getProperty("redis.ip5"), Integer.parseInt(port)));
+ }
+ if (jedisCluster == null) {
+ jedisCluster = new JedisCluster(nodes, poolConfig);
+ }
+ return jedisCluster;
+ }
+
+}
diff --git a/src/main/java/cn/ac/iie/utils/redis/RedisUrlPollUtils.java b/src/main/java/cn/ac/iie/utils/redis/RedisUrlPollUtils.java
new file mode 100644
index 0000000..98cd1be
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/redis/RedisUrlPollUtils.java
@@ -0,0 +1,61 @@
+package cn.ac.iie.utils.redis;
+
+import org.apache.log4j.Logger;
+import redis.clients.jedis.HostAndPort;
+import redis.clients.jedis.JedisCluster;
+import redis.clients.jedis.JedisPoolConfig;
+
+import java.util.LinkedHashSet;
+import java.util.Properties;
+import java.util.Set;
+
+/**
+ * Redis连接池
+ *
+ * @author my
+ * @date 2018-07-04
+ */
+public class RedisUrlPollUtils {
+ private static final Logger logger = Logger.getLogger(RedisUrlPollUtils.class);
+ private static String redisConfigFile = "redis_url_config.properties";
+ private static JedisCluster jedisCluster;
+ private static Properties props = new Properties();
+
+ /**
+ * 不允许通过new创建该类的实例
+ */
+ private RedisUrlPollUtils() {
+ }
+
+ /**
+ * 初始化Redis连接池
+ */
+ public static JedisCluster getJedisCluster() {
+ try {
+ props.load(RedisUrlPollUtils.class.getClassLoader().getResourceAsStream(redisConfigFile));
+ JedisPoolConfig poolConfig = new JedisPoolConfig();
+ poolConfig.setMaxTotal(Integer.valueOf(props.getProperty("jedis.pool.maxActive")));
+ poolConfig.setMaxIdle(Integer.valueOf(props.getProperty("jedis.pool.maxIdle")));
+ poolConfig.setMaxWaitMillis(Long.valueOf(props.getProperty("jedis.pool.maxWait")));
+ poolConfig.setTestOnBorrow(Boolean.valueOf(props.getProperty("jedis.pool.testOnBorrow")));
+ poolConfig.setTestOnReturn(Boolean.valueOf(props.getProperty("jedis.pool.testOnReturn")));
+ Set<HostAndPort> nodes = new LinkedHashSet<HostAndPort>();
+ for (String port : props.getProperty("redis.port").split(",")) {
+ nodes.add(new HostAndPort(props.getProperty("redis.ip1"), Integer.parseInt(port)));
+ nodes.add(new HostAndPort(props.getProperty("redis.ip2"), Integer.parseInt(port)));
+ nodes.add(new HostAndPort(props.getProperty("redis.ip3"), Integer.parseInt(port)));
+ nodes.add(new HostAndPort(props.getProperty("redis.ip4"), Integer.parseInt(port)));
+ nodes.add(new HostAndPort(props.getProperty("redis.ip5"), Integer.parseInt(port)));
+ }
+ if (jedisCluster == null) {
+ jedisCluster = new JedisCluster(nodes, poolConfig);
+ }
+ return jedisCluster;
+ } catch (Exception e) {
+ logger.error(("JedisCluster Connection creation Failed!!!") + e);
+ e.printStackTrace();
+ return null;
+ }
+ }
+
+}
diff --git a/src/main/java/log4j.properties b/src/main/java/log4j.properties
new file mode 100644
index 0000000..89cba2f
--- /dev/null
+++ b/src/main/java/log4j.properties
@@ -0,0 +1,16 @@
+log4j.rootLogger=info, stdout,ROLLING_FILE
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Threshold=INFO
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
+
+
+log4j.appender.ROLLING_FILE=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.ROLLING_FILE.encoding=UTF-8
+log4j.appender.ROLLING_FILE.Threshold=INFO
+log4j.appender.ROLLING_FILE.DatePattern ='.'yyyy-MM-dd'.log'
+log4j.appender.ROLLING_FILE.File=log/storm-log-process.log
+log4j.appender.ROLLING_FILE.Append=true
+log4j.appender.ROLLING_FILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.ROLLING_FILE.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - %m%n \ No newline at end of file
diff --git a/src/main/resources/core-site.xml b/src/main/resources/core-site.xml
new file mode 100644
index 0000000..516149e
--- /dev/null
+++ b/src/main/resources/core-site.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+ <property>
+ <name>fs.defaultFS</name>
+ <value>hdfs://ns1</value>
+ </property>
+ <property>
+ <name>hadoop.tmp.dir</name>
+ <value>file:/opt/hadoop/tmp</value>
+ </property>
+ <property>
+ <name>io.file.buffer.size</name>
+        <value>131072</value>
+ </property>
+ <property>
+ <name>hadoop.proxyuser.root.hosts</name>
+ <value>*</value>
+ </property>
+ <property>
+ <name>hadoop.proxyuser.root.groups</name>
+ <value>*</value>
+ </property>
+ <property>
+ <name>hadoop.logfile.size</name>
+ <value>10000000</value>
+ <description>The max size of each log file</description>
+ </property>
+
+ <property>
+ <name>hadoop.logfile.count</name>
+ <value>1</value>
+ <description>The max number of log files</description>
+ </property>
+ <property>
+ <name>ha.zookeeper.quorum</name>
+ <value>master:2181,slave1:2181,slave2:2181</value>
+ </property>
+</configuration>
+
diff --git a/src/main/resources/hdfs-site.xml b/src/main/resources/hdfs-site.xml
new file mode 100644
index 0000000..845b195
--- /dev/null
+++ b/src/main/resources/hdfs-site.xml
@@ -0,0 +1,116 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+ <property>
+ <name>dfs.namenode.name.dir</name>
+ <value>file:/home/bigdata/hadoop/dfs/name</value>
+ </property>
+ <property>
+ <name>dfs.datanode.data.dir</name>
+ <value>file:/home/bigdata/hadoop/dfs/data</value>
+ </property>
+ <property>
+ <name>dfs.replication</name>
+ <value>2</value>
+ </property>
+ <property>
+ <name>dfs.namenode.secondary.http-address</name>
+ <value>master:9001</value>
+ </property>
+ <property>
+ <name>dfs.webhdfs.enabled</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>dfs.permissions</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>dfs.permissions.enabled</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>dfs.nameservices</name>
+ <value>ns1</value>
+ </property>
+ <property>
+ <name>dfs.blocksize</name>
+ <value>134217728</value>
+ </property>
+ <property>
+ <name>dfs.ha.namenodes.ns1</name>
+ <value>nn1,nn2</value>
+ </property>
+ <!-- nn1的RPC通信地址,nn1所在地址 -->
+ <property>
+ <name>dfs.namenode.rpc-address.ns1.nn1</name>
+ <value>master:9000</value>
+ </property>
+ <!-- nn1的http通信地址,外部访问地址 -->
+ <property>
+ <name>dfs.namenode.http-address.ns1.nn1</name>
+ <value>master:50070</value>
+ </property>
+ <!-- nn2的RPC通信地址,nn2所在地址 -->
+ <property>
+ <name>dfs.namenode.rpc-address.ns1.nn2</name>
+ <value>slave1:9000</value>
+ </property>
+ <!-- nn2的http通信地址,外部访问地址 -->
+ <property>
+ <name>dfs.namenode.http-address.ns1.nn2</name>
+ <value>slave1:50070</value>
+ </property>
+ <!-- 指定NameNode的元数据在JournalNode日志上的存放位置(一般和zookeeper部署在一起) -->
+ <property>
+ <name>dfs.namenode.shared.edits.dir</name>
+ <value>qjournal://slave1:8485;slave2:8485;master:8485/ns1</value>
+ </property>
+ <!-- 指定JournalNode在本地磁盘存放数据的位置 -->
+ <property>
+ <name>dfs.journalnode.edits.dir</name>
+ <value>/home/bigdata/hadoop/journal</value>
+ </property>
+ <!--客户端通过代理访问namenode,访问文件系统,HDFS 客户端与Active 节点通信的Java 类,使用其确定Active 节点是否活跃 -->
+ <property>
+ <name>dfs.client.failover.proxy.provider.ns1</name>
+ <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
+ </property>
+ <!--这是配置自动切换的方法,有多种使用方法,具体可以看官网,在文末会给地址,这里是远程登录杀死的方法 -->
+ <property>
+ <name>dfs.ha.fencing.methods</name>
+ <value>sshfence</value>
+ </property>
+ <!-- 这个是使用sshfence隔离机制时才需要配置ssh免登陆 -->
+ <property>
+ <name>dfs.ha.fencing.ssh.private-key-files</name>
+ <value>/root/.ssh/id_rsa</value>
+ </property>
+ <!-- 配置sshfence隔离机制超时时间,这个属性同上,如果你是用脚本的方法切换,这个应该是可以不配置的 -->
+ <property>
+ <name>dfs.ha.fencing.ssh.connect-timeout</name>
+ <value>30000</value>
+ </property>
+ <!-- 这个是开启自动故障转移,如果你没有自动故障转移,这个可以先不配 -->
+ <property>
+ <name>dfs.ha.automatic-failover.enabled</name>
+ <value>true</value>
+ </property>
+</configuration>
+
diff --git a/src/main/resources/mapred-site.xml b/src/main/resources/mapred-site.xml
new file mode 100644
index 0000000..99a2ed7
--- /dev/null
+++ b/src/main/resources/mapred-site.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+ <property>
+ <name>mapreduce.framework.name</name>
+ <value>yarn</value>
+ </property>
+ <property>
+ <name>mapreduce.jobhistory.address</name>
+ <value>master:10020</value>
+ </property>
+ <property>
+ <name>mapreduce.jobhistory.webapp.address</name>
+ <value>master:19888</value>
+ </property>
+</configuration>
+
diff --git a/src/main/resources/yarn-site.xml b/src/main/resources/yarn-site.xml
new file mode 100644
index 0000000..77e392d
--- /dev/null
+++ b/src/main/resources/yarn-site.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. See accompanying LICENSE file.
+-->
+<configuration>
+
+ <property>
+ <name>yarn.nodemanager.aux-services</name>
+ <value>mapreduce_shuffle</value>
+ </property>
+ <property>
+ <name>yarn.resourcemanager.ha.enabled</name>
+ <value>true</value>
+ </property>
+ <!--声明两台resourcemanager的地址-->
+ <property>
+ <name>yarn.resourcemanager.cluster-id</name>
+ <value>rmcluster</value>
+ </property>
+ <property>
+ <name>yarn.resourcemanager.ha.rm-ids</name>
+ <value>rm1,rm2</value>
+ </property>
+ <property>
+ <name>yarn.resourcemanager.hostname.rm1</name>
+ <value>master</value>
+ </property>
+ <property>
+ <name>yarn.resourcemanager.hostname.rm2</name>
+ <value>slave1</value>
+ </property>
+ <!--指定zookeeper集群的地址-->
+ <property>
+ <name>yarn.resourcemanager.zk-address</name>
+ <value>master:2181,slave1:2181,slave2:2181</value>
+ </property>
+ <!--启用自动恢复,当任务进行一半,rm坏掉,就要启动自动恢复,默认是false-->
+ <property>
+ <name>yarn.resourcemanager.recovery.enabled</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>yarn.resourcemanager.store.class</name>
+ <value>org.apache.hadoop.yarn.server.resourcemanager.recovery.ZKRMStateStore</value>
+ </property>
+
+ <property>
+ <name>yarn.nodemanager.vmem-check-enabled</name>
+ <value>false</value>
+ </property>
+</configuration>