path: root/IP-learning-graph/src
author     wanglihui <[email protected]>    2020-07-13 20:00:59 +0800
committer  wanglihui <[email protected]>    2020-07-13 20:00:59 +0800
commit     c9010b26057472d9d9bf1b6aedd1fdbbd01043f6 (patch)
tree       b0d8da1095e70909080654c0bb3245294d039ed8 /IP-learning-graph/src
parent     705ed03926d5edc97e5d4f56309eeb55cbd67f15 (diff)
Update client IP attributes
Diffstat (limited to 'IP-learning-graph/src')
-rw-r--r--  IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseArangoData.java                   3
-rw-r--r--  IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseClickhouseData.java              14
-rw-r--r--  IP-learning-graph/src/main/java/cn/ac/iie/service/read/ReadClickhouseData.java     28
-rw-r--r--  IP-learning-graph/src/main/java/cn/ac/iie/service/relationship/LocateFqdn2Ip.java  75
-rw-r--r--  IP-learning-graph/src/main/java/cn/ac/iie/service/update/Relationship.java         67
-rw-r--r--  IP-learning-graph/src/main/java/cn/ac/iie/service/update/Vertex.java               10
-rw-r--r--  IP-learning-graph/src/main/java/cn/ac/iie/test/IpLearningApplicationTest.java       3
-rw-r--r--  IP-learning-graph/src/main/resources/application.properties                         5
-rw-r--r--  IP-learning-graph/src/test/java/cn/ac/iie/ArrayTest.java                            14
9 files changed, 142 insertions(+), 77 deletions(-)
diff --git a/IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseArangoData.java b/IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseArangoData.java
index 92ac31c..53d8e6d 100644
--- a/IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseArangoData.java
+++ b/IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseArangoData.java
@@ -12,6 +12,9 @@ import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
+/**
+ * Loads historical data from ArangoDB.
+ */
public class BaseArangoData {
private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class);
diff --git a/IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseClickhouseData.java b/IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseClickhouseData.java
index 4c32287..b9e003d 100644
--- a/IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseClickhouseData.java
+++ b/IP-learning-graph/src/main/java/cn/ac/iie/dao/BaseClickhouseData.java
@@ -34,7 +34,7 @@ public class BaseClickhouseData {
private Statement statement;
void BaseVFqdn() {
- initializeVertexMap(vFqdnMap);
+ initializeMap(vFqdnMap);
LOG.info("FQDN resultMap初始化完成");
String sql = getVFqdnSql();
long start = System.currentTimeMillis();
@@ -63,7 +63,7 @@ public class BaseClickhouseData {
}
void BaseVIp() {
- initializeVertexMap(vIpMap);
+ initializeMap(vIpMap);
LOG.info("IP resultMap初始化完成");
String sql = getVIpSql();
long start = System.currentTimeMillis();
@@ -90,7 +90,7 @@ public class BaseClickhouseData {
}
void BaseVertexSubscriber(){
- initializeVertexMap(vSubscriberMap);
+ initializeMap(vSubscriberMap);
LOG.info("SUBSCRIBER resultMap初始化完成");
String sql = getVertexSubscriberSql();
long start = System.currentTimeMillis();
@@ -118,7 +118,7 @@ public class BaseClickhouseData {
}
void BaseRelationshipSubscriberLocateIp(){
- initializeVertexMap(eSubsciberLocateIpMap);
+ initializeMap(eSubsciberLocateIpMap);
LOG.info("R_LOCATE_SUBSCRIBER2IP");
String sql = getRelationshipSubsciberLocateIpSql();
long start = System.currentTimeMillis();
@@ -142,7 +142,7 @@ public class BaseClickhouseData {
}
void BaseEFqdnAddressIp() {
- initializeVertexMap(eFqdnAddressIpMap);
+ initializeMap(eFqdnAddressIpMap);
LOG.info("R_LOCATE_FQDN2IP resultMap初始化完成");
String sql = getEFqdnAddressIpSql();
long start = System.currentTimeMillis();
@@ -166,7 +166,7 @@ public class BaseClickhouseData {
}
void BaseEIpVisitFqdn() {
- initializeVertexMap(eIpVisitFqdnMap);
+ initializeMap(eIpVisitFqdnMap);
LOG.info("R_VISIT_IP2FQDN resultMap初始化完成");
String sql = getEIpVisitFqdnSql();
long start = System.currentTimeMillis();
@@ -188,7 +188,7 @@ public class BaseClickhouseData {
}
}
- private void initializeVertexMap(Map map){
+ private void initializeMap(Map map){
try {
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
map.put(i, new HashMap<>());
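For reference, the renamed initializeMap helper only pre-creates one empty bucket per worker thread before the ClickHouse result set is partitioned. A minimal standalone sketch of that idea, with the pool size hard-coded here purely for illustration (the project takes it from ApplicationConfig.THREAD_POOL_NUMBER):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class InitializeMapSketch {
    // Stand-in for ApplicationConfig.THREAD_POOL_NUMBER; illustrative only.
    private static final int THREAD_POOL_NUMBER = 10;

    // Pre-create one empty HashMap per worker index so each thread later
    // fills its own slot without contending on bucket creation.
    static <K, V> void initializeMap(Map<Integer, HashMap<K, V>> map) {
        for (int i = 0; i < THREAD_POOL_NUMBER; i++) {
            map.put(i, new HashMap<>());
        }
    }

    public static void main(String[] args) {
        Map<Integer, HashMap<String, Long>> vFqdnMap = new ConcurrentHashMap<>();
        initializeMap(vFqdnMap);
        System.out.println(vFqdnMap.size()); // 10 empty per-thread buckets
    }
}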
diff --git a/IP-learning-graph/src/main/java/cn/ac/iie/service/read/ReadClickhouseData.java b/IP-learning-graph/src/main/java/cn/ac/iie/service/read/ReadClickhouseData.java
index 2b48e7d..a07e907 100644
--- a/IP-learning-graph/src/main/java/cn/ac/iie/service/read/ReadClickhouseData.java
+++ b/IP-learning-graph/src/main/java/cn/ac/iie/service/read/ReadClickhouseData.java
@@ -13,6 +13,8 @@ import java.util.regex.Pattern;
public class ReadClickhouseData {
+ public static long currentHour = System.currentTimeMillis() / (60 * 60 * 1000) * 60 * 60;
+
private static Pattern pattern = Pattern.compile("^[\\d]*$");
private static final Logger LOG = LoggerFactory.getLogger(ReadClickhouseData.class);
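The new currentHour field truncates the wall clock to the top of the hour and converts it to epoch seconds: dividing milliseconds by 3,600,000 drops the sub-hour remainder, and multiplying by 3,600 lands back in seconds. A worked example of that arithmetic (the sample timestamp is illustrative):

public class CurrentHourSketch {
    public static void main(String[] args) {
        long nowMs = 1_594_638_059_000L;                        // 2020-07-13 11:00:59 UTC
        long currentHour = nowMs / (60 * 60 * 1000) * 60 * 60;
        System.out.println(currentHour);                        // 1594638000 -> 11:00:00 UTC, in seconds
    }
}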
@@ -100,6 +102,10 @@ public class ReadClickhouseData {
long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
long countTotal = resultSet.getLong("COUNT_TOTAL");
String[] distCipRecents = (String[]) resultSet.getArray("DIST_CIP_RECENT").getArray();
+ long[] clientIpTs = new long[distCipRecents.length];
+ for (int i = 0;i < clientIpTs.length;i++){
+ clientIpTs[i] = currentHour;
+ }
String key = vFqdn + "-" + vIp;
newDoc = new BaseEdgeDocument();
@@ -109,8 +115,8 @@ public class ReadClickhouseData {
newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
newDoc.addAttribute("COUNT_TOTAL", countTotal);
- newDoc.addAttribute("DIST_CIP_RECENT", distCipRecents);
- newDoc.addAttribute("DIST_CIP_TOTAL", distCipRecents);
+ newDoc.addAttribute("DIST_CIP", distCipRecents);
+ newDoc.addAttribute("DIST_CIP_TS",clientIpTs);
}
return newDoc;
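The hunk above replaces the old DIST_CIP_RECENT/DIST_CIP_TOTAL pair with two parallel arrays: DIST_CIP holds the distinct client IPs and DIST_CIP_TS holds one hour stamp per IP, all initialized to currentHour for a freshly read edge. A small sketch of that pairing, with made-up IPs:

import java.util.Arrays;

public class DistCipSketch {
    public static void main(String[] args) {
        long currentHour = 1_594_638_000L;                    // hour bucket, epoch seconds
        String[] distCipRecents = {"10.0.0.1", "10.0.0.2"};   // illustrative DIST_CIP_RECENT values

        // One timestamp per client IP; every IP seen in this batch gets the same hour stamp.
        long[] clientIpTs = new long[distCipRecents.length];
        Arrays.fill(clientIpTs, currentHour);

        // DIST_CIP[i] is paired with DIST_CIP_TS[i] on the edge document.
        System.out.println(Arrays.toString(distCipRecents));  // [10.0.0.1, 10.0.0.2]
        System.out.println(Arrays.toString(clientIpTs));      // [1594638000, 1594638000]
    }
}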
@@ -176,7 +182,7 @@ public class ReadClickhouseData {
long[] timeLimit = getTimeLimit();
long maxTime = timeLimit[0];
long minTime = timeLimit[1];
- String where = "common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime;
+ String where = "common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
String sslSql = "SELECT ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni";
String httpSql = "SELECT http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host";
return "SELECT FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME FROM ((" + sslSql + ") UNION ALL (" + httpSql + ")) GROUP BY FQDN HAVING FQDN != ''";
@@ -186,7 +192,7 @@ public class ReadClickhouseData {
long[] timeLimit = getTimeLimit();
long maxTime = timeLimit[0];
long minTime = timeLimit[1];
- String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime;
+ String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
String clientIpSql = "SELECT common_client_ip AS IP, MIN(common_recv_time) AS FIRST_FOUND_TIME,MAX(common_recv_time) AS LAST_FOUND_TIME,count(*) as SESSION_COUNT,sum(common_c2s_byte_num) as BYTES_SUM,'client' as ip_type FROM tsg_galaxy_v3.connection_record_log where " + where + " group by IP";
String serverIpSql = "SELECT common_server_ip AS IP, MIN(common_recv_time) AS FIRST_FOUND_TIME,MAX(common_recv_time) AS LAST_FOUND_TIME,count(*) as SESSION_COUNT,sum(common_s2c_byte_num) as BYTES_SUM,'server' as ip_type FROM tsg_galaxy_v3.connection_record_log where " + where + " group by IP";
return "SELECT * FROM((" + clientIpSql + ") UNION ALL (" + serverIpSql + "))";
@@ -196,9 +202,9 @@ public class ReadClickhouseData {
long[] timeLimit = getTimeLimit();
long maxTime = timeLimit[0];
long minTime = timeLimit[1];
- String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime;
- String sslSql = "SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(30)(common_client_ip) AS DIST_CIP_RECENT,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip";
- String httpSql = "SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(30)(common_client_ip) AS DIST_CIP_RECENT,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip";
+ String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
+ String sslSql = "SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(100)(common_client_ip) AS DIST_CIP_RECENT,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip";
+ String httpSql = "SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(100)(common_client_ip) AS DIST_CIP_RECENT,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip";
return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
}
@@ -206,7 +212,7 @@ public class ReadClickhouseData {
long[] timeLimit = getTimeLimit();
long maxTime = timeLimit[0];
long minTime = timeLimit[1];
- String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime;
+ String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
String httpSql = "SELECT http_host AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_client_ip";
String sslSql = "SELECT ssl_sni AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE common_schema_type = 'SSL' GROUP BY ssl_sni,common_client_ip";
return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
@@ -216,7 +222,7 @@ public class ReadClickhouseData {
long[] timeLimit = getTimeLimit();
long maxTime = timeLimit[0];
long minTime = timeLimit[1];
- String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime + " AND common_subscriber_id != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
+ String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime + " AND common_subscriber_id != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
return "SELECT common_subscriber_id,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME FROM radius_record_log WHERE" + where + " GROUP BY common_subscriber_id";
}
@@ -224,12 +230,12 @@ public class ReadClickhouseData {
long[] timeLimit = getTimeLimit();
long maxTime = timeLimit[0];
long minTime = timeLimit[1];
- String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime + " AND common_subscriber_id != '' AND radius_framed_ip != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
+ String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime + " AND common_subscriber_id != '' AND radius_framed_ip != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
return "SELECT common_subscriber_id,radius_framed_ip,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME,COUNT(*) as COUNT_TOTAL FROM radius_record_log WHERE" + where + " GROUP BY common_subscriber_id,radius_framed_ip";
}
private static long[] getTimeLimit() {
- long maxTime = System.currentTimeMillis() / 1000;
+ long maxTime = currentHour;
long minTime = maxTime - 3600;
// long maxTime = ApplicationConfig.READ_CLICKHOUSE_MAX_TIME;
// long minTime = ApplicationConfig.READ_CLICKHOUSE_MIN_TIME;
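With getTimeLimit now anchored to currentHour and the upper bound switched from <= to <, every run covers the half-open hour [currentHour - 3600, currentHour), so a record stamped exactly on the boundary is counted by exactly one run. A sketch of the resulting predicate, assuming common_recv_time is stored in epoch seconds:

public class TimeWindowSketch {
    public static void main(String[] args) {
        long currentHour = 1_594_638_000L;  // top of the current hour, epoch seconds
        long maxTime = currentHour;         // exclusive upper bound
        long minTime = maxTime - 3600;      // inclusive lower bound, one hour earlier

        // Half-open interval [minTime, maxTime): boundary rows never land in two runs.
        String where = "common_recv_time >= " + minTime
                     + " AND common_recv_time < " + maxTime;
        System.out.println(where);
    }
}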
diff --git a/IP-learning-graph/src/main/java/cn/ac/iie/service/relationship/LocateFqdn2Ip.java b/IP-learning-graph/src/main/java/cn/ac/iie/service/relationship/LocateFqdn2Ip.java
index 8bd8764..f3551b3 100644
--- a/IP-learning-graph/src/main/java/cn/ac/iie/service/relationship/LocateFqdn2Ip.java
+++ b/IP-learning-graph/src/main/java/cn/ac/iie/service/relationship/LocateFqdn2Ip.java
@@ -1,11 +1,11 @@
package cn.ac.iie.service.relationship;
+import cn.ac.iie.service.read.ReadClickhouseData;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
@@ -22,12 +22,79 @@ public class LocateFqdn2Ip extends Relationship {
@Override
protected void mergeFunction(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc){
super.mergeFunction(properties,schemaEdgeDoc);
- super.mergeDistinctClientIp(properties,schemaEdgeDoc);
+ mergeDistinctClientIp(properties,schemaEdgeDoc);
}
@Override
protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
super.updateFunction(newEdgeDocument, historyEdgeDocument);
- super.updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
+ updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
}
+
+ private void mergeDistinctClientIp(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc){
+ String[] schemaDistCipRecents = (String[]) schemaEdgeDoc.getAttribute("DIST_CIP");
+ String[] distCipRecents = (String[]) properties.get("DIST_CIP");
+ Object[] mergeClientIp = distinctIp(schemaDistCipRecents, distCipRecents);
+ long[] mergeClientIpTs = new long[mergeClientIp.length];
+ for (int i = 0;i < mergeClientIpTs.length;i++){
+ mergeClientIpTs[i] = ReadClickhouseData.currentHour;
+ }
+ properties.put("DIST_CIP", mergeClientIp);
+ properties.put("DIST_CIP_TS",mergeClientIpTs);
+ }
+
+ private void updateDistinctClientIp(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument edgeDocument){
+ ArrayList<String> distCip = (ArrayList<String>) edgeDocument.getAttribute("DIST_CIP");
+ ArrayList<Long> distCipTs = (ArrayList<Long>) edgeDocument.getAttribute("DIST_CIP_TS");
+ HashMap<String, Long> distCipToTs = new HashMap<>();
+ if (distCip.size() == distCipTs.size()){
+ for (int i = 0;i < distCip.size();i++){
+ distCipToTs.put(distCip.get(i),distCipTs.get(i));
+ }
+ }
+ Object[] distCipRecent = (Object[])newEdgeDocument.getAttribute("DIST_CIP");
+ for (Object cip:distCipRecent){
+ distCipToTs.put(cip.toString(),ReadClickhouseData.currentHour);
+ }
+
+ Map<String, Long> sortDistCip = sortMapByValue(distCipToTs);
+ edgeDocument.addAttribute("DIST_CIP",sortDistCip.keySet().toArray());
+ edgeDocument.addAttribute("DIST_CIP_TS",sortDistCip.values().toArray());
+ }
+
+
+ /**
+     * Sorts the map entries by value (newest hour stamp first) and keeps at most 100.
+ */
+ private Map<String, Long> sortMapByValue(Map<String, Long> oriMap) {
+ if (oriMap == null || oriMap.isEmpty()) {
+            return new LinkedHashMap<>();
+ }
+ Map<String, Long> sortedMap = new LinkedHashMap<>();
+ List<Map.Entry<String, Long>> entryList = new ArrayList<>(oriMap.entrySet());
+ entryList.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));
+
+ if(entryList.size() > 100){
+ for(Map.Entry<String, Long> set:entryList.subList(0, 100)){
+ sortedMap.put(set.getKey(), set.getValue());
+ }
+ }else {
+ for(Map.Entry<String, Long> set:entryList){
+ sortedMap.put(set.getKey(), set.getValue());
+ }
+ }
+ return sortedMap;
+ }
+
+ private Object[] distinctIp(Object[] distCipTotalsSrc,Object[] distCipRecentsSrc){
+ HashSet<Object> dIpSet = new HashSet<>();
+ dIpSet.addAll(Arrays.asList(distCipRecentsSrc));
+ dIpSet.addAll(Arrays.asList(distCipTotalsSrc));
+ Object[] distCipTotals = dIpSet.toArray();
+        if (distCipTotals.length > 100) {
+            distCipTotals = Arrays.copyOf(distCipTotals, 100);
+        }
+ return distCipTotals;
+ }
+
}
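The net effect of the new mergeDistinctClientIp/updateDistinctClientIp pair is a per-edge map from client IP to the hour it was last seen, trimmed to the 100 most recently seen IPs. A compact standalone sketch of that keep-the-newest policy, with a cap of 3 and made-up IPs so the output stays readable:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class NewestClientIpSketch {
    // Keep only the 'cap' client IPs with the most recent hour stamp,
    // ordered newest first; mirrors the intent of sortMapByValue above.
    static Map<String, Long> newestFirst(Map<String, Long> ipToHour, int cap) {
        List<Map.Entry<String, Long>> entries = new ArrayList<>(ipToHour.entrySet());
        entries.sort((a, b) -> b.getValue().compareTo(a.getValue()));
        Map<String, Long> result = new LinkedHashMap<>();
        for (Map.Entry<String, Long> e : entries.subList(0, Math.min(cap, entries.size()))) {
            result.put(e.getKey(), e.getValue());
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, Long> ipToHour = new LinkedHashMap<>();
        ipToHour.put("10.0.0.1", 1_594_630_800L); // seen two hours ago
        ipToHour.put("10.0.0.2", 1_594_638_000L); // seen this hour
        ipToHour.put("10.0.0.3", 1_594_634_400L); // seen last hour
        ipToHour.put("10.0.0.4", 1_594_638_000L); // seen this hour
        System.out.println(newestFirst(ipToHour, 3));
        // {10.0.0.2=1594638000, 10.0.0.4=1594638000, 10.0.0.3=1594634400}
    }
}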
diff --git a/IP-learning-graph/src/main/java/cn/ac/iie/service/update/Relationship.java b/IP-learning-graph/src/main/java/cn/ac/iie/service/update/Relationship.java
index 02f3251..4482691 100644
--- a/IP-learning-graph/src/main/java/cn/ac/iie/service/update/Relationship.java
+++ b/IP-learning-graph/src/main/java/cn/ac/iie/service/update/Relationship.java
@@ -13,11 +13,11 @@ import java.util.concurrent.CountDownLatch;
public class Relationship extends Thread {
private static final Logger LOG = LoggerFactory.getLogger(Relationship.class);
- protected HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap;
- protected ArangoDBConnect arangoManger;
- protected String collectionName;
- protected ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
- protected CountDownLatch countDownLatch;
+ private HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap;
+ private ArangoDBConnect arangoManger;
+ private String collectionName;
+ private ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
+ private CountDownLatch countDownLatch;
public Relationship(HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
@@ -98,7 +98,7 @@ public class Relationship extends Thread {
updateFoundTime(newEdgeDocument,historyEdgeDocument);
setSchemaCntByHistory(historyEdgeDocument,"TLS_CNT_RECENT","TLS_CNT_TOTAL",newEdgeDocument);
setSchemaCntByHistory(historyEdgeDocument,"HTTP_CNT_RECENT","HTTP_CNT_TOTAL",newEdgeDocument);
-// updateDistinctClientIp(newEdgeDocument,historyEdgeDocument);
+ setSchemaCntByHistory(historyEdgeDocument,"DNS_CNT_RECENT","DNS_CNT_TOTAL",newEdgeDocument);
}
protected void updateFoundTime(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument historyEdgeDocument){
@@ -106,13 +106,13 @@ public class Relationship extends Thread {
historyEdgeDocument.addAttribute("LAST_FOUND_TIME", lastFoundTime);
}
- protected void setSchemaCntByHistory(BaseEdgeDocument historyEdgeDocument,String schema,String totalSchema,BaseEdgeDocument newEdgeDocument){
+ private void setSchemaCntByHistory(BaseEdgeDocument historyEdgeDocument,String schema,String totalSchema,BaseEdgeDocument newEdgeDocument){
long countTotal = Long.parseLong(newEdgeDocument.getAttribute(totalSchema).toString());
long updateCountTotal = Long.parseLong(historyEdgeDocument.getAttribute(totalSchema).toString());
ArrayList<Long> cntRecent = (ArrayList<Long>) historyEdgeDocument.getAttribute(schema);
Long[] cntRecentsSrc = cntRecent.toArray(new Long[cntRecent.size()]);
- Long[] cntRecentsDst = new Long[7];
+ Long[] cntRecentsDst = new Long[24];
System.arraycopy(cntRecentsSrc, 0, cntRecentsDst, 1, cntRecentsSrc.length - 1);
cntRecentsDst[0] = countTotal;
@@ -122,41 +122,9 @@ public class Relationship extends Thread {
protected void mergeFunction(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
mergeFoundTime(properties, schemaEdgeDoc);
-// mergeDistinctClientIp(properties,schemaEdgeDoc);
}
- protected void mergeDistinctClientIp(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc){
- String[] schemaDistCipRecents = (String[]) schemaEdgeDoc.getAttribute("DIST_CIP_RECENT");
- String[] distCipRecents = (String[]) properties.get("DIST_CIP_RECENT");
- Object[] mergeClientIp = distinctIp(schemaDistCipRecents, distCipRecents);
- properties.put("DIST_CIP_RECENT", mergeClientIp);
- properties.put("DIST_CIP_TOTAL",mergeClientIp);
- }
-
- protected void updateDistinctClientIp(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument edgeDocument){
- ArrayList<String> distCipTotal = (ArrayList<String>) edgeDocument.getAttribute("DIST_CIP_TOTAL");
- String[] distCipTotalsSrc = distCipTotal.toArray(new String[distCipTotal.size()]);
-
- Object[] distCipRecentsSrc = (Object[])newEdgeDocument.getAttribute("DIST_CIP_RECENT");
- if (distCipTotalsSrc.length == 30) {
- Object[] distCipTotals = distinctIp(distCipTotalsSrc, distCipRecentsSrc);
- edgeDocument.addAttribute("DIST_CIP_TOTAL", distCipTotals);
- }
- edgeDocument.addAttribute("DIST_CIP_RECENT", distCipRecentsSrc);
- }
-
- protected Object[] distinctIp(Object[] distCipTotalsSrc,Object[] distCipRecentsSrc){
- HashSet<Object> dIpSet = new HashSet<>();
- dIpSet.addAll(Arrays.asList(distCipRecentsSrc));
- dIpSet.addAll(Arrays.asList(distCipTotalsSrc));
- Object[] distCipTotals = dIpSet.toArray();
- if (distCipTotals.length > 30) {
- System.arraycopy(distCipTotals, 0, distCipTotals, 0, 30);
- }
- return distCipTotals;
- }
-
- protected void mergeFoundTime(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
+ private void mergeFoundTime(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
long schemaFirstFoundTime = Long.parseLong(schemaEdgeDoc.getAttribute("FIRST_FOUND_TIME").toString());
long firstFoundTime = Long.parseLong(properties.get("FIRST_FOUND_TIME").toString());
properties.put("FIRST_FOUND_TIME", schemaFirstFoundTime < firstFoundTime ? schemaFirstFoundTime : firstFoundTime);
@@ -165,19 +133,19 @@ public class Relationship extends Thread {
properties.put("LAST_FOUND_TIME", schemaLastFoundTime > lastFoundTime ? schemaLastFoundTime : lastFoundTime);
}
- protected void setSchemaCount(String schema, BaseEdgeDocument schemaEdgeDoc, Map<String, Object> properties) {
+ private void setSchemaCount(String schema, BaseEdgeDocument schemaEdgeDoc, Map<String, Object> properties) {
switch (schema) {
case "HTTP":
long httpCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString());
properties.put("HTTP_CNT_TOTAL", httpCntTotal);
- long[] httpCntRecentsDst = new long[7];
+ long[] httpCntRecentsDst = new long[24];
httpCntRecentsDst[0] = httpCntTotal;
properties.put("HTTP_CNT_RECENT", httpCntRecentsDst);
break;
case "SSL":
long tlsCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString());
properties.put("TLS_CNT_TOTAL", tlsCntTotal);
- long[] tlsCntRecentsDst = new long[7];
+ long[] tlsCntRecentsDst = new long[24];
tlsCntRecentsDst[0] = tlsCntTotal;
properties.put("TLS_CNT_RECENT", tlsCntRecentsDst);
break;
@@ -189,10 +157,15 @@ public class Relationship extends Thread {
private void checkSchemaProperty(Map<String, Object> properties){
if (!properties.containsKey("TLS_CNT_TOTAL")){
properties.put("TLS_CNT_TOTAL",0L);
- properties.put("TLS_CNT_RECENT",new long[7]);
- }else if (!properties.containsKey("HTTP_CNT_TOTAL")){
+ properties.put("TLS_CNT_RECENT",new long[24]);
+ }
+ if (!properties.containsKey("HTTP_CNT_TOTAL")){
properties.put("HTTP_CNT_TOTAL",0L);
- properties.put("HTTP_CNT_RECENT",new long[7]);
+ properties.put("HTTP_CNT_RECENT",new long[24]);
+ }
+ if (!properties.containsKey("DNS_CNT_TOTAL")){
+ properties.put("DNS_CNT_TOTAL",0L);
+ properties.put("DNS_CNT_RECENT",new long[24]);
}
}
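The *_CNT_RECENT arrays grow from 7 to 24 slots, i.e. one slot per hour over the last day, and setSchemaCntByHistory shifts the window and writes the newest hourly count into slot 0. A minimal sketch of that shift, using a 5-slot window and made-up counts:

import java.util.Arrays;

public class SlidingWindowSketch {
    // Shift the recent-count window one slot to the right, put the newest
    // hourly count at index 0, and drop the oldest slot off the end.
    static long[] pushFront(long[] recent, long newestCount) {
        long[] shifted = new long[recent.length];
        System.arraycopy(recent, 0, shifted, 1, recent.length - 1);
        shifted[0] = newestCount;
        return shifted;
    }

    public static void main(String[] args) {
        long[] recent = {5, 4, 3, 2, 1};                            // newest ... oldest
        System.out.println(Arrays.toString(pushFront(recent, 9)));  // [9, 5, 4, 3, 2]
    }
}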
diff --git a/IP-learning-graph/src/main/java/cn/ac/iie/service/update/Vertex.java b/IP-learning-graph/src/main/java/cn/ac/iie/service/update/Vertex.java
index f4c31bf..e68f466 100644
--- a/IP-learning-graph/src/main/java/cn/ac/iie/service/update/Vertex.java
+++ b/IP-learning-graph/src/main/java/cn/ac/iie/service/update/Vertex.java
@@ -21,11 +21,11 @@ import java.util.concurrent.CountDownLatch;
public class Vertex extends Thread{
private static final Logger LOG = LoggerFactory.getLogger(Vertex.class);
- protected HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap;
- protected ArangoDBConnect arangoManger;
- protected String collectionName;
- protected ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
- protected CountDownLatch countDownLatch;
+ private HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap;
+ private ArangoDBConnect arangoManger;
+ private String collectionName;
+ private ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
+ private CountDownLatch countDownLatch;
public Vertex(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
diff --git a/IP-learning-graph/src/main/java/cn/ac/iie/test/IpLearningApplicationTest.java b/IP-learning-graph/src/main/java/cn/ac/iie/test/IpLearningApplicationTest.java
index c425839..e29202d 100644
--- a/IP-learning-graph/src/main/java/cn/ac/iie/test/IpLearningApplicationTest.java
+++ b/IP-learning-graph/src/main/java/cn/ac/iie/test/IpLearningApplicationTest.java
@@ -10,6 +10,7 @@ public class IpLearningApplicationTest {
private static final Logger LOG = LoggerFactory.getLogger(IpLearningApplicationTest.class);
public static void main(String[] args) {
+ long start = System.currentTimeMillis();
LOG.info("Ip Learning Application开始运行");
BaseArangoData baseArangoData = new BaseArangoData();
baseArangoData.baseDocumentDataMap();
@@ -17,6 +18,8 @@ public class IpLearningApplicationTest {
LOG.info("历史数据读取完成,开始更新数据");
UpdateGraphData updateGraphData = new UpdateGraphData();
updateGraphData.updateArango();
+ long last = System.currentTimeMillis();
+ LOG.info("共计运行时间:"+(last - start));
}
}
diff --git a/IP-learning-graph/src/main/resources/application.properties b/IP-learning-graph/src/main/resources/application.properties
index 9428d55..77d62f5 100644
--- a/IP-learning-graph/src/main/resources/application.properties
+++ b/IP-learning-graph/src/main/resources/application.properties
@@ -13,5 +13,6 @@ update.arango.batch=10000
thread.pool.number=10
thread.await.termination.time=10
-read.clickhouse.max.time=1594376834
-read.clickhouse.min.time=1593676953
\ No newline at end of file
+read.clickhouse.max.time=1594627747
+#read.clickhouse.min.time=1594622638
+read.clickhouse.min.time=1593676807
\ No newline at end of file
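The two read.clickhouse.*.time properties are the fixed bounds that the commented-out branch of getTimeLimit above would use for a manual run (the commented property line keeps the previous experiment around). A sketch of how such epoch-second bounds can be read with java.util.Properties; this loader class is hypothetical, the project's real ApplicationConfig is not part of this diff:

import java.io.InputStream;
import java.util.Properties;

public class PropertiesSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Assumes application.properties sits on this sketch's classpath.
        try (InputStream in = PropertiesSketch.class.getResourceAsStream("/application.properties")) {
            props.load(in);
        }
        long maxTime = Long.parseLong(props.getProperty("read.clickhouse.max.time"));
        long minTime = Long.parseLong(props.getProperty("read.clickhouse.min.time"));
        System.out.println("window: [" + minTime + ", " + maxTime + ")");
    }
}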
diff --git a/IP-learning-graph/src/test/java/cn/ac/iie/ArrayTest.java b/IP-learning-graph/src/test/java/cn/ac/iie/ArrayTest.java
index 11d0b6d..4b04e1d 100644
--- a/IP-learning-graph/src/test/java/cn/ac/iie/ArrayTest.java
+++ b/IP-learning-graph/src/test/java/cn/ac/iie/ArrayTest.java
@@ -1,5 +1,6 @@
package cn.ac.iie;
+import java.util.ArrayList;
import java.util.Arrays;
public class ArrayTest {
@@ -8,6 +9,17 @@ public class ArrayTest {
long[] longs1 = new long[7];
System.arraycopy(longs, 0, longs1, 1, longs.length - 1);
longs1[0] = 8;
- System.out.println(Arrays.toString(longs1));
+// System.out.println(Arrays.toString(longs1));
+
+ ArrayList<Long> longs2 = new ArrayList<>();
+ longs2.add(3L);
+ longs2.add(34L);
+ longs2.add(312L);
+ longs2.add(12433L);
+ longs2.add(34L);
+ longs2.add(4124L);
+
+ System.out.println(longs2.subList(0,3).size());
+        System.out.println(longs2.subList(0,3).get(2)); // index 3 would be out of range for a 3-element view
}
}
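The new test exercises List.subList; note that subList(0, 3) is a three-element view whose valid indices are 0..2, so only get(0) through get(2) are in range. A tiny standalone check of those semantics:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SubListSketch {
    public static void main(String[] args) {
        List<Long> values = new ArrayList<>(Arrays.asList(3L, 34L, 312L, 12433L, 34L, 4124L));
        List<Long> firstThree = values.subList(0, 3); // view over indices 0..2, size 3
        System.out.println(firstThree.size());        // 3
        System.out.println(firstThree.get(2));        // 312, the last valid element
        // firstThree.get(3) would throw IndexOutOfBoundsException: the view has only 3 elements.
    }
}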