Diffstat (limited to 'ip-learning-spark/src/main/java/cn/ac/iie')
-rw-r--r--  ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java | 53
1 file changed, 0 insertions(+), 53 deletions(-)
diff --git a/ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java b/ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java
index 99b55ed..0e03d2e 100644
--- a/ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java
+++ b/ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java
@@ -6,7 +6,6 @@ import cn.ac.iie.utils.ArangoDBConnect;
import cn.ac.iie.utils.ExecutorThreadPool;
import com.arangodb.ArangoCursor;
import com.arangodb.entity.BaseDocument;
-import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -20,15 +19,6 @@ import java.util.concurrent.CountDownLatch;
*/
public class BaseArangoData {
private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class);
-
- public static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexFqdnMap = new ConcurrentHashMap<>();
- public static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexIpMap = new ConcurrentHashMap<>();
- public static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexSubscriberMap = new ConcurrentHashMap<>();
- public static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationFqdnAddressIpMap = new ConcurrentHashMap<>();
- public static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationIpVisitFqdnMap = new ConcurrentHashMap<>();
- public static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationFqdnSameFqdnMap = new ConcurrentHashMap<>();
- public static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationSubsciberLocateIpMap = new ConcurrentHashMap<>();
-
private static ArangoDBConnect arangoDBConnect = ArangoDBConnect.getInstance();
private ExecutorThreadPool threadPool = ExecutorThreadPool.getInstance();
@@ -82,47 +72,4 @@ public class BaseArangoData {
return "FOR doc IN " + table + " limit "+offsetNum+","+sepNum+" RETURN doc";
}
- private long[] getTimeRange(String table) {
- long minTime = 0L;
- long maxTime = 0L;
- long startTime = System.currentTimeMillis();
- String sql = "LET doc = (FOR doc IN " + table + " RETURN doc) return {max_time:MAX(doc[*].FIRST_FOUND_TIME),min_time:MIN(doc[*].FIRST_FOUND_TIME)}";
- switch (ApplicationConfig.ARANGO_TIME_LIMIT_TYPE()) {
- case 0:
- ArangoCursor<BaseDocument> timeDoc = arangoDBConnect.executorQuery(sql, BaseDocument.class);
- try {
- if (timeDoc != null) {
- while (timeDoc.hasNext()) {
- BaseDocument doc = timeDoc.next();
- maxTime = Long.parseLong(doc.getAttribute("max_time").toString()) + ApplicationConfig.THREAD_POOL_NUMBER();
- minTime = Long.parseLong(doc.getAttribute("min_time").toString());
- }
- } else {
- LOG.warn("获取ArangoDb时间范围为空");
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- break;
- case 1:
- maxTime = ApplicationConfig.READ_ARANGO_MAX_TIME();
- minTime = ApplicationConfig.READ_ARANGO_MIN_TIME();
- break;
- default:
- }
- long lastTime = System.currentTimeMillis();
- LOG.warn(sql + "\nTime spent querying max/min time: " + (lastTime - startTime));
- return new long[]{minTime, maxTime};
-
- }
-
- private String getQuerySql(long[] timeRange, int threadNumber, String table) {
- long minTime = timeRange[0];
- long maxTime = timeRange[1];
- long diffTime = (maxTime - minTime) / ApplicationConfig.THREAD_POOL_NUMBER();
- long maxThreadTime = minTime + (threadNumber + 1) * diffTime;
- long minThreadTime = minTime + threadNumber * diffTime;
- return "FOR doc IN " + table + " filter doc.FIRST_FOUND_TIME >= " + minThreadTime + " and doc.FIRST_FOUND_TIME <= " + maxThreadTime + " " + ApplicationConfig.ARANGODB_READ_LIMIT() + " RETURN doc";
- }
-
}
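
For context, the two methods removed by this diff implemented a time-based read partitioning: getTimeRange asked ArangoDB for the minimum and maximum FIRST_FOUND_TIME in a collection, and getQuerySql split that range into equal slices, one AQL query per worker thread. Below is a minimal, standalone sketch of that idea; the class name, method name, parameters, and example collection name are illustrative only and are not part of the repository code (ApplicationConfig, ArangoDBConnect, and the configurable LIMIT suffix from ARANGODB_READ_LIMIT() are replaced by plain parameters so the example compiles on its own).

// Hypothetical sketch, not repository code: shows how a FIRST_FOUND_TIME range
// can be split evenly across worker threads, as the removed getQuerySql did.
public class TimeRangePartitionSketch {

    // Build one AQL query per worker thread, each covering an equal slice of
    // [minTime, maxTime] on the FIRST_FOUND_TIME attribute.
    static String[] partitionQueries(String table, long minTime, long maxTime, int threads) {
        long sliceWidth = (maxTime - minTime) / threads;
        String[] queries = new String[threads];
        for (int threadNumber = 0; threadNumber < threads; threadNumber++) {
            long lower = minTime + threadNumber * sliceWidth;
            long upper = minTime + (threadNumber + 1) * sliceWidth;
            queries[threadNumber] =
                    "FOR doc IN " + table
                    + " FILTER doc.FIRST_FOUND_TIME >= " + lower
                    + " AND doc.FIRST_FOUND_TIME <= " + upper
                    + " RETURN doc";
        }
        return queries;
    }

    public static void main(String[] args) {
        // Example: split an illustrative one-day epoch range across 4 workers.
        for (String q : partitionQueries("DNS_FQDN", 1_600_000_000L, 1_600_086_400L, 4)) {
            System.out.println(q);
        }
    }
}

After this change, only the offset/limit style query builder (getBaseSql, kept in the context above) remains for reading collections.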