author     wanglihui <[email protected]>    2021-03-23 11:26:55 +0800
committer  wanglihui <[email protected]>    2021-03-23 11:26:55 +0800
commit     f0cebd8e1cd231271bd2f5f778e90fc5bbd9ee2c (patch)
tree       9dd20822993a70aff6ae232de518f2bb1472f601
parent     b62131dacdb8812d81db2fc59820e3071739e410 (diff)
Format code
-rw-r--r--  ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java                  75
-rw-r--r--  ip-learning-spark/src/main/java/cn/ac/iie/utils/ArangoDBConnect.java               46
-rw-r--r--  ip-learning-spark/src/main/resources/application.properties                        12
-rw-r--r--  ip-learning-spark/src/main/resources/log4j.properties                               5
-rw-r--r--  ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseArangoData.scala                20
-rw-r--r--  ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala           158
-rw-r--r--  ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala  59
-rw-r--r--  ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocHandler.scala   96
-rw-r--r--  ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala    130
-rw-r--r--  ip-learning-spark/src/main/scala/cn/ac/iie/utils/SparkSessionUtil.scala             2
10 files changed, 204 insertions, 399 deletions
diff --git a/ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java b/ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java
deleted file mode 100644
index 0e03d2e..0000000
--- a/ip-learning-spark/src/main/java/cn/ac/iie/dao/BaseArangoData.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package cn.ac.iie.dao;
-
-import cn.ac.iie.config.ApplicationConfig;
-import cn.ac.iie.service.read.ReadHistoryArangoData;
-import cn.ac.iie.utils.ArangoDBConnect;
-import cn.ac.iie.utils.ExecutorThreadPool;
-import com.arangodb.ArangoCursor;
-import com.arangodb.entity.BaseDocument;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CountDownLatch;
-
-/**
- * 获取arangoDB历史数据
- *
- * @author wlh
- */
-public class BaseArangoData {
- private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class);
- private static ArangoDBConnect arangoDBConnect = ArangoDBConnect.getInstance();
-
- private ExecutorThreadPool threadPool = ExecutorThreadPool.getInstance();
-
- public <T extends BaseDocument> void readHistoryData(String table,
- ConcurrentHashMap<Integer, ConcurrentHashMap<String, T>> historyMap,
- Class<T> type) {
- try {
- LOG.warn("开始更新" + table);
- long start = System.currentTimeMillis();
- for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER(); i++) {
- historyMap.put(i, new ConcurrentHashMap<>());
- }
- CountDownLatch countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER());
-// long[] timeRange = getTimeRange(table);
- Long countTotal = getCountTotal(table);
- for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER(); i++) {
-// String sql = getQuerySql(timeRange, i, table);
- String sql = getQuerySql(countTotal, i, table);
- ReadHistoryArangoData<T> readHistoryArangoData = new ReadHistoryArangoData<>(arangoDBConnect, sql, historyMap, type, table, countDownLatch);
- threadPool.executor(readHistoryArangoData);
- }
- countDownLatch.await();
- long last = System.currentTimeMillis();
- LOG.warn("读取" + table + " arangoDB 共耗时:" + (last - start));
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- private Long getCountTotal(String table){
- long start = System.currentTimeMillis();
- Long cnt = 0L;
- String sql = "RETURN LENGTH("+table+")";
- try {
- ArangoCursor<Long> longs = arangoDBConnect.executorQuery(sql, Long.class);
- while (longs.hasNext()){
- cnt = longs.next();
- }
- }catch (Exception e){
- LOG.error(sql +"执行异常");
- }
- long last = System.currentTimeMillis();
- LOG.info(sql+" 结果:"+cnt+" 执行时间:"+(last-start));
- return cnt;
- }
-
- private String getQuerySql(Long cnt,int threadNumber, String table){
- long sepNum = cnt / ApplicationConfig.THREAD_POOL_NUMBER() + 1;
- long offsetNum = threadNumber * sepNum;
- return "FOR doc IN " + table + " limit "+offsetNum+","+sepNum+" RETURN doc";
- }
-
-}
diff --git a/ip-learning-spark/src/main/java/cn/ac/iie/utils/ArangoDBConnect.java b/ip-learning-spark/src/main/java/cn/ac/iie/utils/ArangoDBConnect.java
index d5fb1b8..b3c0c15 100644
--- a/ip-learning-spark/src/main/java/cn/ac/iie/utils/ArangoDBConnect.java
+++ b/ip-learning-spark/src/main/java/cn/ac/iie/utils/ArangoDBConnect.java
@@ -22,11 +22,12 @@ public class ArangoDBConnect {
private static final Logger LOG = LoggerFactory.getLogger(ArangoDBConnect.class);
private static ArangoDB arangoDB = null;
private static ArangoDBConnect conn = null;
+
static {
getArangoDatabase();
}
- private static void getArangoDatabase(){
+ private static void getArangoDatabase() {
arangoDB = new ArangoDB.Builder()
.maxConnections(ApplicationConfig.THREAD_POOL_NUMBER())
.host(ApplicationConfig.ARANGODB_HOST(), ApplicationConfig.ARANGODB_PORT())
@@ -35,82 +36,81 @@ public class ArangoDBConnect {
.build();
}
- public static synchronized ArangoDBConnect getInstance(){
- if (null == conn){
+ public static synchronized ArangoDBConnect getInstance() {
+ if (null == conn) {
conn = new ArangoDBConnect();
}
return conn;
}
- private ArangoDatabase getDatabase(){
+ private ArangoDatabase getDatabase() {
return arangoDB.db(ApplicationConfig.ARANGODB_DB_NAME());
}
- public void clean(){
+ public void clean() {
try {
- if (arangoDB != null){
+ if (arangoDB != null) {
arangoDB.shutdown();
}
- }catch (Exception e){
+ } catch (Exception e) {
e.printStackTrace();
}
}
- public <T> ArangoCursor<T> executorQuery(String query,Class<T> type){
+ public <T> ArangoCursor<T> executorQuery(String query, Class<T> type) {
ArangoDatabase database = getDatabase();
Map<String, Object> bindVars = new MapBuilder().get();
AqlQueryOptions options = new AqlQueryOptions().ttl(ApplicationConfig.ARANGODB_TTL());
try {
return database.query(query, bindVars, options, type);
- }catch (Exception e){
+ } catch (Exception e) {
e.printStackTrace();
return null;
- }finally {
+ } finally {
bindVars.clear();
}
}
@Deprecated
- public <T> void insertAndUpdate(ArrayList<T> docInsert,ArrayList<T> docUpdate,String collectionName){
+ public <T> void insertAndUpdate(ArrayList<T> docInsert, ArrayList<T> docUpdate, String collectionName) {
ArangoDatabase database = getDatabase();
try {
ArangoCollection collection = database.collection(collectionName);
- if (!docInsert.isEmpty()){
+ if (!docInsert.isEmpty()) {
collection.importDocuments(docInsert);
}
- if (!docUpdate.isEmpty()){
+ if (!docUpdate.isEmpty()) {
collection.replaceDocuments(docUpdate);
}
- }catch (Exception e){
+ } catch (Exception e) {
System.out.println("更新失败");
e.printStackTrace();
- }finally {
+ } finally {
docInsert.clear();
docInsert.clear();
}
}
- public <T> void overwrite(ArrayList<T> docOverwrite,String collectionName){
+ public <T> void overwrite(ArrayList<T> docOverwrite, String collectionName) {
ArangoDatabase database = getDatabase();
try {
ArangoCollection collection = database.collection(collectionName);
- if (!docOverwrite.isEmpty()){
+ if (!docOverwrite.isEmpty()) {
DocumentCreateOptions documentCreateOptions = new DocumentCreateOptions();
documentCreateOptions.overwrite(true);
documentCreateOptions.silent(true);
MultiDocumentEntity<DocumentCreateEntity<T>> documentCreateEntityMultiDocumentEntity = collection.insertDocuments(docOverwrite, documentCreateOptions);
Collection<ErrorEntity> errors = documentCreateEntityMultiDocumentEntity.getErrors();
- for (ErrorEntity errorEntity:errors){
- LOG.warn("写入arangoDB异常:"+errorEntity.getErrorMessage());
+ for (ErrorEntity errorEntity : errors) {
+ LOG.warn("写入arangoDB异常:" + errorEntity.getErrorMessage());
}
}
- }catch (Exception e){
- System.out.println("更新失败:"+e.toString());
- }finally {
+ } catch (Exception e) {
+ LOG.error("更新失败:" + e.toString());
+ } finally {
docOverwrite.clear();
}
}
-
}
diff --git a/ip-learning-spark/src/main/resources/application.properties b/ip-learning-spark/src/main/resources/application.properties
index 9b3cee4..7df5c61 100644
--- a/ip-learning-spark/src/main/resources/application.properties
+++ b/ip-learning-spark/src/main/resources/application.properties
@@ -15,11 +15,11 @@ spark.read.clickhouse.fetchsize=10000
spark.read.clickhouse.partitionColumn=LAST_FOUND_TIME
clickhouse.socket.timeout=300000
#arangoDB配置
-arangoDB.host=192.168.40.182
+arangoDB.host=192.168.40.223
arangoDB.port=8529
-arangoDB.user=upsert
-arangoDB.password=ceiec2018
-arangoDB.DB.name=ip-learning-test-0
+arangoDB.user=root
+arangoDB.password=galaxy_2019
+arangoDB.DB.name=tsg_galaxy_v3
#arangoDB.DB.name=iplearn_media_domain
arangoDB.ttl=3600
@@ -27,8 +27,8 @@ thread.pool.number=10
#读取clickhouse时间范围方式,0:读取过去一小时;1:指定时间范围
clickhouse.time.limit.type=1
-read.clickhouse.max.time=1603785961
-read.clickhouse.min.time=1603354682
+read.clickhouse.max.time=1608518990
+read.clickhouse.min.time=1604851201
arangoDB.read.limit=1
update.arango.batch=10000
diff --git a/ip-learning-spark/src/main/resources/log4j.properties b/ip-learning-spark/src/main/resources/log4j.properties
index 5d836b5..3cc61a5 100644
--- a/ip-learning-spark/src/main/resources/log4j.properties
+++ b/ip-learning-spark/src/main/resources/log4j.properties
@@ -4,20 +4,15 @@ log4j.logger.org.apache.http.wire=OFF
#Log4j
log4j.rootLogger=info,console,file
-# 控制台日志配置
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=warn
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
-# 文件日志配置
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=warn
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
-# 路径为相对路径，根据相关部署情况修改为对应目录
-#log4j.appender.file.file=/home/ceiec/iplearning/logs/ip-learning-application.log
-#log4j.appender.file.file=/home/ceiec/iplearning/testLog/ip-learning-application.log
log4j.appender.file.file=./logs/ip-learning-application.log
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseArangoData.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseArangoData.scala
new file mode 100644
index 0000000..cafcca8
--- /dev/null
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseArangoData.scala
@@ -0,0 +1,20 @@
+package cn.ac.iie.dao
+
+import cn.ac.iie.config.ApplicationConfig
+import cn.ac.iie.spark.ArangoSpark
+import cn.ac.iie.spark.rdd.{ArangoRdd, ReadOptions}
+import cn.ac.iie.utils.SparkSessionUtil.sparkContext
+import org.slf4j.LoggerFactory
+
+object BaseArangoData {
+ private val LOG = LoggerFactory.getLogger(BaseArangoData.getClass)
+ private val options = ReadOptions(ApplicationConfig.ARANGODB_DB_NAME)
+
+ def loadArangoRdd[T](name:String): ArangoRdd[T] ={
+ val value = ArangoSpark.load[T](sparkContext, name, options)
+
+ LOG.warn(s"读取$name arangoDb:${value.count()}")
+ value
+ }
+
+}
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala
index eb6a736..e70ffea 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala
@@ -11,7 +11,7 @@ object BaseClickhouseData {
val currentHour: Long = System.currentTimeMillis / (60 * 60 * 1000) * 60 * 60
private val timeLimit: (Long, Long) = getTimeLimit
- private def initClickhouseData(sql:String): DataFrame ={
+ private def initClickhouseData(sql: String): DataFrame = {
val dataFrame: DataFrame = spark.read.format("jdbc")
.option("url", ApplicationConfig.SPARK_READ_CLICKHOUSE_URL)
@@ -24,7 +24,7 @@ object BaseClickhouseData {
.option("lowerBound", timeLimit._2)
.option("upperBound", timeLimit._1)
.option("fetchsize", ApplicationConfig.SPARK_READ_CLICKHOUSE_FETCHSIZE)
- .option("socket_timeout",ApplicationConfig.CLICKHOUSE_SOCKET_TIMEOUT)
+ .option("socket_timeout", ApplicationConfig.CLICKHOUSE_SOCKET_TIMEOUT)
.load()
dataFrame.printSchema()
dataFrame.createOrReplaceGlobalTempView("dbtable")
@@ -32,135 +32,7 @@ object BaseClickhouseData {
dataFrame
}
- def loadConnectionDataFromCk(): Unit ={
- val where = "common_recv_time >= " + timeLimit._2 + " AND common_recv_time < " + timeLimit._1
- val sql =
- s"""
- |(SELECT
- | ssl_sni,http_host,common_client_ip,common_server_ip,common_recv_time,common_c2s_byte_num,common_s2c_byte_num,common_schema_type
- |FROM
- | connection_record_log
- |WHERE $where) as dbtable
- """.stripMargin
-
- LOG.warn(sql)
- initClickhouseData(sql)
- }
-
- private def loadRadiusDataFromCk(): Unit ={
- val where =
- s"""
- | common_recv_time >= ${timeLimit._2}
- | AND common_recv_time < ${timeLimit._1}
- | AND common_subscriber_id != ''
- | AND radius_framed_ip != ''
- | AND radius_packet_type = 4
- | AND radius_acct_status_type = 1
- """.stripMargin
- val sql =
- s"""
- |(SELECT
- | common_subscriber_id,radius_framed_ip,common_recv_time
- |FROM
- | tsg_galaxy_v3.radius_record_log
- |WHERE
- | $where) as dbtable
- """.stripMargin
- LOG.warn(sql)
- initClickhouseData(sql)
- }
-
- /*
- def getVertexIpDf: DataFrame ={
- loadConnectionDataFromCk()
- val sql =
- """
- |SELECT
- | *
- |FROM
- | (
- | (
- | SELECT
- | common_client_ip AS IP,
- | MIN(common_recv_time) AS FIRST_FOUND_TIME,
- | MAX(common_recv_time) AS LAST_FOUND_TIME,
- | count(*) as SESSION_COUNT,
- | sum(common_c2s_byte_num) as BYTES_SUM,
- | 'client' as ip_type
- | FROM
- | global_temp.dbtable
- | GROUP BY
- | IP
- | )
- | UNION ALL
- | (
- | SELECT
- | common_server_ip AS IP,
- | MIN(common_recv_time) AS FIRST_FOUND_TIME,
- | MAX(common_recv_time) AS LAST_FOUND_TIME,
- | count(*) as SESSION_COUNT,
- | sum(common_s2c_byte_num) as BYTES_SUM,
- | 'server' as ip_type
- | FROM
- | global_temp.dbtable
- | GROUP BY
- | IP
- | )
- | )
- """.stripMargin
- LOG.warn(sql)
- val vertexIpDf = spark.sql(sql)
- vertexIpDf.printSchema()
- vertexIpDf
- }
-
- def getRelationFqdnLocateIpDf: DataFrame ={
- loadConnectionDataFromCk()
- val sslSql =
- """
- |SELECT
- | ssl_sni AS FQDN,
- | common_server_ip,
- | MAX(common_recv_time) AS LAST_FOUND_TIME,
- | MIN(common_recv_time) AS FIRST_FOUND_TIME,
- | COUNT(*) AS COUNT_TOTAL,
- | collect_set(common_client_ip) AS DIST_CIP_RECENT,
- | 'TLS' AS schema_type
- |FROM
- | global_temp.dbtable
- |WHERE
- | common_schema_type = 'SSL'
- |GROUP BY
- | ssl_sni,common_server_ip
- """.stripMargin
-
- val httpSql =
- """
- |SELECT
- | http_host AS FQDN,
- | common_server_ip,
- | MAX(common_recv_time) AS LAST_FOUND_TIME,
- | MIN(common_recv_time) AS FIRST_FOUND_TIME,
- | COUNT(*) AS COUNT_TOTAL,
- | collect_set(common_client_ip) AS DIST_CIP_RECENT,
- | 'HTTP' AS schema_type
- |FROM
- | global_temp.dbtable
- |WHERE
- | common_schema_type = 'HTTP'
- |GROUP BY
- | http_host,common_server_ip
- """.stripMargin
- val sql = s"SELECT * FROM (($sslSql) UNION ALL ($httpSql)) WHERE FQDN != ''"
-
- LOG.warn(sql)
- val relationFqdnLocateIpDf = spark.sql(sql)
- relationFqdnLocateIpDf.printSchema()
- relationFqdnLocateIpDf
- }
- */
-
- def getVertexFqdnDf: DataFrame ={
+ def getVertexFqdnDf: DataFrame = {
val sql =
"""
|(SELECT
@@ -168,12 +40,12 @@ object BaseClickhouseData {
|FROM
| ((SELECT
| ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME
- | FROM tsg_galaxy_v3.connection_record_log
+ | FROM connection_record_log
| WHERE common_schema_type = 'SSL' GROUP BY ssl_sni
| )UNION ALL
| (SELECT
| http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME
- | FROM tsg_galaxy_v3.connection_record_log
+ | FROM connection_record_log
| WHERE common_schema_type = 'HTTP' GROUP BY http_host))
|GROUP BY FQDN HAVING FQDN != '') as dbtable
""".stripMargin
@@ -183,7 +55,7 @@ object BaseClickhouseData {
frame
}
- def getVertexIpDf: DataFrame ={
+ def getVertexIpDf: DataFrame = {
val where = "common_recv_time >= " + timeLimit._2 + " AND common_recv_time < " + timeLimit._1
val sql =
s"""
@@ -194,7 +66,7 @@ object BaseClickhouseData {
|SUM(common_c2s_byte_num+common_s2c_byte_num) as BYTES_SUM,
|groupUniqArray(2)(common_link_info_c2s)[2] as common_link_info,
|'client' as ip_type
- |FROM tsg_galaxy_v3.connection_record_log
+ |FROM connection_record_log
|where $where
|group by common_client_ip)
|UNION ALL
@@ -205,7 +77,7 @@ object BaseClickhouseData {
|SUM(common_c2s_byte_num+common_s2c_byte_num) as BYTES_SUM,
|groupUniqArray(2)(common_link_info_s2c)[2] as common_link_info,
|'server' as ip_type
- |FROM tsg_galaxy_v3.connection_record_log
+ |FROM connection_record_log
|where $where
|group by common_server_ip))) as dbtable
""".stripMargin
@@ -216,19 +88,19 @@ object BaseClickhouseData {
}
- def getRelationFqdnLocateIpDf: DataFrame ={
+ def getRelationFqdnLocateIpDf: DataFrame = {
val where = "common_recv_time >= " + timeLimit._2 + " AND common_recv_time < " + timeLimit._1
val sql =
s"""
|(SELECT * FROM
|((SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,
|toString(groupUniqArray(${ApplicationConfig.DISTINCT_CLIENT_IP_NUM})(common_client_ip)) AS DIST_CIP_RECENT,'TLS' AS schema_type
- |FROM tsg_galaxy_v3.connection_record_log
+ |FROM connection_record_log
|WHERE $where and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip)
|UNION ALL
|(SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,
|toString(groupUniqArray(${ApplicationConfig.DISTINCT_CLIENT_IP_NUM})(common_client_ip)) AS DIST_CIP_RECENT,'HTTP' AS schema_type
- |FROM tsg_galaxy_v3.connection_record_log
+ |FROM connection_record_log
|WHERE $where and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip))
|WHERE FQDN != '') as dbtable
""".stripMargin
@@ -238,7 +110,7 @@ object BaseClickhouseData {
frame
}
- def getRelationSubidLocateIpDf: DataFrame ={
+ def getRelationSubidLocateIpDf: DataFrame = {
val where =
s"""
| common_recv_time >= ${timeLimit._2}
@@ -260,7 +132,7 @@ object BaseClickhouseData {
frame
}
- def getVertexSubidDf: DataFrame ={
+ def getVertexSubidDf: DataFrame = {
val where =
s"""
| common_recv_time >= ${timeLimit._2}
@@ -281,7 +153,7 @@ object BaseClickhouseData {
frame
}
- def getVertexFramedIpDf: DataFrame ={
+ def getVertexFramedIpDf: DataFrame = {
val where =
s"""
| common_recv_time >= ${timeLimit._2}
@@ -302,7 +174,7 @@ object BaseClickhouseData {
}
- private def getTimeLimit: (Long,Long) ={
+ private def getTimeLimit: (Long, Long) = {
var maxTime = 0L
var minTime = 0L
ApplicationConfig.CLICKHOUSE_TIME_LIMIT_TYPE match {
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala
index 3094691..309c1a7 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala
@@ -3,37 +3,31 @@ package cn.ac.iie.service.transform
import java.util.regex.Pattern
import cn.ac.iie.config.ApplicationConfig
-import cn.ac.iie.dao.BaseClickhouseData
-import cn.ac.iie.spark.ArangoSpark
+import cn.ac.iie.dao.{BaseArangoData, BaseClickhouseData}
import cn.ac.iie.spark.partition.CustomPartitioner
-import cn.ac.iie.spark.rdd.ReadOptions
+import cn.ac.iie.spark.rdd.ArangoRdd
import com.arangodb.entity.{BaseDocument, BaseEdgeDocument}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.functions._
import org.slf4j.LoggerFactory
-import cn.ac.iie.utils.SparkSessionUtil._
object MergeDataFrame {
private val LOG = LoggerFactory.getLogger(MergeDataFrame.getClass)
private val pattern = Pattern.compile("^[\\d]*$")
- private val options = ReadOptions(ApplicationConfig.ARANGODB_DB_NAME)
- def mergeVertexFqdn(): RDD[(String, (Option[BaseDocument], Option[Row]))] ={
- val fqdnAccmu = getLongAccumulator("FQDN Accumulator")
- val fqdnRddRow = BaseClickhouseData.getVertexFqdnDf
+ def mergeVertexFqdn(): RDD[(String, (Option[BaseDocument], Row))] = {
+ val fqdnRddRow: RDD[(String, Row)] = BaseClickhouseData.getVertexFqdnDf
.rdd.filter(row => isDomain(row.getAs[String](0))).map(row => {
- fqdnAccmu.add(1)
(row.getAs[String]("FQDN"), row)
}).partitionBy(new CustomPartitioner(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS))
- fqdnRddRow.cache()
- val fqdnRddDoc = ArangoSpark.load[BaseDocument](sparkContext,"FQDN",options)
- fqdnRddDoc.map(doc => (doc.getKey, doc)).fullOuterJoin(fqdnRddRow)
+ val fqdnRddDoc: ArangoRdd[BaseDocument] = BaseArangoData.loadArangoRdd[BaseDocument]("FQDN")
+
+ fqdnRddDoc.map(doc => (doc.getKey, doc)).rightOuterJoin(fqdnRddRow)
}
- def mergeVertexIp(): RDD[(String, (Option[BaseDocument], Option[Row]))]={
- val ipAccum = getLongAccumulator("IP Accumulator")
+ def mergeVertexIp(): RDD[(String, (Option[BaseDocument], Row))] = {
val vertexIpDf = BaseClickhouseData.getVertexIpDf
val frame = vertexIpDf.groupBy("IP").agg(
min("FIRST_FOUND_TIME").alias("FIRST_FOUND_TIME"),
@@ -44,17 +38,14 @@ object MergeDataFrame {
last("common_link_info").alias("common_link_info")
)
val ipRddRow = frame.rdd.map(row => {
- ipAccum.add(1)
(row.getAs[String]("IP"), row)
}).partitionBy(new CustomPartitioner(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS))
- val ipRddDoc = ArangoSpark.load[BaseDocument](sparkContext,"IP",options)
-
- ipRddDoc.map(doc => (doc.getKey, doc)).fullOuterJoin(ipRddRow)
+ val ipRddDoc = BaseArangoData.loadArangoRdd[BaseDocument]("IP")
+ ipRddDoc.map(doc => (doc.getKey, doc)).rightOuterJoin(ipRddRow)
}
- def mergeRelationFqdnLocateIp(): RDD[(String, (Option[BaseEdgeDocument], Option[Row]))] ={
- val fqdnLocIpAccum = getLongAccumulator("R_LOCATE_FQDN2IP Accumulator")
+ def mergeRelationFqdnLocateIp(): RDD[(String, (Option[BaseEdgeDocument], Row))] = {
val frame = BaseClickhouseData.getRelationFqdnLocateIpDf.filter(row => isDomain(row.getAs[String]("FQDN")))
.groupBy("FQDN", "common_server_ip")
.agg(
@@ -68,54 +59,46 @@ object MergeDataFrame {
val fqdn = row.getAs[String]("FQDN")
val serverIp = row.getAs[String]("common_server_ip")
val key = fqdn.concat("-" + serverIp)
- fqdnLocIpAccum.add(1)
(key, row)
}).partitionBy(new CustomPartitioner(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS))
- val fqdnLocIpRddDoc = ArangoSpark.load[BaseEdgeDocument](sparkContext,"R_LOCATE_FQDN2IP",options)
-
- fqdnLocIpRddDoc.map(doc => (doc.getKey, doc)).fullOuterJoin(fqdnLocIpRddRow)
+ val fqdnLocIpRddDoc = BaseArangoData.loadArangoRdd[BaseEdgeDocument]("R_LOCATE_FQDN2IP")
+ fqdnLocIpRddDoc.map(doc => (doc.getKey, doc)).rightOuterJoin(fqdnLocIpRddRow)
}
- def mergeRelationSubidLocateIp(): RDD[(String, (Option[BaseEdgeDocument], Option[Row]))] ={
- val subidLocIpAccum = getLongAccumulator("R_LOCATE_SUBSCRIBER2IP Accumulator")
+ def mergeRelationSubidLocateIp(): RDD[(String, (Option[BaseEdgeDocument], Row))] = {
val subidLocIpRddRow = BaseClickhouseData.getRelationSubidLocateIpDf
.repartition(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS)
.rdd.map(row => {
val commonSubscriberId = row.getAs[String]("common_subscriber_id")
val ip = row.getAs[String]("radius_framed_ip")
val key = commonSubscriberId.concat("-" + ip)
- subidLocIpAccum.add(1)
(key, row)
}).partitionBy(new CustomPartitioner(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS))
- val subidLocIpRddDoc = ArangoSpark.load[BaseEdgeDocument](sparkContext,"R_LOCATE_SUBSCRIBER2IP",options)
+ val subidLocIpRddDoc = BaseArangoData.loadArangoRdd[BaseEdgeDocument]("R_LOCATE_SUBSCRIBER2IP")
- subidLocIpRddDoc.map(doc => (doc.getKey, doc)).fullOuterJoin(subidLocIpRddRow)
+ subidLocIpRddDoc.map(doc => (doc.getKey, doc)).rightOuterJoin(subidLocIpRddRow)
}
- def mergeVertexSubid(): RDD[(String, (Option[BaseDocument], Option[Row]))] ={
- val subidAccum = getLongAccumulator("SUBSCRIBER Accumulator")
+ def mergeVertexSubid(): RDD[(String, (Option[BaseDocument], Row))] = {
val subidRddRow = BaseClickhouseData.getVertexSubidDf
.repartition(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS)
.rdd.map(row => {
val commonSubscriberId = row.getAs[String]("common_subscriber_id")
- subidAccum.add(1)
(commonSubscriberId, row)
}).partitionBy(new CustomPartitioner(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS))
- val subidRddDoc = ArangoSpark.load[BaseDocument](sparkContext,"SUBSCRIBER",options)
+ val subidRddDoc = BaseArangoData.loadArangoRdd[BaseDocument]("SUBSCRIBER")
- subidRddDoc.map(doc => (doc.getKey, doc)).fullOuterJoin(subidRddRow)
+ subidRddDoc.map(doc => (doc.getKey, doc)).rightOuterJoin(subidRddRow)
}
- def mergeVertexFrameIp: RDD[Row] ={
- val framedIpAccum = getLongAccumulator("framed ip Accumulator")
+ def mergeVertexFrameIp: RDD[Row] = {
val values = BaseClickhouseData.getVertexFramedIpDf
.repartition(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS)
.rdd.map(row => {
val ip = row.getAs[String]("radius_framed_ip")
- framedIpAccum.add(1)
(ip, row)
}).partitionBy(new CustomPartitioner(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS)).values
values
@@ -129,7 +112,7 @@ object MergeDataFrame {
val fqdnArr = fqdn.split(":")(0).split("\\.")
- if (fqdnArr.length != 4){
+ if (fqdnArr.length != 4) {
return true
}
for (f <- fqdnArr) {
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocHandler.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocHandler.scala
index a275ab3..06d731a 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocHandler.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocHandler.scala
@@ -12,37 +12,39 @@ import scala.collection.mutable
import scala.collection.mutable.WrappedArray.ofRef
object UpdateDocHandler {
- val PROTOCOL_SET: Set[String] = Set("HTTP","TLS","DNS")
+ val PROTOCOL_SET: Set[String] = Set("HTTP", "TLS", "DNS")
- def updateMaxAttribute(hisDoc: BaseDocument,newAttribute:Long,attributeName:String): Unit ={
- if(hisDoc.getProperties.containsKey(attributeName)){
+ def updateMaxAttribute(hisDoc: BaseDocument, newAttribute: Long, attributeName: String): Unit = {
+ if (hisDoc.getProperties.containsKey(attributeName)) {
var hisAttritube = hisDoc.getAttribute(attributeName).toString.toLong
- if (newAttribute > hisAttritube){
+ if (newAttribute > hisAttritube) {
hisAttritube = newAttribute
}
- hisDoc.addAttribute(attributeName,hisAttritube)
+ hisDoc.addAttribute(attributeName, hisAttritube)
}
}
- def updateSumAttribute(hisDoc: BaseDocument,newAttribute:Long,attributeName:String): Unit ={
- if (hisDoc.getProperties.containsKey(attributeName)){
+ def updateSumAttribute(hisDoc: BaseDocument, newAttribute: Long, attributeName: String): Unit = {
+ if (hisDoc.getProperties.containsKey(attributeName)) {
val hisAttritube = hisDoc.getAttribute(attributeName).toString.toLong
- hisDoc.addAttribute(attributeName,newAttribute+hisAttritube)
+ hisDoc.addAttribute(attributeName, newAttribute + hisAttritube)
}
+
}
- def replaceAttribute(hisDoc: BaseDocument,newAttribute:String,attributeName:String): Unit ={
- hisDoc.addAttribute(attributeName,newAttribute)
+ def replaceAttribute(hisDoc: BaseDocument, newAttribute: String, attributeName: String): Unit = {
+ // hisDoc.addAttribute(attributeName,newAttribute)
+ hisDoc.updateAttribute(attributeName, newAttribute)
}
- def separateAttributeByIpType(ipTypeList:ofRef[String],
- sessionCountList:ofRef[AnyRef],
- bytesSumList:ofRef[AnyRef]): (Long,Long,Long,Long) ={
+ def separateAttributeByIpType(ipTypeList: ofRef[String],
+ sessionCountList: ofRef[AnyRef],
+ bytesSumList: ofRef[AnyRef]): (Long, Long, Long, Long) = {
var serverSessionCount = 0L
var serverBytesSum = 0L
var clientSessionCount = 0L
var clientBytesSum = 0L
- if (ipTypeList.length == sessionCountList.length && ipTypeList.length == bytesSumList.length){
+ if (ipTypeList.length == sessionCountList.length && ipTypeList.length == bytesSumList.length) {
sessionCountList.zip(bytesSumList).zip(ipTypeList).foreach(t => {
t._2 match {
case "server" =>
@@ -57,51 +59,51 @@ object UpdateDocHandler {
(serverSessionCount, serverBytesSum, clientSessionCount, clientBytesSum)
}
- def separateAttributeByProtocol(schemaTypeList:ofRef[AnyRef],countTotalList:ofRef[AnyRef]): Map[String, Long] ={
+ def separateAttributeByProtocol(schemaTypeList: ofRef[AnyRef], countTotalList: ofRef[AnyRef]): Map[String, Long] = {
var protocolMap: Map[String, Long] = Map()
- if (schemaTypeList.length == countTotalList.length){
- protocolMap = schemaTypeList.zip(countTotalList).map(t => (t._1.toString,t._2.toString.toLong)).toMap
+ if (schemaTypeList.length == countTotalList.length) {
+ protocolMap = schemaTypeList.zip(countTotalList).map(t => (t._1.toString, t._2.toString.toLong)).toMap
}
PROTOCOL_SET.foreach(protocol => {
- if (!protocolMap.contains(protocol)){
+ if (!protocolMap.contains(protocol)) {
protocolMap += (protocol -> 0L)
}
})
protocolMap
}
- def updateProtocolAttritube(hisDoc:BaseEdgeDocument, protocolMap: Map[String, Long]): Unit ={
- if (hisDoc.getProperties.containsKey("PROTOCOL_TYPE")){
+ def updateProtocolAttritube(hisDoc: BaseEdgeDocument, protocolMap: Map[String, Long]): Unit = {
+ if (hisDoc.getProperties.containsKey("PROTOCOL_TYPE")) {
var protocolType = hisDoc.getAttribute("PROTOCOL_TYPE").toString
protocolMap.foreach((t: (String, Long)) => {
- if (t._2 > 0 && !protocolType.contains(t._1)){
- protocolType = protocolType.concat(","+ t._1)
+ if (t._2 > 0 && !protocolType.contains(t._1)) {
+ protocolType = protocolType.concat("," + t._1)
}
val cntTotalName = t._1.concat("_CNT_TOTAL")
val cntRecentName = t._1.concat("_CNT_RECENT")
val cntRecent = hisDoc.getAttribute(cntRecentName).asInstanceOf[Array[Long]]
- cntRecent.update(0,t._2)
- updateSumAttribute(hisDoc,t._2,cntTotalName)
- hisDoc.addAttribute(cntRecentName,cntRecent)
+ cntRecent.update(0, t._2)
+ updateSumAttribute(hisDoc, t._2, cntTotalName)
+ hisDoc.addAttribute(cntRecentName, cntRecent)
})
- hisDoc.addAttribute("PROTOCOL_TYPE",protocolType)
+ hisDoc.addAttribute("PROTOCOL_TYPE", protocolType)
}
}
- def putProtocolAttritube(doc:BaseEdgeDocument, protocolMap: Map[String, Long]): Unit ={
+ def putProtocolAttritube(doc: BaseEdgeDocument, protocolMap: Map[String, Long]): Unit = {
val protocolTypeBuilder = new mutable.StringBuilder()
protocolMap.foreach(t => {
- if (t._2 > 0){
- protocolTypeBuilder.append(","+t._1)
+ if (t._2 > 0) {
+ protocolTypeBuilder.append("," + t._1)
}
val cntTotalName = t._1.concat("_CNT_TOTAL")
val cntRecentName = t._1.concat("_CNT_RECENT")
val cntRecent: Array[Long] = new Array[Long](ApplicationConfig.RECENT_COUNT_HOUR)
- cntRecent.update(0,t._2)
- doc.addAttribute(cntTotalName,t._2)
- doc.addAttribute(cntRecentName,cntRecent)
+ cntRecent.update(0, t._2)
+ doc.addAttribute(cntTotalName, t._2)
+ doc.addAttribute(cntRecentName, cntRecent)
})
- doc.addAttribute("PROTOCOL_TYPE",protocolTypeBuilder.toString().replaceFirst(",",""))
+ doc.addAttribute("PROTOCOL_TYPE", protocolTypeBuilder.toString().replaceFirst(",", ""))
}
def updateProtocolDocument(doc: BaseEdgeDocument): Unit = {
@@ -118,37 +120,37 @@ object UpdateDocHandler {
}
}
- def mergeDistinctIp(distCipRecent:ofRef[String]): Array[String] ={
+ def mergeDistinctIp(distCipRecent: ofRef[String]): Array[String] = {
distCipRecent.flatMap(str => {
- str.replaceAll("\\[","")
- .replaceAll("\\]","")
- .replaceAll("\\'","")
+ str.replaceAll("\\[", "")
+ .replaceAll("\\]", "")
+ .replaceAll("\\'", "")
.split(",")
}).distinct.take(ApplicationConfig.DISTINCT_CLIENT_IP_NUM).toArray
}
- def putDistinctIp(doc:BaseEdgeDocument,newDistinctIp:Array[String]): Unit ={
+ def putDistinctIp(doc: BaseEdgeDocument, newDistinctIp: Array[String]): Unit = {
val map = newDistinctIp.map(ip => {
(ip, ReadHistoryArangoData.currentHour)
}).toMap
- doc.addAttribute("DIST_CIP",map.keys.toArray)
- doc.addAttribute("DIST_CIP_TS",map.values.toArray)
+ doc.addAttribute("DIST_CIP", map.keys.toArray)
+ doc.addAttribute("DIST_CIP_TS", map.values.toArray)
}
- def updateDistinctIp(hisDoc:BaseEdgeDocument,newDistinctIp:Array[String]): Unit ={
- if (hisDoc.getProperties.containsKey("DIST_CIP") && hisDoc.getProperties.containsKey("DIST_CIP_TS")){
+ def updateDistinctIp(hisDoc: BaseEdgeDocument, newDistinctIp: Array[String]): Unit = {
+ if (hisDoc.getProperties.containsKey("DIST_CIP") && hisDoc.getProperties.containsKey("DIST_CIP_TS")) {
val hisDistCip = hisDoc.getAttribute("DIST_CIP").asInstanceOf[util.ArrayList[String]]
val hisDistCipTs = hisDoc.getAttribute("DIST_CIP_TS").asInstanceOf[util.ArrayList[Long]]
- if (hisDistCip.length == hisDistCipTs.length){
+ if (hisDistCip.length == hisDistCipTs.length) {
val distCipToTsMap: Map[String, Long] = hisDistCip.zip(hisDistCipTs).toMap
- val muDistCipToTsMap: mutable.Map[String, Long] = mutable.Map(distCipToTsMap.toSeq:_*)
+ val muDistCipToTsMap: mutable.Map[String, Long] = mutable.Map(distCipToTsMap.toSeq: _*)
newDistinctIp.foreach(cip => {
- muDistCipToTsMap.put(cip,ReadHistoryArangoData.currentHour)
+ muDistCipToTsMap.put(cip, ReadHistoryArangoData.currentHour)
})
val resultMap = muDistCipToTsMap.toList.sortBy(-_._2).take(ApplicationConfig.DISTINCT_CLIENT_IP_NUM).toMap
- hisDoc.addAttribute("DIST_CIP",resultMap.keys.toArray)
- hisDoc.addAttribute("DIST_CIP_TS",resultMap.values.toArray)
+ hisDoc.addAttribute("DIST_CIP", resultMap.keys.toArray)
+ hisDoc.addAttribute("DIST_CIP_TS", resultMap.values.toArray)
}
}
}
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala
index 5162834..59f30a4 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala
@@ -19,13 +19,13 @@ object UpdateDocument {
def update(): Unit = {
try {
- updateDocument("FQDN", getVertexFqdnRow, mergeVertexFqdn)
+// updateDocument("FQDN", getVertexFqdnRow, mergeVertexFqdn)
- updateDocument("SUBSCRIBER",getVertexSubidRow,mergeVertexSubid)
+// updateDocument("SUBSCRIBER", getVertexSubidRow, mergeVertexSubid)
- insertFrameIp()
+// insertFrameIp()
- updateDocument("R_LOCATE_SUBSCRIBER2IP",getRelationSubidLocateIpRow,mergeRelationSubidLocateIp)
+// updateDocument("R_LOCATE_SUBSCRIBER2IP", getRelationSubidLocateIpRow, mergeRelationSubidLocateIp)
updateDocument("R_LOCATE_FQDN2IP", getRelationFqdnLocateIpRow, mergeRelationFqdnLocateIp)
@@ -41,18 +41,23 @@ object UpdateDocument {
}
private def updateDocument[T <: BaseDocument](collName: String,
- getDocumentRow: ((String, (Option[T], Option[Row]))) => T,
- getJoinRdd: () => RDD[(String, (Option[T], Option[Row]))]
+ getDocumentRow: ((String, (Option[T], Row))) => T,
+ getJoinRdd: () => RDD[(String, (Option[T], Row))]
): Unit = {
try {
val start = System.currentTimeMillis()
val joinRdd = getJoinRdd()
+
+ val fqdnAccmu = SparkSessionUtil.getLongAccumulator(s"$collName Accumulator")
+
joinRdd.foreachPartition(iter => {
val resultDocumentList = new util.ArrayList[T]
var i = 0
iter.foreach(row => {
- val document = getDocumentRow(row)
- if (document != null){
+ val document: T = getDocumentRow(row)
+ if (document != null) {
+ fqdnAccmu.add(1)
+
resultDocumentList.add(document)
}
i += 1
@@ -67,14 +72,17 @@ object UpdateDocument {
LOG.warn(s"更新$collName:" + i)
}
})
+
+ LOG.warn(s"更新$collName 条数:${fqdnAccmu.value}")
+
val last = System.currentTimeMillis()
- LOG.warn(s"更新$collName 时间:${last - start}")
+ LOG.warn(s"更新$collName 时间:${last - start}")
} catch {
case e: Exception => e.printStackTrace()
}
}
- private def insertFrameIp(): Unit ={
+ private def insertFrameIp(): Unit = {
mergeVertexFrameIp.foreachPartition(iter => {
val resultDocumentList = new util.ArrayList[BaseDocument]
var i = 0
@@ -95,15 +103,15 @@ object UpdateDocument {
})
}
- private def getVertexFrameipRow(row: Row): BaseDocument ={
+ private def getVertexFrameipRow(row: Row): BaseDocument = {
val ip = row.getAs[String]("radius_framed_ip")
val document = new BaseDocument()
document.setKey(ip)
- document.addAttribute("IP",ip)
+ document.addAttribute("IP", ip)
document
}
- private def getRelationSubidLocateIpRow(joinRow: (String, (Option[BaseEdgeDocument], Option[Row]))): BaseEdgeDocument ={
+ private def getRelationSubidLocateIpRow(joinRow: (String, (Option[BaseEdgeDocument], Row))): BaseEdgeDocument = {
val subidLocIpDocOpt = joinRow._2._1
var subidLocIpDoc = subidLocIpDocOpt match {
@@ -111,83 +119,83 @@ object UpdateDocument {
case None => null
}
- val subidLocIpRowOpt = joinRow._2._2
+ val subidLocIpRow = joinRow._2._2
- val subidLocIpRow = subidLocIpRowOpt match {
- case Some(r) => r
- case None => null
- }
+ // val subidLocIpRow = subidLocIpRowOpt match {
+ // case Some(r) => r
+ // case None => null
+ // }
- if (subidLocIpRow != null){
+ if (subidLocIpRow != null) {
val subId = subidLocIpRow.getAs[String]("common_subscriber_id")
val ip = subidLocIpRow.getAs[String]("radius_framed_ip")
val lastFoundTime = subidLocIpRow.getAs[Long]("LAST_FOUND_TIME")
val firstFoundTime = subidLocIpRow.getAs[Long]("FIRST_FOUND_TIME")
- val key = subId.concat("-"+ip)
- if (subidLocIpDoc != null){
- updateMaxAttribute(subidLocIpDoc,lastFoundTime,"LAST_FOUND_TIME")
+ val key = subId.concat("-" + ip)
+ if (subidLocIpDoc != null) {
+ updateMaxAttribute(subidLocIpDoc, lastFoundTime, "LAST_FOUND_TIME")
} else {
subidLocIpDoc = new BaseEdgeDocument()
subidLocIpDoc.setKey(key)
subidLocIpDoc.setFrom("SUBSCRIBER/" + subId)
subidLocIpDoc.setTo("IP/" + ip)
- subidLocIpDoc.addAttribute("SUBSCRIBER",subId)
- subidLocIpDoc.addAttribute("IP",ip)
- subidLocIpDoc.addAttribute("FIRST_FOUND_TIME",firstFoundTime)
- subidLocIpDoc.addAttribute("LAST_FOUND_TIME",lastFoundTime)
+ subidLocIpDoc.addAttribute("SUBSCRIBER", subId)
+ subidLocIpDoc.addAttribute("IP", ip)
+ subidLocIpDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime)
+ subidLocIpDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime)
}
}
subidLocIpDoc
}
- private def getVertexSubidRow(joinRow: (String, (Option[BaseDocument], Option[Row]))): BaseDocument ={
+ private def getVertexSubidRow(joinRow: (String, (Option[BaseDocument], Row))): BaseDocument = {
val subidDocOpt = joinRow._2._1
var subidDoc = subidDocOpt match {
case Some(doc) => doc
case None => null
}
- val subidRowOpt = joinRow._2._2
+ val subidRow = joinRow._2._2
- val subidRow = subidRowOpt match {
- case Some(r) => r
- case None => null
- }
+ // val subidRow = subidRowOpt match {
+ // case Some(r) => r
+ // case None => null
+ // }
- if (subidRow != null){
+ if (subidRow != null) {
val subId = subidRow.getAs[String]("common_subscriber_id")
val subLastFoundTime = subidRow.getAs[Long]("LAST_FOUND_TIME")
val subFirstFoundTime = subidRow.getAs[Long]("FIRST_FOUND_TIME")
- if (subidDoc != null){
- updateMaxAttribute(subidDoc,subLastFoundTime,"LAST_FOUND_TIME")
+ if (subidDoc != null) {
+ updateMaxAttribute(subidDoc, subLastFoundTime, "LAST_FOUND_TIME")
} else {
subidDoc = new BaseDocument()
subidDoc.setKey(subId)
- subidDoc.addAttribute("SUBSCRIBER",subId)
- subidDoc.addAttribute("FIRST_FOUND_TIME",subFirstFoundTime)
- subidDoc.addAttribute("LAST_FOUND_TIME",subLastFoundTime)
+ subidDoc.addAttribute("SUBSCRIBER", subId)
+ subidDoc.addAttribute("FIRST_FOUND_TIME", subFirstFoundTime)
+ subidDoc.addAttribute("LAST_FOUND_TIME", subLastFoundTime)
}
}
subidDoc
}
- private def getVertexFqdnRow(joinRow: (String, (Option[BaseDocument], Option[Row]))): BaseDocument = {
+ private def getVertexFqdnRow(joinRow: (String, (Option[BaseDocument], Row))): BaseDocument = {
val fqdnDocOpt = joinRow._2._1
var fqdnDoc = fqdnDocOpt match {
case Some(doc) => doc
case None => null
}
- val fqdnRowOpt = joinRow._2._2
+ val fqdnRow: Row = joinRow._2._2
- val fqdnRow = fqdnRowOpt match {
- case Some(r) => r
- case None => null
- }
+ // val fqdnRow = fqdnRowOpt match {
+ // case Some(r) => r
+ // case None => null
+ // }
- if (fqdnRow != null){
+ if (fqdnRow != null) {
val fqdn = fqdnRow.getAs[String]("FQDN")
val lastFoundTime = fqdnRow.getAs[Long]("LAST_FOUND_TIME")
val firstFoundTime = fqdnRow.getAs[Long]("FIRST_FOUND_TIME")
@@ -205,21 +213,21 @@ object UpdateDocument {
fqdnDoc
}
- private def getVertexIpRow(joinRow: (String, (Option[BaseDocument], Option[Row]))): BaseDocument = {
+ private def getVertexIpRow(joinRow: (String, (Option[BaseDocument], Row))): BaseDocument = {
val ipDocOpt = joinRow._2._1
var ipDoc = ipDocOpt match {
case Some(doc) => doc
case None => null
}
- val ipRowOpt = joinRow._2._2
+ val ipRow = joinRow._2._2
- val ipRow = ipRowOpt match {
- case Some(r) => r
- case None => null
- }
+ // val ipRow = ipRowOpt match {
+ // case Some(r) => r
+ // case None => null
+ // }
- if (ipRow != null){
+ if (ipRow != null) {
val ip = ipRow.getAs[String]("IP")
val firstFoundTime = ipRow.getAs[Long]("FIRST_FOUND_TIME")
val lastFoundTime = ipRow.getAs[Long]("LAST_FOUND_TIME")
@@ -235,7 +243,7 @@ object UpdateDocument {
updateSumAttribute(ipDoc, sepAttributeTuple._2, "SERVER_BYTES_SUM")
updateSumAttribute(ipDoc, sepAttributeTuple._3, "CLIENT_SESSION_COUNT")
updateSumAttribute(ipDoc, sepAttributeTuple._4, "CLIENT_BYTES_SUM")
- replaceAttribute(ipDoc,linkInfo,"COMMON_LINK_INFO")
+ replaceAttribute(ipDoc, linkInfo, "COMMON_LINK_INFO")
} else {
ipDoc = new BaseDocument
ipDoc.setKey(ip)
@@ -253,7 +261,7 @@ object UpdateDocument {
ipDoc
}
- private def getRelationFqdnLocateIpRow(joinRow: (String, (Option[BaseEdgeDocument], Option[Row]))): BaseEdgeDocument = {
+ private def getRelationFqdnLocateIpRow(joinRow: (String, (Option[BaseEdgeDocument], Row))): BaseEdgeDocument = {
val fqdnLocIpDocOpt = joinRow._2._1
var fqdnLocIpDoc = fqdnLocIpDocOpt match {
@@ -261,18 +269,18 @@ object UpdateDocument {
case None => null
}
- val fqdnLocIpRowOpt = joinRow._2._2
+ val fqdnLocIpRow = joinRow._2._2
- val fqdnLocIpRow = fqdnLocIpRowOpt match {
- case Some(r) => r
- case None => null
- }
+ // val fqdnLocIpRow = fqdnLocIpRowOpt match {
+ // case Some(r) => r
+ // case None => null
+ // }
- if (fqdnLocIpDoc != null){
+ if (fqdnLocIpDoc != null) {
updateProtocolDocument(fqdnLocIpDoc)
}
- if (fqdnLocIpRow != null){
+ if (fqdnLocIpRow != null) {
val fqdn = fqdnLocIpRow.getAs[String]("FQDN")
val serverIp = fqdnLocIpRow.getAs[String]("common_server_ip")
val firstFoundTime = fqdnLocIpRow.getAs[Long]("FIRST_FOUND_TIME")
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/utils/SparkSessionUtil.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/utils/SparkSessionUtil.scala
index 5bececa..36c114a 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/utils/SparkSessionUtil.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/utils/SparkSessionUtil.scala
@@ -30,7 +30,7 @@ object SparkSessionUtil {
spark
}
- def getContext: SparkContext = {
+ private def getContext: SparkContext = {
@transient var sc: SparkContext = null
if (sparkContext == null) sc = spark.sparkContext
sc