| field | value | date |
|---|---|---|
| author | zhanghongqing <[email protected]> | 2023-11-13 18:43:26 +0800 |
| committer | zhanghongqing <[email protected]> | 2023-11-13 18:43:26 +0800 |
| commit | bdf8b00285a1a0f35c84091f820fbc1ba4934423 (patch) | |
| tree | 1240553a6d267344e045f3f00612ab95a1be8268 /ip-learning-spark | |
| parent | 219e66cea16f2a5f666a59222c5f19f9efbe3bd8 (diff) | |
Handle VSYS_ID type conversion exception (tsg-v2311)
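The ClickHouse `vsys_id` column evidently reaches Spark as a 32-bit integer, so `row.getAs[Long]("VSYS_ID")` failed at runtime with a `ClassCastException` (`java.lang.Integer cannot be cast to java.lang.Long`). Every read of the column now goes through `getAs[Integer](...).toLong`, widening explicitly. Along the way the commit aligns the SQL aliases with the session table's `decoded_as` column (replacing the `schema_type` alias), renames `common_server_ip` to `server_ip`, drops two stray `@deprecated` annotations, and deletes the unused `getVertexFqdnRow` helper. A minimal sketch reproducing the cast failure follows the diff below.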
Diffstat (limited to 'ip-learning-spark')
6 files changed, 15 insertions, 43 deletions
```diff
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala
index a6c691d..cfe8b5e 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/dao/BaseClickhouseData.scala
@@ -100,12 +100,12 @@ object BaseClickhouseData {
     s"""
        |(SELECT * FROM
        |((SELECT ssl_sni AS FQDN,server_ip,MAX(recv_time) AS LAST_FOUND_TIME,MIN(recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,
-       |toString(groupUniqArray(${ApplicationConfig.DISTINCT_CLIENT_IP_NUM})(client_ip)) AS DIST_CIP_RECENT,'TLS' AS schema_type,vsys_id AS VSYS_ID
+       |toString(groupUniqArray(${ApplicationConfig.DISTINCT_CLIENT_IP_NUM})(client_ip)) AS DIST_CIP_RECENT,'TLS' AS decoded_as,vsys_id AS VSYS_ID
        |FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
        |WHERE $where and decoded_as = 'SSL' GROUP BY ssl_sni,server_ip,vsys_id)
        |UNION ALL
        |(SELECT http_host AS FQDN,server_ip,MAX(recv_time) AS LAST_FOUND_TIME,MIN(recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,
-       |toString(groupUniqArray(${ApplicationConfig.DISTINCT_CLIENT_IP_NUM})(client_ip)) AS DIST_CIP_RECENT,'HTTP' AS schema_type,vsys_id AS VSYS_ID
+       |toString(groupUniqArray(${ApplicationConfig.DISTINCT_CLIENT_IP_NUM})(client_ip)) AS DIST_CIP_RECENT,'HTTP' AS decoded_as,vsys_id AS VSYS_ID
        |FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
        |WHERE $where and decoded_as = 'HTTP' GROUP BY http_host,server_ip,vsys_id))
        |WHERE FQDN != '') as dbtable
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/main/IpRecommendApplication.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/main/IpRecommendApplication.scala
index 99b188d..7fa6d62 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/main/IpRecommendApplication.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/main/IpRecommendApplication.scala
@@ -1,7 +1,7 @@
 package cn.ac.iie.main

 import cn.ac.iie.service.update.UpdateDocument
-@deprecated
+
 object IpRecommendApplication {

   def main(args: Array[String]): Unit = {
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/main/SubscriberRecommendApplication.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/main/SubscriberRecommendApplication.scala
index 3ee06b2..04cbec4 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/main/SubscriberRecommendApplication.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/main/SubscriberRecommendApplication.scala
@@ -1,7 +1,7 @@
 package cn.ac.iie.main

 import cn.ac.iie.service.update.UpdateDocument
-@deprecated
+
 object SubscriberRecommendApplication {

   def main(args: Array[String]): Unit = {
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala
index 352a650..20ca1e8 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/service/transform/MergeDataFrame.scala
@@ -39,7 +39,7 @@ object MergeDataFrame {
     )

     val ipRddRow = frame.rdd.map(row => {
-      val vsysId = row.getAs[Long]("VSYS_ID")
+      val vsysId = row.getAs[Integer]("VSYS_ID").toLong
       val ip = row.getAs[String]("IP")
       (ip + "-" + vsysId, row)
     }) /*.partitionBy(new CustomPartitioner(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS))*/
@@ -51,18 +51,18 @@ def mergeRelationFqdnLocateIp(): RDD[(String, (Option[BaseEdgeDocument], Row))] = {
     val frame = BaseClickhouseData.getRelationFqdnLocateIpDf
       .repartition().filter(row => isDomain(row.getAs[String]("FQDN")))
-      .groupBy("FQDN", "common_server_ip", "VSYS_ID")
+      .groupBy("FQDN", "server_ip", "VSYS_ID")
       .agg(
         min("FIRST_FOUND_TIME").alias("FIRST_FOUND_TIME"),
         max("LAST_FOUND_TIME").alias("LAST_FOUND_TIME"),
         collect_list("COUNT_TOTAL").alias("COUNT_TOTAL_LIST"),
-        collect_list("schema_type").alias("schema_type_list"),
+        collect_list("decoded_as").alias("decoded_as_list"),
         collect_set("DIST_CIP_RECENT").alias("DIST_CIP_RECENT")
       )

     val fqdnLocIpRddRow = frame.rdd.map(row => {
       val fqdn = row.getAs[String]("FQDN")
-      val serverIp = row.getAs[String]("common_server_ip")
-      val vsysId = row.getAs[Long]("VSYS_ID")
+      val serverIp = row.getAs[String]("server_ip")
+      val vsysId = row.getAs[Integer]("VSYS_ID").toLong
       val key = fqdn.concat("-" + serverIp + "-" + vsysId)
       (key, row)
     }) /*.partitionBy(new CustomPartitioner(ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS))*/
diff --git a/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala b/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala
index 2bd7732..96b3dcd 100644
--- a/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala
+++ b/ip-learning-spark/src/main/scala/cn/ac/iie/service/update/UpdateDocument.scala
@@ -192,34 +192,6 @@ object UpdateDocument {
     subidDoc
   }

-  private def getVertexFqdnRow(joinRow: (String, (Option[BaseDocument], Row))): BaseDocument = {
-    val fqdnDocOpt = joinRow._2._1
-    var fqdnDoc = fqdnDocOpt match {
-      case Some(doc) => doc
-      case None => null
-    }
-    val fqdnRow: Row = joinRow._2._2
-    if (fqdnRow != null) {
-      val fqdn = fqdnRow.getAs[String]("FQDN")
-      val lastFoundTime = fqdnRow.getAs[Long]("LAST_FOUND_TIME")
-      val firstFoundTime = fqdnRow.getAs[Long]("FIRST_FOUND_TIME")
-      val vsysId = fqdnRow.getAs[Long]("VSYS_ID")
-
-      if (fqdnDoc != null) {
-        updateMaxAttribute(fqdnDoc, lastFoundTime, "LAST_FOUND_TIME")
-        fqdnDoc.addAttribute("VSYS_ID", vsysId)
-      } else {
-        fqdnDoc = new BaseDocument
-        fqdnDoc.setKey(fqdn + "-" + vsysId)
-        fqdnDoc.addAttribute("FQDN_NAME", fqdn)
-        fqdnDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime)
-        fqdnDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime)
-        fqdnDoc.addAttribute("VSYS_ID", vsysId)
-      }
-    }
-    fqdnDoc
-  }
-
   private def getVertexIpRow(joinRow: (String, (Option[BaseDocument], Row))): BaseDocument = {
     val ipDocOpt = joinRow._2._1
     var ipDoc = ipDocOpt match {
@@ -236,7 +208,7 @@
       val ipTypeList = ipRow.getAs[ofRef[String]]("ip_type_list")
       val linkInfo = ipRow.getAs[String]("common_link_info")
       val sepAttributeTuple = separateAttributeByIpType(ipTypeList, sessionCountList, bytesSumList)
-      val vsysId = ipRow.getAs[Long]("VSYS_ID")
+      val vsysId = ipRow.getAs[Integer]("VSYS_ID").toLong

       if (ipDoc != null) {
         updateMaxAttribute(ipDoc, lastFoundTime, "LAST_FOUND_TIME")
@@ -276,13 +248,13 @@
     }
     if (fqdnLocIpRow != null) {
       val fqdn = fqdnLocIpRow.getAs[String]("FQDN")
-      val serverIp = fqdnLocIpRow.getAs[String]("common_server_ip")
+      val serverIp = fqdnLocIpRow.getAs[String]("server_ip")
       val firstFoundTime = fqdnLocIpRow.getAs[Long]("FIRST_FOUND_TIME")
       val lastFoundTime = fqdnLocIpRow.getAs[Long]("LAST_FOUND_TIME")
       val countTotalList = fqdnLocIpRow.getAs[ofRef[AnyRef]]("COUNT_TOTAL_LIST")
-      val schemaTypeList = fqdnLocIpRow.getAs[ofRef[AnyRef]]("schema_type_list")
+      val schemaTypeList = fqdnLocIpRow.getAs[ofRef[AnyRef]]("decoded_as_list")
       val distCipRecent = fqdnLocIpRow.getAs[ofRef[String]]("DIST_CIP_RECENT")
-      val vsysId = fqdnLocIpRow.getAs[Long]("VSYS_ID")
+      val vsysId = fqdnLocIpRow.getAs[Integer]("VSYS_ID").toLong

       val sepAttritubeMap: Map[String, Long] = separateAttributeByProtocol(schemaTypeList, countTotalList)
       val distinctIp: Array[String] = mergeDistinctIp(distCipRecent)
diff --git a/ip-learning-spark/src/test/scala/cn/ac/iie/dao/BaseClickhouseDataTest.scala b/ip-learning-spark/src/test/scala/cn/ac/iie/dao/BaseClickhouseDataTest.scala
index 01804eb..3fcf09f 100644
--- a/ip-learning-spark/src/test/scala/cn/ac/iie/dao/BaseClickhouseDataTest.scala
+++ b/ip-learning-spark/src/test/scala/cn/ac/iie/dao/BaseClickhouseDataTest.scala
@@ -19,7 +19,7 @@
        | FROM
        | global_temp.dbtable
        | WHERE
-       | common_schema_type = 'SSL' GROUP BY ssl_sni
+       | decoded_as = 'SSL' GROUP BY ssl_sni
        | )
        | UNION ALL
        | (SELECT
@@ -27,7 +27,7 @@
        | FROM
        | global_temp.dbtable
        | WHERE
-       | common_schema_type = 'HTTP' GROUP BY http_host
+       | decoded_as = 'HTTP' GROUP BY http_host
        | )
        | )
        |GROUP BY
```
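To make the failure mode concrete, here is a minimal sketch, independent of this repository: the `VsysIdCastDemo` object, the local `SparkSession`, and the one-column schema are illustrative stand-ins for the DataFrame that `BaseClickhouseData` builds, assuming the JDBC reader maps `vsys_id` to Spark's `IntegerType`.

```scala
// Hypothetical demo, not part of ip-learning-spark.
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

object VsysIdCastDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("vsys-id-cast-demo")
      .getOrCreate()

    // A 32-bit source column becomes IntegerType, so each Row stores a
    // boxed java.lang.Integer rather than a java.lang.Long.
    val schema = StructType(Seq(StructField("VSYS_ID", IntegerType)))
    val df = spark.createDataFrame(
      spark.sparkContext.parallelize(Seq(Row(1))), schema)
    val row = df.head()

    // Throws at runtime: getAs[Long] is an unchecked cast, and
    // java.lang.Integer cannot be cast to java.lang.Long.
    // val bad = row.getAs[Long]("VSYS_ID")

    // The commit's fix: read the boxed Integer, then widen explicitly.
    val vsysId: Long = row.getAs[Integer]("VSYS_ID").toLong

    // A defensive alternative (not used by the commit) that tolerates
    // either integer width by matching on java.lang.Number.
    val safe: Long = row.get(row.fieldIndex("VSYS_ID")) match {
      case n: java.lang.Number => n.longValue()
      case other               => sys.error(s"unexpected VSYS_ID: $other")
    }

    println(s"vsysId = $vsysId, safe = $safe")
    spark.stop()
  }
}
```

Widening at the read site keeps `vsysId` a `Long` throughout, so join keys such as `ip + "-" + vsysId` and `fqdn + "-" + serverIp + "-" + vsysId` render the same text they did before the type mismatch surfaced.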
