2020-08-06 16:11:16 +08:00
|
|
|
package cn.ac.iie.dao
|
|
|
|
|
|
2021-10-29 18:54:18 +08:00
|
|
|
import java.util.Date
|
|
|
|
|
|
2020-08-06 16:13:59 +08:00
|
|
|
import cn.ac.iie.config.ApplicationConfig
|
|
|
|
|
import cn.ac.iie.utils.SparkSessionUtil.spark
|
2021-10-29 18:54:18 +08:00
|
|
|
import com.zdjizhi.utils.DateUtils
|
2020-08-06 16:13:59 +08:00
|
|
|
import org.apache.spark.sql.DataFrame
|
|
|
|
|
import org.slf4j.LoggerFactory
|
|
|
|
|
|
2020-08-06 16:11:16 +08:00
|
|
|
object BaseClickhouseData {
|
2020-08-06 16:13:59 +08:00
|
|
|
// Logger for this loader object.
private val LOG = LoggerFactory.getLogger(BaseClickhouseData.getClass)

// Current wall-clock time floored to the hour, in epoch SECONDS:
// millis / 3_600_000 gives whole hours since epoch, * 3600 converts to seconds.
val currentHour: Long = System.currentTimeMillis / (60 * 60 * 1000) * 60 * 60

// Session-table query window as (maxTime, minTime) epoch seconds — note the
// (max, min) order; see getTimeLimit. Fixed once at object initialisation.
private val timeLimit: (Long, Long) = getTimeLimit
|
|
|
|
|
|
2021-03-23 11:26:55 +08:00
|
|
|
/**
 * Reads `sql` (a parenthesised sub-select aliased "dbtable") from ClickHouse
 * over JDBC into a DataFrame, partitioned on the configured time column.
 *
 * @param sql a complete "(SELECT ...) as dbtable" fragment usable as a JDBC dbtable
 * @return the loaded DataFrame, also registered as global temp view "dbtable"
 */
private def initClickhouseData(sql: String): DataFrame = {
  val dataFrame: DataFrame = spark.read.format("jdbc")
    .option("url", ApplicationConfig.SPARK_READ_CLICKHOUSE_URL)
    .option("dbtable", sql)
    .option("driver", ApplicationConfig.SPARK_READ_CLICKHOUSE_DRIVER)
    .option("user", ApplicationConfig.SPARK_READ_CLICKHOUSE_USER)
    .option("password", ApplicationConfig.SPARK_READ_CLICKHOUSE_PASSWORD)
    .option("numPartitions", ApplicationConfig.NUMPARTITIONS)
    .option("partitionColumn", ApplicationConfig.SPARK_READ_CLICKHOUSE_PARTITIONCOLUMN)
    // timeLimit is (max, min): _2 is the partition lower bound, _1 the upper.
    .option("lowerBound", timeLimit._2)
    .option("upperBound", timeLimit._1)
    .option("fetchsize", ApplicationConfig.SPARK_READ_CLICKHOUSE_FETCHSIZE)
    .option("socket_timeout", ApplicationConfig.CLICKHOUSE_SOCKET_TIMEOUT)
    .load()
  dataFrame.printSchema()
  // NOTE(review): every loader reuses the single global temp view name
  // "dbtable", so each call overwrites the previous view — confirm intended.
  dataFrame.createOrReplaceGlobalTempView("dbtable")
  dataFrame
}
|
|
|
|
|
|
2021-03-23 11:26:55 +08:00
|
|
|
/**
 * Loads FQDN vertices from the ClickHouse session table: SNI (TLS sessions)
 * and Host (HTTP sessions) merged, with first/last seen times per FQDN/vsys.
 *
 * @return DataFrame with FQDN, LAST_FOUND_TIME, FIRST_FOUND_TIME, VSYS_ID
 */
def getVertexFqdnDf: DataFrame = {
  // Half-open window [min, max); timeLimit is (max, min) epoch seconds.
  val where = "recv_time >= " + timeLimit._2 + " AND recv_time < " + timeLimit._1
  val sql =
    s"""
       |(SELECT
       | FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME,vsys_id AS VSYS_ID
       |FROM
       | ((SELECT
       | ssl_sni AS FQDN,MAX( recv_time ) AS LAST_FOUND_TIME,MIN( recv_time ) AS FIRST_FOUND_TIME,vsys_id AS VSYS_ID
       | FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
       | WHERE $where and decoded_as = 'SSL' GROUP BY ssl_sni,vsys_id
       | )UNION ALL
       | (SELECT
       | http_host AS FQDN,MAX( recv_time ) AS LAST_FOUND_TIME,MIN( recv_time ) AS FIRST_FOUND_TIME,vsys_id AS VSYS_ID
       | FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
       | WHERE $where and decoded_as = 'HTTP' GROUP BY http_host,vsys_id))
       |GROUP BY FQDN,VSYS_ID HAVING FQDN != '') as dbtable
    """.stripMargin
  LOG.warn(sql)
  // initClickhouseData already calls printSchema(); the extra
  // frame.printSchema() the old code did here was redundant and is removed.
  initClickhouseData(sql)
}
|
|
|
|
|
|
2021-03-23 11:26:55 +08:00
|
|
|
/**
 * Loads IP vertices from the ClickHouse session table: client IPs and server
 * IPs merged, each with first/last seen times, session count and byte totals.
 *
 * @return DataFrame with IP, FIRST_FOUND_TIME, LAST_FOUND_TIME, SESSION_COUNT,
 *         BYTES_SUM, common_link_info, ip_type ('client'/'server'), VSYS_ID
 */
def getVertexIpDf: DataFrame = {
  // Half-open window [min, max); timeLimit is (max, min) epoch seconds.
  val where = "recv_time >= " + timeLimit._2 + " AND recv_time < " + timeLimit._1
  val sql =
    s"""
       |(SELECT * FROM
       |((SELECT client_ip AS IP,MIN(recv_time) AS FIRST_FOUND_TIME,
       |MAX(recv_time) AS LAST_FOUND_TIME,
       |count(*) as SESSION_COUNT,
       |SUM(sent_bytes+received_bytes) as BYTES_SUM,
       |'' as common_link_info,
       |'client' as ip_type
       |,vsys_id AS VSYS_ID
       |FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
       |where $where
       |group by client_ip,vsys_id)
       |UNION ALL
       |(SELECT server_ip AS IP,
       |MIN(recv_time) AS FIRST_FOUND_TIME,
       |MAX(recv_time) AS LAST_FOUND_TIME,
       |count(*) as SESSION_COUNT,
       |SUM(sent_bytes+received_bytes) as BYTES_SUM,
       |'' as common_link_info,
       |'server' as ip_type
       |,vsys_id AS VSYS_ID
       |FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
       |where $where
       |group by server_ip,vsys_id))) as dbtable
    """.stripMargin
  LOG.warn(sql)
  // initClickhouseData already calls printSchema(); the extra
  // frame.printSchema() the old code did here was redundant and is removed.
  initClickhouseData(sql)
}
|
|
|
|
|
|
|
|
|
|
|
2021-03-23 11:26:55 +08:00
|
|
|
/**
 * Loads FQDN→server-IP "locate" edges from the ClickHouse session table,
 * merging TLS (SNI) and HTTP (Host) sessions, with a bounded sample of
 * distinct client IPs per edge (groupUniqArray with configured cap).
 *
 * @return DataFrame with FQDN, server_ip, LAST/FIRST_FOUND_TIME, COUNT_TOTAL,
 *         DIST_CIP_RECENT, decoded_as_list ('TLS'/'HTTP'), VSYS_ID
 */
def getRelationFqdnLocateIpDf: DataFrame = {
  // Half-open window [min, max); timeLimit is (max, min) epoch seconds.
  val where = "recv_time >= " + timeLimit._2 + " AND recv_time < " + timeLimit._1
  val sql =
    s"""
       |(SELECT * FROM
       |((SELECT ssl_sni AS FQDN,server_ip,MAX(recv_time) AS LAST_FOUND_TIME,MIN(recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,
       |toString(groupUniqArray(${ApplicationConfig.DISTINCT_CLIENT_IP_NUM})(client_ip)) AS DIST_CIP_RECENT,'TLS' AS decoded_as_list, vsys_id AS VSYS_ID
       |FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
       |WHERE $where and decoded_as = 'SSL' GROUP BY ssl_sni,server_ip,vsys_id)
       |UNION ALL
       |(SELECT http_host AS FQDN,server_ip,MAX(recv_time) AS LAST_FOUND_TIME,MIN(recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,
       |toString(groupUniqArray(${ApplicationConfig.DISTINCT_CLIENT_IP_NUM})(client_ip)) AS DIST_CIP_RECENT,'HTTP' AS decoded_as_list,vsys_id AS VSYS_ID
       |FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
       |WHERE $where and decoded_as = 'HTTP' GROUP BY http_host,server_ip,vsys_id))
       |WHERE FQDN != '') as dbtable
    """.stripMargin
  LOG.warn(sql)
  // initClickhouseData already calls printSchema(); the extra
  // frame.printSchema() the old code did here was redundant and is removed.
  initClickhouseData(sql)
}
|
|
|
|
|
|
2021-03-23 11:26:55 +08:00
|
|
|
/**
 * Loads subscriber-id→framed-IP edges from the ClickHouse radius table
 * (accounting-request / start records only).
 *
 * @return DataFrame with common_subscriber_id, radius_framed_ip,
 *         LAST_FOUND_TIME, FIRST_FOUND_TIME, VSYS_ID
 */
def getRelationSubidLocateIpDf: DataFrame = {
  // Snapshot the window ONCE: getRadiusTimeRange re-reads the clock on every
  // call, so interpolating it twice (as the old code did) could produce an
  // inconsistent upper/lower bound at a minute boundary.
  val range = getRadiusTimeRange
  val where =
    s"""
       | common_recv_time >= ${range._2}
       | AND common_recv_time < ${range._1}
       | AND common_subscriber_id != ''
       | AND radius_framed_ip != ''
       | AND radius_packet_type = 4
       | AND radius_acct_status_type = 1
    """.stripMargin
  val sql =
    s"""
       |(
       |SELECT common_subscriber_id,radius_framed_ip,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME,common_vsys_id AS VSYS_ID
       |FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_RADIUS_TABLE}
       |WHERE $where GROUP BY common_subscriber_id,radius_framed_ip,common_vsys_id
       |) as dbtable
    """.stripMargin
  LOG.warn(sql)
  // initClickhouseData already calls printSchema(); the extra
  // frame.printSchema() the old code did here was redundant and is removed.
  initClickhouseData(sql)
}
|
|
|
|
|
|
2021-03-23 11:26:55 +08:00
|
|
|
/**
 * Loads subscriber-id vertices from the ClickHouse radius table
 * (accounting-request / start records only).
 *
 * @return DataFrame with common_subscriber_id, LAST_FOUND_TIME,
 *         FIRST_FOUND_TIME, VSYS_ID
 */
def getVertexSubidDf: DataFrame = {
  // Snapshot the window ONCE: getRadiusTimeRange re-reads the clock on every
  // call, so interpolating it twice (as the old code did) could produce an
  // inconsistent upper/lower bound at a minute boundary.
  val range = getRadiusTimeRange
  val where =
    s"""
       | common_recv_time >= ${range._2}
       | AND common_recv_time < ${range._1}
       | AND common_subscriber_id != ''
       | AND radius_framed_ip != ''
       | AND radius_packet_type = 4
       | AND radius_acct_status_type = 1
    """.stripMargin
  val sql =
    s"""
       |(
       |SELECT common_subscriber_id,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME,common_vsys_id AS VSYS_ID FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_RADIUS_TABLE}
       |WHERE $where GROUP BY common_subscriber_id,common_vsys_id
       |)as dbtable
    """.stripMargin
  LOG.warn(sql)
  // initClickhouseData already calls printSchema(); the extra
  // frame.printSchema() the old code did here was redundant and is removed.
  initClickhouseData(sql)
}
|
|
|
|
|
|
2021-03-23 11:26:55 +08:00
|
|
|
/**
 * Loads framed-IP vertices from the ClickHouse radius table
 * (accounting-request / start records only).
 *
 * @return DataFrame with radius_framed_ip, LAST_FOUND_TIME, VSYS_ID
 */
def getVertexFramedIpDf: DataFrame = {
  // Snapshot the window ONCE: getRadiusTimeRange re-reads the clock on every
  // call, so interpolating it twice (as the old code did) could produce an
  // inconsistent upper/lower bound at a minute boundary.
  val range = getRadiusTimeRange
  val where =
    s"""
       | common_recv_time >= ${range._2}
       | AND common_recv_time < ${range._1}
       | AND common_subscriber_id != ''
       | AND radius_framed_ip != ''
       | AND radius_packet_type = 4
       | AND radius_acct_status_type = 1
    """.stripMargin
  val sql =
    s"""
       |(
       |SELECT radius_framed_ip,MAX(common_recv_time) as LAST_FOUND_TIME,common_vsys_id AS VSYS_ID FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_RADIUS_TABLE} WHERE $where
       |GROUP BY radius_framed_ip,common_vsys_id
       |)as dbtable
    """.stripMargin
  LOG.warn(sql)
  // initClickhouseData already calls printSchema(); the extra
  // frame.printSchema() the old code did here was redundant and is removed.
  initClickhouseData(sql)
}
|
|
|
|
|
|
2021-10-29 18:54:18 +08:00
|
|
|
/**
 * Radius-table query window as (max, min) epoch seconds.
 *
 * The upper bound is "now" floored to the whole minute ("PT1M"); the lower
 * bound is derived from it via DateUtils.getSomeMinute with
 * READ_RADIUS_GRANULARITY — presumably that many minutes earlier
 * (confirm against DateUtils).
 */
private def getRadiusTimeRange: (Long, Long) = {
  val minuteFloor = DateUtils.getTimeFloor(new Date(System.currentTimeMillis()), "PT1M")
  val upperSec = minuteFloor.getTime / 1000
  val lowerSec = DateUtils.getSomeMinute(minuteFloor, ApplicationConfig.READ_RADIUS_GRANULARITY).getTime / 1000
  (upperSec, lowerSec)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Debug entry point: prints the radius query window and its length.
 */
def main(args: Array[String]): Unit = {
  // Compute the range once so both prints see the same snapshot
  // (getRadiusTimeRange re-reads the clock on every call).
  val range = getRadiusTimeRange
  println(range)
  // range is (max, min), so max - min is the window length in seconds.
  // The old code printed _2 - _1 (min - max), which is always negative.
  println(range._1 - range._2)
}
|
2020-08-06 16:13:59 +08:00
|
|
|
|
2021-03-23 11:26:55 +08:00
|
|
|
/**
 * Resolves the session-table query window as (maxTime, minTime) epoch seconds.
 *
 * CLICKHOUSE_TIME_LIMIT_TYPE selects the strategy:
 *  - 0: rolling window ending at the current hour, UPDATE_INTERVAL long
 *  - 1: fixed window taken verbatim from configuration
 *  - anything else: falls back to (0, 0) — previously this happened silently;
 *    a warning is now logged so a misconfigured job is visible.
 */
private def getTimeLimit: (Long, Long) =
  ApplicationConfig.CLICKHOUSE_TIME_LIMIT_TYPE match {
    case 0 =>
      // Rolling window: [currentHour - UPDATE_INTERVAL, currentHour)
      (currentHour, currentHour - ApplicationConfig.UPDATE_INTERVAL)
    case 1 =>
      // Fixed window from configuration.
      (ApplicationConfig.READ_CLICKHOUSE_MAX_TIME, ApplicationConfig.READ_CLICKHOUSE_MIN_TIME)
    case other =>
      LOG.warn(s"Unknown CLICKHOUSE_TIME_LIMIT_TYPE $other; falling back to empty window (0, 0)")
      (0L, 0L)
  }
|
2020-08-06 16:11:16 +08:00
|
|
|
|
|
|
|
|
}
|