Resource-isolate the three jobs; add the spark.executor.cores and spark.cores.max resource-control parameters.
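For context: spark.executor.cores fixes how many cores each executor may use, and spark.cores.max caps the total cores a single application can claim on a standalone (or Mesos) cluster. Giving each of the three jobs its own values for both keeps them from starving one another on a shared cluster.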

Author: wanglihui
Date: 2021-10-29 18:54:18 +08:00
Parent: 264afdaa3e
Commit: bf707f8b2e
9 changed files with 341 additions and 158 deletions

File: ApplicationConfig.scala

@@ -7,6 +7,8 @@ object ApplicationConfig {
val SPARK_SQL_SHUFFLE_PARTITIONS: Int = config.getInt("spark.sql.shuffle.partitions")
val SPARK_EXECUTOR_MEMORY: String = config.getString("spark.executor.memory")
val SPARK_EXECUTOR_CORES: String = config.getString("spark.executor.cores")
val SPARK_CORES_MAX: String = config.getString("spark.cores.max")
val SPARK_APP_NAME: String = config.getString("spark.app.name")
val SPARK_NETWORK_TIMEOUT: String = config.getString("spark.network.timeout")
// val REPARTITION_NUMBER: Int = config.getInt("repartitionNumber")
@@ -46,4 +48,6 @@ object ApplicationConfig {
val ARANGODB_TOTAL_NUM: Long = config.getLong("arangodb.total.num")
val READ_RADIUS_GRANULARITY: Int = config.getInt("read.radius.granularity")
}

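For these getString/getInt lookups to resolve, the backing configuration (presumably a Typesafe Config application.conf; the file name and every value below are assumptions, not part of this commit) needs matching entries, for example:

# hypothetical application.conf entries
spark.executor.cores = "2"     # cores per executor for this job
spark.cores.max = "6"          # total cores this job may claim (standalone/Mesos)
read.radius.granularity = -5   # minute offset that opens the RADIUS read window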
File: BaseClickhouseData.scala

@@ -1,7 +1,10 @@
package cn.ac.iie.dao
import java.util.Date
import cn.ac.iie.config.ApplicationConfig
import cn.ac.iie.utils.SparkSessionUtil.spark
import com.zdjizhi.utils.DateUtils
import org.apache.spark.sql.DataFrame
import org.slf4j.LoggerFactory
@@ -36,19 +39,19 @@ object BaseClickhouseData {
val where = "common_recv_time >= " + timeLimit._2 + " AND common_recv_time < " + timeLimit._1
val sql =
s"""
|(SELECT
| FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME
|FROM
| ((SELECT
| ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME
| FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
| WHERE $where and common_schema_type = 'SSL' GROUP BY ssl_sni
| )UNION ALL
| (SELECT
| http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME
| FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
| WHERE $where and common_schema_type = 'HTTP' GROUP BY http_host))
|GROUP BY FQDN HAVING FQDN != '') as dbtable
""".stripMargin
LOG.warn(sql)
val frame = initClickhouseData(sql)
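(The statement above merges SSL and HTTP observations into a single FQDN column, ssl_sni for SSL sessions and http_host for HTTP, then takes the per-FQDN MIN/MAX of common_recv_time so every domain carries its first and last sighting; the HAVING clause drops empty names. Aliasing the whole query as dbtable suggests it is pushed down to ClickHouse through Spark's JDBC reader.)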
@@ -114,10 +117,12 @@ object BaseClickhouseData {
def getRelationSubidLocateIpDf: DataFrame = {
val where =
s"""
| common_recv_time >= ${timeLimit._2}
| AND common_recv_time < ${timeLimit._1}
| common_recv_time >= ${getRadiusTimeRange._2}
| AND common_recv_time < ${getRadiusTimeRange._1}
| AND common_subscriber_id != ''
| AND radius_framed_ip != ''
| AND radius_packet_type = 4
| AND radius_acct_status_type = 1
""".stripMargin
val sql =
s"""
@@ -136,8 +141,8 @@ object BaseClickhouseData {
def getVertexSubidDf: DataFrame = {
val where =
s"""
| common_recv_time >= ${timeLimit._2}
| AND common_recv_time < ${timeLimit._1}
| common_recv_time >= ${getRadiusTimeRange._2}
| AND common_recv_time < ${getRadiusTimeRange._1}
| AND common_subscriber_id != ''
| AND radius_framed_ip != ''
| AND radius_packet_type = 4
@@ -159,8 +164,8 @@ object BaseClickhouseData {
def getVertexFramedIpDf: DataFrame = {
val where =
s"""
| common_recv_time >= ${timeLimit._2}
| AND common_recv_time < ${timeLimit._1}
| common_recv_time >= ${getRadiusTimeRange._2}
| AND common_recv_time < ${getRadiusTimeRange._1}
| AND common_subscriber_id != ''
| AND radius_framed_ip != ''
| AND radius_packet_type = 4
@@ -180,6 +185,17 @@ object BaseClickhouseData {
frame
}
private def getRadiusTimeRange: (Long, Long) = {
val date = DateUtils.getTimeFloor(new Date(System.currentTimeMillis()), "PT1M")
val max = date.getTime / 1000
val min = DateUtils.getSomeMinute(date, ApplicationConfig.READ_RADIUS_GRANULARITY).getTime / 1000
(max, min)
}
def main(args: Array[String]): Unit = {
println(getRadiusTimeRange)
println(getRadiusTimeRange._2 - getRadiusTimeRange._1)
}
private def getTimeLimit: (Long, Long) = {
var maxTime = 0L

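The recurring change in this file swaps the generic timeLimit window for getRadiusTimeRange, so the RADIUS-based queries read their own minute-aligned window: the current time is floored to the whole minute ("PT1M") and the window start is offset by READ_RADIUS_GRANULARITY minutes. The added filters keep only accounting records; in RADIUS terms, packet type 4 is Accounting-Request and Acct-Status-Type 1 is Start. The committed main method just prints the computed range as a manual sanity check. DateUtils is an external helper (com.zdjizhi.utils), so the sketch below only mimics its assumed semantics with java.time; the names and behavior are assumptions, not this repository's code:

import java.time.Instant
import java.time.temporal.ChronoUnit

object RadiusWindowSketch {
  // Assumed equivalent of DateUtils.getTimeFloor(date, "PT1M"): drop the seconds.
  def floorToMinute(now: Instant): Instant = now.truncatedTo(ChronoUnit.MINUTES)

  // Assumed equivalent of DateUtils.getSomeMinute(date, n): shift by n minutes;
  // a negative n moves the window start into the past.
  def someMinute(t: Instant, n: Int): Instant = t.plus(n.toLong, ChronoUnit.MINUTES)

  // Mirrors getRadiusTimeRange: returns (max, min) as epoch seconds (getTime / 1000).
  def radiusTimeRange(granularityMinutes: Int): (Long, Long) = {
    val max = floorToMinute(Instant.now())
    val min = someMinute(max, granularityMinutes)
    (max.getEpochSecond, min.getEpochSecond)
  }
}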
File: IpLearningApplication.scala

@@ -5,6 +5,6 @@ import cn.ac.iie.service.update.UpdateDocument
object IpLearningApplication {
def main(args: Array[String]): Unit = {
UpdateDocument.update()
UpdateDocument.ipLearning()
}
}

File: IpRecommendApplication.scala

@@ -0,0 +1,11 @@
package cn.ac.iie.main
import cn.ac.iie.service.update.UpdateDocument
object IpRecommendApplication {
def main(args: Array[String]): Unit = {
UpdateDocument.ipRecommend()
}
}

File: SubscriberRecommendApplication.scala

@@ -0,0 +1,11 @@
package cn.ac.iie.main
import cn.ac.iie.service.update.UpdateDocument
object SubscriberRecommendApplication {
def main(args: Array[String]): Unit = {
UpdateDocument.subscriberRecommend()
}
}

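Each workload is now its own driver class, so the three jobs can be submitted as separate applications with separate resource caps. A hedged sketch (the jar name, master URL and numbers are placeholders, not taken from this commit):

spark-submit --class cn.ac.iie.main.IpRecommendApplication \
  --master spark://master:7077 \
  --conf spark.executor.cores=2 --conf spark.cores.max=4 \
  ip-learning.jar

The same pattern applies to IpLearningApplication and SubscriberRecommendApplication. Note that whatever SparkSessionUtil sets programmatically overrides these --conf flags.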
File: UpdateDocument.scala

@@ -17,20 +17,35 @@ object UpdateDocument {
private val arangoManger: ArangoDBConnect = ArangoDBConnect.getInstance()
private val LOG = LoggerFactory.getLogger(UpdateDocument.getClass)
def update(): Unit = {
def ipLearning(): Unit = {
try {
// updateDocument("FQDN", getVertexFqdnRow, mergeVertexFqdn)
updateDocument("SUBSCRIBER", getVertexSubidRow, mergeVertexSubid)
insertFrameIp()
updateDocument("R_LOCATE_SUBSCRIBER2IP", getRelationSubidLocateIpRow, mergeRelationSubidLocateIp)
updateDocument("R_LOCATE_FQDN2IP", getRelationFqdnLocateIpRow, mergeRelationFqdnLocateIp)
} catch {
case e: Exception => e.printStackTrace()
} finally {
arangoManger.clean()
SparkSessionUtil.closeSpark()
System.exit(0)
}
}
def subscriberRecommend(): Unit = {
try {
updateDocument("SUBSCRIBER", getVertexSubidRow, mergeVertexSubid)
insertFrameIp()
updateDocument("R_LOCATE_SUBSCRIBER2IP", getRelationSubidLocateIpRow, mergeRelationSubidLocateIp)
} catch {
case e: Exception => e.printStackTrace()
} finally {
arangoManger.clean()
SparkSessionUtil.closeSpark()
System.exit(0)
}
}
def ipRecommend(): Unit = {
try {
updateDocument("IP", getVertexIpRow, mergeVertexIp)
} catch {
case e: Exception => e.printStackTrace()
} finally {
@@ -57,7 +72,7 @@ object UpdateDocument {
val document: T = getDocumentRow(row)
if (document != null) {
fqdnAccmu.add(1)
// println(document)
resultDocumentList.add(document)
}
i += 1
@@ -118,14 +133,7 @@ object UpdateDocument {
case Some(doc) => doc
case None => null
}
val subidLocIpRow = joinRow._2._2
// val subidLocIpRow = subidLocIpRowOpt match {
// case Some(r) => r
// case None => null
// }
if (subidLocIpRow != null) {
val subId = subidLocIpRow.getAs[String]("common_subscriber_id")
val ip = subidLocIpRow.getAs[String]("radius_framed_ip")
@@ -155,18 +163,12 @@ object UpdateDocument {
case Some(doc) => doc
case None => null
}
val subidRow = joinRow._2._2
// val subidRow = subidRowOpt match {
// case Some(r) => r
// case None => null
// }
if (subidRow != null) {
val subId = subidRow.getAs[String]("common_subscriber_id")
val subLastFoundTime = subidRow.getAs[Long]("LAST_FOUND_TIME")
val subFirstFoundTime = subidRow.getAs[Long]("FIRST_FOUND_TIME")
if (subidDoc != null) {
updateMaxAttribute(subidDoc, subLastFoundTime, "LAST_FOUND_TIME")
} else {
@@ -177,7 +179,6 @@ object UpdateDocument {
subidDoc.addAttribute("LAST_FOUND_TIME", subLastFoundTime)
}
}
subidDoc
}
@@ -187,18 +188,12 @@ object UpdateDocument {
case Some(doc) => doc
case None => null
}
val fqdnRow: Row = joinRow._2._2
// val fqdnRow = fqdnRowOpt match {
// case Some(r) => r
// case None => null
// }
if (fqdnRow != null) {
val fqdn = fqdnRow.getAs[String]("FQDN")
val lastFoundTime = fqdnRow.getAs[Long]("LAST_FOUND_TIME")
val firstFoundTime = fqdnRow.getAs[Long]("FIRST_FOUND_TIME")
if (fqdnDoc != null) {
updateMaxAttribute(fqdnDoc, lastFoundTime, "LAST_FOUND_TIME")
} else {
@@ -209,7 +204,6 @@ object UpdateDocument {
fqdnDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime)
}
}
fqdnDoc
}
@@ -219,14 +213,7 @@ object UpdateDocument {
case Some(doc) => doc
case None => null
}
val ipRow = joinRow._2._2
// val ipRow = ipRowOpt match {
// case Some(r) => r
// case None => null
// }
if (ipRow != null) {
val ip = ipRow.getAs[String]("IP")
val firstFoundTime = ipRow.getAs[Long]("FIRST_FOUND_TIME")
@@ -257,7 +244,6 @@ object UpdateDocument {
ipDoc.addAttribute("COMMON_LINK_INFO", "")
}
}
ipDoc
}
@@ -268,18 +254,10 @@ object UpdateDocument {
case Some(doc) => doc
case None => null
}
val fqdnLocIpRow = joinRow._2._2
// val fqdnLocIpRow = fqdnLocIpRowOpt match {
// case Some(r) => r
// case None => null
// }
if (fqdnLocIpDoc != null) {
updateProtocolDocument(fqdnLocIpDoc)
}
if (fqdnLocIpRow != null) {
val fqdn = fqdnLocIpRow.getAs[String]("FQDN")
val serverIp = fqdnLocIpRow.getAs[String]("common_server_ip")
@@ -291,9 +269,7 @@ object UpdateDocument {
val sepAttritubeMap: Map[String, Long] = separateAttributeByProtocol(schemaTypeList, countTotalList)
val distinctIp: Array[String] = mergeDistinctIp(distCipRecent)
val key = fqdn.concat("-" + serverIp)
if (fqdnLocIpDoc != null) {
updateMaxAttribute(fqdnLocIpDoc, lastFoundTime, "LAST_FOUND_TIME")
updateProtocolAttritube(fqdnLocIpDoc, sepAttritubeMap)
@@ -309,7 +285,6 @@ object UpdateDocument {
putDistinctIp(fqdnLocIpDoc, distinctIp)
}
}
fqdnLocIpDoc
}

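Two things stand out in this file. First, the touched merge functions now read the joined Row directly via joinRow._2._2, keeping only a null check instead of the commented-out Option pattern-match. Second, ipLearning, subscriberRecommend and ipRecommend share an identical try/catch/finally skeleton: print the failure, release the ArangoDB connection, stop the Spark session, then System.exit(0), since each driver is a one-shot batch job. A possible consolidation, sketched here as a suggestion and not something this commit contains, is a higher-order helper inside UpdateDocument:

// Hypothetical helper for UpdateDocument, not part of this commit.
private def runIsolated(body: => Unit): Unit = {
  try body
  catch { case e: Exception => e.printStackTrace() }
  finally {
    arangoManger.clean()          // release the ArangoDB connection
    SparkSessionUtil.closeSpark() // stop the shared SparkSession
    System.exit(0)                // one-shot batch driver
  }
}

// e.g. def ipRecommend(): Unit =
//   runIsolated(updateDocument("IP", getVertexIpRow, mergeVertexIp))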
File: SparkSessionUtil.scala

@@ -21,6 +21,8 @@ object SparkSessionUtil {
.config("spark.network.timeout", ApplicationConfig.SPARK_NETWORK_TIMEOUT)
.config("spark.sql.shuffle.partitions", ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS)
.config("spark.executor.memory", ApplicationConfig.SPARK_EXECUTOR_MEMORY)
.config("spark.executor.cores", ApplicationConfig.SPARK_EXECUTOR_CORES)
.config("spark.cores.max", ApplicationConfig.SPARK_CORES_MAX)
.config("arangodb.hosts", s"${ApplicationConfig.ARANGODB_HOST}:${ApplicationConfig.ARANGODB_PORT}")
.config("arangodb.user", ApplicationConfig.ARANGODB_USER)
.config("arangodb.password", ApplicationConfig.ARANGODB_PASSWORD)
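Two operational notes on these builder settings: values set programmatically here take precedence over spark-submit --conf and spark-defaults.conf, so each job's effective limits come from its own application config; and spark.cores.max is honored by standalone and Mesos masters only, while on YARN the analogous control is spark.executor.instances combined with spark.executor.cores.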