Isolate the resources of the three tasks; add the spark.executor.cores and spark.cores.max resource-control parameters.

wanglihui
2021-10-29 18:54:18 +08:00
parent 264afdaa3e
commit bf707f8b2e
9 changed files with 341 additions and 158 deletions
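In Spark standalone mode, spark.cores.max caps the total CPU cores one application may claim from the cluster, while spark.executor.cores fixes the cores granted to each executor, so an application is bounded to roughly cores.max / executor.cores executors (10 / 1 = 10 with the values added below). That is how the three jobs split out by this commit are kept from starving one another. A minimal sketch of what the new settings amount to when a session is built (it mirrors the SparkSessionUtil change at the end of this diff; the literal values come from the updated configuration file):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("test")
      .config("spark.executor.cores", "1") // cores granted to each executor
      .config("spark.cores.max", "10")     // total cores this application may claim
      .getOrCreate()
    // In standalone mode this job is allocated at most 10 / 1 = 10 executors.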

pom.xml

@@ -7,99 +7,263 @@
     <groupId>cn.ac.iie</groupId>
     <artifactId>ip-learning-spark</artifactId>
     <version>1.0-SNAPSHOT</version>
+
+    <repositories>
+        <repository>
+            <id>nexus</id>
+            <name>Team Nexus Repository</name>
+            <url>http://192.168.40.125:8099/content/groups/public</url>
+        </repository>
+        <repository>
+            <id>ebi</id>
+            <name>www.ebi.ac.uk</name>
+            <url>http://www.ebi.ac.uk/intact/maven/nexus/content/groups/public/</url>
+        </repository>
+        <repository>
+            <id>maven-ali</id>
+            <url>http://maven.aliyun.com/nexus/content/groups/public/</url>
+            <releases>
+                <enabled>true</enabled>
+            </releases>
+            <snapshots>
+                <enabled>true</enabled>
+                <updatePolicy>always</updatePolicy>
+                <checksumPolicy>fail</checksumPolicy>
+            </snapshots>
+        </repository>
+    </repositories>
+
     <dependencies>
+        <dependency>
+            <groupId>com.zdjizhi</groupId>
+            <artifactId>galaxy</artifactId>
+            <version>1.0.6</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-log4j12</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.google.guava</groupId>
+                    <artifactId>guava</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>1.7.25</version>
+        </dependency>
         <dependency>
             <groupId>javax.servlet</groupId>
             <artifactId>javax.servlet-api</artifactId>
             <version>3.0.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpclient</artifactId>
             <version>4.5.2</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>httpcore</artifactId>
+                    <groupId>org.apache.httpcomponents</groupId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore -->
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpcore</artifactId>
             <version>4.4.6</version>
         </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
             <version>19.0</version>
         </dependency>
         <dependency>
             <groupId>org.apache.spark</groupId>
             <artifactId>spark-core_2.11</artifactId>
             <version>2.2.3</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>guava</artifactId>
+                    <groupId>com.google.guava</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>netty</artifactId>
+                    <groupId>io.netty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>javax.servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jaxb-api</artifactId>
+                    <groupId>javax.xml.bind</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j</artifactId>
+                    <groupId>log4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>httpclient</artifactId>
+                    <groupId>org.apache.httpcomponents</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>httpcore</artifactId>
+                    <groupId>org.apache.httpcomponents</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>zookeeper</artifactId>
+                    <groupId>org.apache.zookeeper</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-log4j12</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.apache.spark</groupId>
             <artifactId>spark-sql_2.11</artifactId>
             <version>2.2.3</version>
         </dependency>
         <dependency>
             <groupId>ru.yandex.clickhouse</groupId>
             <artifactId>clickhouse-jdbc</artifactId>
             <version>0.1.54</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>jackson-databind</artifactId>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>com.typesafe</groupId>
             <artifactId>config</artifactId>
             <version>1.2.1</version>
         </dependency>
         <dependency>
             <groupId>com.arangodb</groupId>
             <artifactId>arangodb-java-driver</artifactId>
             <version>6.6.3</version>
         </dependency>
         <dependency>
             <groupId>com.arangodb</groupId>
             <artifactId>velocypack-module-jdk8</artifactId>
             <version>1.1.0</version>
         </dependency>
         <dependency>
             <groupId>com.arangodb</groupId>
             <artifactId>velocypack-module-scala_2.11</artifactId>
             <version>1.2.0</version>
         </dependency>
         <dependency>
             <groupId>org.scala-lang</groupId>
             <artifactId>scala-library</artifactId>
             <version>2.11.8</version>
         </dependency>
         <dependency>
             <groupId>net.alchim31.maven</groupId>
             <artifactId>scala-maven-plugin</artifactId>
             <version>3.2.0</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>maven-artifact</artifactId>
+                    <groupId>org.apache.maven</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>maven-core</artifactId>
+                    <groupId>org.apache.maven</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>maven-model</artifactId>
+                    <groupId>org.apache.maven</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>maven-plugin-api</artifactId>
+                    <groupId>org.apache.maven</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>maven-repository-metadata</artifactId>
+                    <groupId>org.apache.maven</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>maven-settings</artifactId>
+                    <groupId>org.apache.maven</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>doxia-sink-api</artifactId>
+                    <groupId>org.apache.maven.doxia</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>plexus-container-default</artifactId>
+                    <groupId>org.codehaus.plexus</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>plexus-interpolation</artifactId>
+                    <groupId>org.codehaus.plexus</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>plexus-utils</artifactId>
+                    <groupId>org.codehaus.plexus</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>scala-library</artifactId>
+                    <groupId>org.scala-lang</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>scala-reflect</artifactId>
+                    <groupId>org.scala-lang</groupId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.scala-lang.modules</groupId>
             <artifactId>scala-xml_2.11</artifactId>
             <version>1.0.4</version>
         </dependency>
         <dependency>
             <groupId>org.scala-tools</groupId>
             <artifactId>maven-scala-plugin</artifactId>
             <version>2.15.2</version>
         </dependency>
     </dependencies>
     <build>
         <plugins>

(job configuration properties file)

@@ -1,6 +1,8 @@
 #Spark job configuration
 spark.sql.shuffle.partitions=10
 spark.executor.memory=4g
+spark.executor.cores=1
+spark.cores.max=10
 spark.app.name=test
 spark.network.timeout=300s
 spark.serializer=org.apache.spark.serializer.KryoSerializer
@@ -17,22 +19,19 @@ spark.read.clickhouse.session.table=session_record
 spark.read.clickhouse.radius.table=radius_record
 clickhouse.socket.timeout=300000
 #arangoDB configuration
-#arangoDB.host=192.168.40.223
 arangoDB.host=192.168.44.12
 arangoDB.port=8529
 arangoDB.user=root
-#arangoDB.password=galaxy_2019
 arangoDB.password=ceiec2019
 arangoDB.DB.name=tsg_galaxy_v3_test
-#arangoDB.DB.name=iplearn_media_domain
 arangoDB.ttl=3600
 thread.pool.number=10
 #ClickHouse read-window mode: 0 = the past hour, 1 = the fixed range below
-clickhouse.time.limit.type=0
-read.clickhouse.max.time=1608518990
-read.clickhouse.min.time=1604851201
+clickhouse.time.limit.type=1
+read.clickhouse.max.time=1634902508
+read.clickhouse.min.time=1631759985
 update.arango.batch=10000
@@ -40,5 +39,6 @@ distinct.client.ip.num=10000
 recent.count.hour=24
 update.interval=3600
 arangodb.total.num=20000000
+#Radius read window; matches the radius job's run period, in minutes
+read.radius.granularity=-30
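For reference: with clickhouse.time.limit.type switched to 1, the readers use the fixed epoch range [read.clickhouse.min.time, read.clickhouse.max.time) instead of the trailing hour. A hedged sketch of the mode switch as the comment describes it (the variable names are illustrative, not the project's; the actual logic lives in getTimeLimit below):

    val window: (Long, Long) =
      if (timeLimitType == 0) {
        val now = System.currentTimeMillis() / 1000
        (now, now - 3600)                              // mode 0: the past hour
      } else {
        (readClickhouseMaxTime, readClickhouseMinTime) // mode 1: fixed range from config
      }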

ApplicationConfig.scala

@@ -7,6 +7,8 @@ object ApplicationConfig {
   val SPARK_SQL_SHUFFLE_PARTITIONS: Int = config.getInt("spark.sql.shuffle.partitions")
   val SPARK_EXECUTOR_MEMORY: String = config.getString("spark.executor.memory")
+  val SPARK_EXECUTOR_CORES: String = config.getString("spark.executor.cores")
+  val SPARK_CORES_MAX: String = config.getString("spark.cores.max")
   val SPARK_APP_NAME: String = config.getString("spark.app.name")
   val SPARK_NETWORK_TIMEOUT: String = config.getString("spark.network.timeout")
   // val REPARTITION_NUMBER: Int = config.getInt("repartitionNumber")
@@ -46,4 +48,6 @@ object ApplicationConfig {
   val ARANGODB_TOTAL_NUM: Long = config.getLong("arangodb.total.num")
+
+  val READ_RADIUS_GRANULARITY: Int = config.getInt("read.radius.granularity")
 }
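The two new vals follow the file's existing pattern: every key is read through a Typesafe Config instance (the pom pulls in com.typesafe:config:1.2.1). A minimal sketch of that loading convention, assuming the standard ConfigFactory.load() entry point:

    import com.typesafe.config.{Config, ConfigFactory}

    // load() picks up application.conf / application.properties from the classpath
    val config: Config = ConfigFactory.load()
    val executorCores: String = config.getString("spark.executor.cores") // "1"
    val coresMax: String = config.getString("spark.cores.max")           // "10"

The values stay Strings because SparkSession.Builder.config accepts string values for these keys.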

BaseClickhouseData.scala

@@ -1,7 +1,10 @@
 package cn.ac.iie.dao
 
+import java.util.Date
+
 import cn.ac.iie.config.ApplicationConfig
 import cn.ac.iie.utils.SparkSessionUtil.spark
+import com.zdjizhi.utils.DateUtils
 import org.apache.spark.sql.DataFrame
 import org.slf4j.LoggerFactory
@@ -36,19 +39,19 @@ object BaseClickhouseData {
val where = "common_recv_time >= " + timeLimit._2 + " AND common_recv_time < " + timeLimit._1 val where = "common_recv_time >= " + timeLimit._2 + " AND common_recv_time < " + timeLimit._1
val sql = val sql =
s""" s"""
|(SELECT |(SELECT
| FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME | FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME
|FROM |FROM
| ((SELECT | ((SELECT
| ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME | ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME
| FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE} | FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
| WHERE $where and common_schema_type = 'SSL' GROUP BY ssl_sni | WHERE $where and common_schema_type = 'SSL' GROUP BY ssl_sni
| )UNION ALL | )UNION ALL
| (SELECT | (SELECT
| http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME | http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME
| FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE} | FROM ${ApplicationConfig.SPARK_READ_CLICKHOUSE_SESSION_TABLE}
| WHERE $where and common_schema_type = 'HTTP' GROUP BY http_host)) | WHERE $where and common_schema_type = 'HTTP' GROUP BY http_host))
|GROUP BY FQDN HAVING FQDN != '') as dbtable |GROUP BY FQDN HAVING FQDN != '') as dbtable
""".stripMargin """.stripMargin
LOG.warn(sql) LOG.warn(sql)
val frame = initClickhouseData(sql) val frame = initClickhouseData(sql)
@@ -114,10 +117,12 @@ object BaseClickhouseData {
   def getRelationSubidLocateIpDf: DataFrame = {
     val where =
       s"""
-        | common_recv_time >= ${timeLimit._2}
-        | AND common_recv_time < ${timeLimit._1}
+        | common_recv_time >= ${getRadiusTimeRange._2}
+        | AND common_recv_time < ${getRadiusTimeRange._1}
         | AND common_subscriber_id != ''
         | AND radius_framed_ip != ''
+        | AND radius_packet_type = 4
+        | AND radius_acct_status_type = 1
       """.stripMargin
     val sql =
       s"""
@@ -136,8 +141,8 @@ object BaseClickhouseData {
   def getVertexSubidDf: DataFrame = {
     val where =
       s"""
-        | common_recv_time >= ${timeLimit._2}
-        | AND common_recv_time < ${timeLimit._1}
+        | common_recv_time >= ${getRadiusTimeRange._2}
+        | AND common_recv_time < ${getRadiusTimeRange._1}
         | AND common_subscriber_id != ''
         | AND radius_framed_ip != ''
         | AND radius_packet_type = 4
@@ -159,8 +164,8 @@ object BaseClickhouseData {
   def getVertexFramedIpDf: DataFrame = {
     val where =
       s"""
-        | common_recv_time >= ${timeLimit._2}
-        | AND common_recv_time < ${timeLimit._1}
+        | common_recv_time >= ${getRadiusTimeRange._2}
+        | AND common_recv_time < ${getRadiusTimeRange._1}
         | AND common_subscriber_id != ''
         | AND radius_framed_ip != ''
         | AND radius_packet_type = 4
@@ -180,6 +185,17 @@ object BaseClickhouseData {
     frame
   }
 
+  private def getRadiusTimeRange: (Long, Long) = {
+    val date = DateUtils.getTimeFloor(new Date(System.currentTimeMillis()), "PT1M")
+    val max = date.getTime / 1000
+    val min = DateUtils.getSomeMinute(date, ApplicationConfig.READ_RADIUS_GRANULARITY).getTime / 1000
+    (max, min)
+  }
+
+  def main(args: Array[String]): Unit = {
+    println(getRadiusTimeRange)
+    println(getRadiusTimeRange._2 - getRadiusTimeRange._1)
+  }
+
   private def getTimeLimit: (Long, Long) = {
     var maxTime = 0L
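getRadiusTimeRange floors the current time to the minute and then steps READ_RADIUS_GRANULARITY minutes from it; because the configured granularity is -30, the window covers the previous 30 minutes. An equivalent sketch with java.time, assuming DateUtils.getTimeFloor(date, "PT1M") truncates to the minute and DateUtils.getSomeMinute(date, n) adds n minutes (both are assumptions about the com.zdjizhi galaxy utility):

    import java.time.Instant
    import java.time.temporal.ChronoUnit

    // Floor "now" to the minute, then look back |granularity| minutes.
    val max = Instant.now().truncatedTo(ChronoUnit.MINUTES).getEpochSecond
    val min = max + ApplicationConfig.READ_RADIUS_GRANULARITY * 60L // -30 => max - 1800
    // (max, min) matches the tuple shape consumed by the WHERE clauses above.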

IpLearningApplication.scala

@@ -5,6 +5,6 @@ import cn.ac.iie.service.update.UpdateDocument
 object IpLearningApplication {
   def main(args: Array[String]): Unit = {
-    UpdateDocument.update()
+    UpdateDocument.ipLearning()
   }
 }

IpRecommendApplication.scala

@@ -0,0 +1,11 @@
+package cn.ac.iie.main
+
+import cn.ac.iie.service.update.UpdateDocument
+
+object IpRecommendApplication {
+  def main(args: Array[String]): Unit = {
+    UpdateDocument.ipRecommend()
+  }
+}

SubscriberRecommendApplication.scala

@@ -0,0 +1,11 @@
+package cn.ac.iie.main
+
+import cn.ac.iie.service.update.UpdateDocument
+
+object SubscriberRecommendApplication {
+  def main(args: Array[String]): Unit = {
+    UpdateDocument.subscriberRecommend()
+  }
+}

UpdateDocument.scala

@@ -17,20 +17,35 @@ object UpdateDocument {
   private val arangoManger: ArangoDBConnect = ArangoDBConnect.getInstance()
   private val LOG = LoggerFactory.getLogger(UpdateDocument.getClass)
 
-  def update(): Unit = {
+  def ipLearning(): Unit = {
     try {
-      // updateDocument("FQDN", getVertexFqdnRow, mergeVertexFqdn)
-      updateDocument("SUBSCRIBER", getVertexSubidRow, mergeVertexSubid)
-      insertFrameIp()
-      updateDocument("R_LOCATE_SUBSCRIBER2IP", getRelationSubidLocateIpRow, mergeRelationSubidLocateIp)
       updateDocument("R_LOCATE_FQDN2IP", getRelationFqdnLocateIpRow, mergeRelationFqdnLocateIp)
+    } catch {
+      case e: Exception => e.printStackTrace()
+    } finally {
+      arangoManger.clean()
+      SparkSessionUtil.closeSpark()
+      System.exit(0)
+    }
+  }
+
+  def subscriberRecommend(): Unit = {
+    try {
+      updateDocument("SUBSCRIBER", getVertexSubidRow, mergeVertexSubid)
+      insertFrameIp()
+      updateDocument("R_LOCATE_SUBSCRIBER2IP", getRelationSubidLocateIpRow, mergeRelationSubidLocateIp)
+    } catch {
+      case e: Exception => e.printStackTrace()
+    } finally {
+      arangoManger.clean()
+      SparkSessionUtil.closeSpark()
+      System.exit(0)
+    }
+  }
+
+  def ipRecommend(): Unit = {
+    try {
       updateDocument("IP", getVertexIpRow, mergeVertexIp)
     } catch {
       case e: Exception => e.printStackTrace()
     } finally {
@@ -57,7 +72,7 @@ object UpdateDocument {
         val document: T = getDocumentRow(row)
         if (document != null) {
           fqdnAccmu.add(1)
-          // println(document)
           resultDocumentList.add(document)
         }
         i += 1
@@ -118,14 +133,7 @@ object UpdateDocument {
       case Some(doc) => doc
       case None => null
     }
     val subidLocIpRow = joinRow._2._2
-    // val subidLocIpRow = subidLocIpRowOpt match {
-    //   case Some(r) => r
-    //   case None => null
-    // }
     if (subidLocIpRow != null) {
       val subId = subidLocIpRow.getAs[String]("common_subscriber_id")
       val ip = subidLocIpRow.getAs[String]("radius_framed_ip")
@@ -155,18 +163,12 @@ object UpdateDocument {
       case Some(doc) => doc
       case None => null
     }
     val subidRow = joinRow._2._2
-    // val subidRow = subidRowOpt match {
-    //   case Some(r) => r
-    //   case None => null
-    // }
     if (subidRow != null) {
       val subId = subidRow.getAs[String]("common_subscriber_id")
       val subLastFoundTime = subidRow.getAs[Long]("LAST_FOUND_TIME")
       val subFirstFoundTime = subidRow.getAs[Long]("FIRST_FOUND_TIME")
       if (subidDoc != null) {
         updateMaxAttribute(subidDoc, subLastFoundTime, "LAST_FOUND_TIME")
       } else {
@@ -177,7 +179,6 @@ object UpdateDocument {
         subidDoc.addAttribute("LAST_FOUND_TIME", subLastFoundTime)
       }
     }
-
     subidDoc
   }
@@ -187,18 +188,12 @@ object UpdateDocument {
       case Some(doc) => doc
       case None => null
     }
     val fqdnRow: Row = joinRow._2._2
-    // val fqdnRow = fqdnRowOpt match {
-    //   case Some(r) => r
-    //   case None => null
-    // }
     if (fqdnRow != null) {
       val fqdn = fqdnRow.getAs[String]("FQDN")
       val lastFoundTime = fqdnRow.getAs[Long]("LAST_FOUND_TIME")
       val firstFoundTime = fqdnRow.getAs[Long]("FIRST_FOUND_TIME")
       if (fqdnDoc != null) {
         updateMaxAttribute(fqdnDoc, lastFoundTime, "LAST_FOUND_TIME")
       } else {
@@ -209,7 +204,6 @@ object UpdateDocument {
         fqdnDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime)
       }
     }
-
     fqdnDoc
   }
@@ -219,14 +213,7 @@ object UpdateDocument {
       case Some(doc) => doc
       case None => null
     }
     val ipRow = joinRow._2._2
-    // val ipRow = ipRowOpt match {
-    //   case Some(r) => r
-    //   case None => null
-    // }
     if (ipRow != null) {
       val ip = ipRow.getAs[String]("IP")
       val firstFoundTime = ipRow.getAs[Long]("FIRST_FOUND_TIME")
@@ -257,7 +244,6 @@ object UpdateDocument {
         ipDoc.addAttribute("COMMON_LINK_INFO", "")
       }
     }
-
     ipDoc
   }
@@ -268,18 +254,10 @@ object UpdateDocument {
       case Some(doc) => doc
       case None => null
     }
     val fqdnLocIpRow = joinRow._2._2
-    // val fqdnLocIpRow = fqdnLocIpRowOpt match {
-    //   case Some(r) => r
-    //   case None => null
-    // }
     if (fqdnLocIpDoc != null) {
       updateProtocolDocument(fqdnLocIpDoc)
     }
     if (fqdnLocIpRow != null) {
       val fqdn = fqdnLocIpRow.getAs[String]("FQDN")
       val serverIp = fqdnLocIpRow.getAs[String]("common_server_ip")
@@ -291,9 +269,7 @@ object UpdateDocument {
       val sepAttritubeMap: Map[String, Long] = separateAttributeByProtocol(schemaTypeList, countTotalList)
       val distinctIp: Array[String] = mergeDistinctIp(distCipRecent)
       val key = fqdn.concat("-" + serverIp)
-
       if (fqdnLocIpDoc != null) {
         updateMaxAttribute(fqdnLocIpDoc, lastFoundTime, "LAST_FOUND_TIME")
         updateProtocolAttritube(fqdnLocIpDoc, sepAttritubeMap)
@@ -309,7 +285,6 @@ object UpdateDocument {
         putDistinctIp(fqdnLocIpDoc, distinctIp)
       }
     }
-
     fqdnLocIpDoc
   }
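The three new entry points share an identical catch/finally tail: clean up the Arango connection pool, close the Spark session, exit. A minimal sketch, not the project's code, of how that tail could be factored into one helper using the same identifiers the file already defines:

    // Hypothetical helper; arangoManger and SparkSessionUtil come from the file above.
    private def runTask(body: => Unit): Unit = {
      try body
      catch { case e: Exception => e.printStackTrace() }
      finally {
        arangoManger.clean()
        SparkSessionUtil.closeSpark()
        System.exit(0)
      }
    }

    def ipRecommend(): Unit = runTask {
      updateDocument("IP", getVertexIpRow, mergeVertexIp)
    }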

SparkSessionUtil.scala

@@ -21,6 +21,8 @@ object SparkSessionUtil {
.config("spark.network.timeout", ApplicationConfig.SPARK_NETWORK_TIMEOUT) .config("spark.network.timeout", ApplicationConfig.SPARK_NETWORK_TIMEOUT)
.config("spark.sql.shuffle.partitions", ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS) .config("spark.sql.shuffle.partitions", ApplicationConfig.SPARK_SQL_SHUFFLE_PARTITIONS)
.config("spark.executor.memory", ApplicationConfig.SPARK_EXECUTOR_MEMORY) .config("spark.executor.memory", ApplicationConfig.SPARK_EXECUTOR_MEMORY)
.config("spark.executor.cores",ApplicationConfig.SPARK_EXECUTOR_CORES)
.config("spark.cores.max",ApplicationConfig.SPARK_CORES_MAX)
.config("arangodb.hosts", s"${ApplicationConfig.ARANGODB_HOST}:${ApplicationConfig.ARANGODB_PORT}") .config("arangodb.hosts", s"${ApplicationConfig.ARANGODB_HOST}:${ApplicationConfig.ARANGODB_PORT}")
.config("arangodb.user", ApplicationConfig.ARANGODB_USER) .config("arangodb.user", ApplicationConfig.ARANGODB_USER)
.config("arangodb.password", ApplicationConfig.ARANGODB_PASSWORD) .config("arangodb.password", ApplicationConfig.ARANGODB_PASSWORD)