Optimize code and performance
@@ -2,7 +2,7 @@ nacos:
   config:
     type: yaml
     server-addr: 192.168.44.12:8848
-    namespace: dev
+    namespace: test
    data-id: p19-file-sync-service
     auto-refresh: true
     group: Galaxy
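The only functional change in this block is the Nacos namespace (dev → test); the data-id, group, and server address stay the same. For context, the keys above are referenced elsewhere in this commit through property placeholders. A minimal sketch of that usage — the annotation is copied from KafkaConsumerConfig below, while the class name and the topic field are illustrative placeholders:

    import com.alibaba.nacos.api.config.ConfigType;
    import com.alibaba.nacos.api.config.annotation.NacosConfigurationProperties;

    // dataId and groupId resolve to the data-id and group declared in the YAML above;
    // autoRefreshed mirrors auto-refresh: true.
    @NacosConfigurationProperties(prefix = "kafka.consumer", dataId = "${nacos.config.data-id}",
            groupId = "${nacos.config.group}", type = ConfigType.YAML, autoRefreshed = true)
    public class ExampleKafkaProperties {
        private String topic; // placeholder field, populated from the kafka.consumer section

        public String getTopic() { return topic; }

        public void setTopic(String topic) { this.topic = topic; }
    }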
pom.xml
@@ -6,7 +6,7 @@
 
     <groupId>com.zdjizhi</groupId>
     <artifactId>p19-file-sync-service</artifactId>
-    <version>23.03.09</version>
+    <version>23.09.26</version>
     <name>p19-file-sync-service</name>
 
     <parent>
@@ -104,13 +104,11 @@
             <artifactId>hutool-all</artifactId>
             <version>5.5.7</version>
         </dependency>
-
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-test</artifactId>
             <scope>test</scope>
         </dependency>
-
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpclient</artifactId>
@@ -136,13 +134,11 @@
             <artifactId>commons-io</artifactId>
             <version>2.4</version>
         </dependency>
-
         <dependency>
             <groupId>com.alibaba.boot</groupId>
             <artifactId>nacos-config-spring-boot-starter</artifactId>
             <version>${nacos.config.version}</version>
         </dependency>
-
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-actuator</artifactId>
HttpClientPool.java
@@ -10,12 +10,13 @@ import org.apache.http.NoHttpResponseException;
 import org.apache.http.client.HttpRequestRetryHandler;
 import org.apache.http.client.config.RequestConfig;
 import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.client.HttpClients;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.protocol.HttpContext;
-import org.springframework.beans.factory.annotation.Qualifier;
+import org.apache.http.ssl.SSLContextBuilder;
 import org.springframework.context.annotation.Bean;
 import org.springframework.stereotype.Component;
 
@@ -77,109 +78,71 @@ public class HttpClientPool {
         this.retryNum = retryNum;
     }
 
-    /**
-     * First instantiate a connection pool manager and set the maximum total and concurrent connection counts
-     *
-     * @return
-     */
-    @Bean(name = "httpClientConnectionManager")
-    public PoolingHttpClientConnectionManager getHttpClientConnectionManager() {
-        PoolingHttpClientConnectionManager httpClientConnectionManager = new PoolingHttpClientConnectionManager();
-        // maximum total connections
-        httpClientConnectionManager.setMaxTotal(maxTotal);
-        // maximum concurrent connections per route
-        httpClientConnectionManager.setDefaultMaxPerRoute(defaultMaxPerRoute);
-        return httpClientConnectionManager;
-    }
-
-    /**
-     * Instantiate the pool and set the connection pool manager.
-     * The connection manager instantiated above is injected here as a parameter
-     *
-     * @param httpClientConnectionManager
-     * @return
-     */
-    @Bean(name = "httpClientBuilder")
-    public HttpClientBuilder getHttpClientBuilder(@Qualifier("httpClientConnectionManager") PoolingHttpClientConnectionManager httpClientConnectionManager) {
-        // HttpClientBuilder's constructor is protected, so it cannot be instantiated with new; use the static HttpClientBuilder.create() method instead
-        HttpClientBuilder httpClientBuilder = HttpClientBuilder.create();
-        httpClientBuilder.setConnectionManager(httpClientConnectionManager);
-        return httpClientBuilder;
-    }
-
-    /**
-     * Inject the pool, used to obtain the httpClient
-     *
-     * @param httpClientBuilder
-     * @return
-     */
-    @Bean
-    public CloseableHttpClient getCloseableHttpClient(@Qualifier("httpClientBuilder") HttpClientBuilder httpClientBuilder,@Qualifier("httpRetryHandler") HttpRequestRetryHandler httpRetryHandler){
-        return httpClientBuilder
-                .setRetryHandler(httpRetryHandler)
-                .build();
-    }
-
-    /**
-     * Builder is an inner class of RequestConfig
-     * A Builder object is obtained via RequestConfig.custom()
-     * Set the builder's connection parameters
-     * Proxy, cookieSpec and other attributes can also be configured here if needed
-     *
-     * @return
-     */
-    @Bean(name = "builder")
-    public RequestConfig.Builder getBuilder() {
-        RequestConfig.Builder builder = RequestConfig.custom();
-        return builder.setConnectTimeout(connectTimeout)
-                .setConnectionRequestTimeout(connectionRequestTimeout)
-                .setSocketTimeout(socketTimeout)
-                .setStaleConnectionCheckEnabled(staleConnectionCheckEnabled);
+    @Bean(name = "httpClient")
+    public CloseableHttpClient getCloseableHttpClient(){
+        CloseableHttpClient httpClient = null;
+        try {
+            HttpRequestRetryHandler httpRetryHandler = new HttpRequestRetryHandler() {
+                @Override
+                public boolean retryRequest(IOException exception, int executionCount, HttpContext context) {
+                    if (executionCount >= retryNum) {// give up once the retry limit is reached
+                        log.error("已完成重试次数");
+                        return false;
+                    }
+                    if (exception instanceof NoHttpResponseException) {// retry if the server dropped the connection
+                        return true;
+                    }
+                    if (exception instanceof SSLHandshakeException) {// do not retry SSL handshake exceptions
+                        return false;
+                    }
+                    if (exception instanceof ConnectException) {// connection refused
+                        return false;
+                    }
+                    if (exception instanceof InterruptedIOException) {// timeout
+                        return true;
+                    }
+                    if (exception instanceof UnknownHostException) {// target host unreachable
+                        return false;
+                    }
+                    if (exception instanceof SSLException) {// SSL handshake exception
+                        return false;
+                    }
+                    HttpClientContext clientContext = HttpClientContext.adapt(context);
+                    HttpRequest request = clientContext.getRequest();
+                    // retry if the request is idempotent
+                    if (!(request instanceof HttpEntityEnclosingRequest)) {
+                        return true;
+                    }
+                    return false;
+                }
+            };
+            PoolingHttpClientConnectionManager httpClientConnectionManager = new PoolingHttpClientConnectionManager();
+            httpClientConnectionManager.setMaxTotal(maxTotal);// maximum total connections
+            httpClientConnectionManager.setDefaultMaxPerRoute(defaultMaxPerRoute);// maximum concurrent connections per route
+            SSLContext sslContext = SSLContextBuilder.create()
+                    .loadTrustMaterial(new TrustSelfSignedStrategy())
+                    .build();
+            httpClient = HttpClientBuilder
+                    .create()
+                    .setConnectionManager(httpClientConnectionManager)
+                    .setRetryHandler(httpRetryHandler)
+                    .setSslcontext(sslContext)
+                    .setSSLHostnameVerifier(new NoopHostnameVerifier())
+                    .build();
+        } catch (Exception e) {
+            log.error("create httpClient error.", e);
+        }
+        return httpClient;
     }
 
     /**
      * Build a RequestConfig object with the builder
      */
     @Bean(name = "requestConfig")
-    public RequestConfig getRequestConfig(@Qualifier("builder") RequestConfig.Builder builder) {
-        return builder.build();
-    }
-
-    @Bean(name = "httpRetryHandler")
-    public HttpRequestRetryHandler getHttpRetryHandler() {
-        return new HttpRequestRetryHandler() {
-            @Override
-            public boolean retryRequest(IOException exception, int executionCount, HttpContext context) {
-                if (executionCount >= retryNum) {// give up once the retry limit is reached
-                    log.error("已完成重试次数");
-                    return false;
-                }
-                if (exception instanceof NoHttpResponseException) {// retry if the server dropped the connection
-                    return true;
-                }
-                if (exception instanceof SSLHandshakeException) {// do not retry SSL handshake exceptions
-                    return false;
-                }
-                if (exception instanceof ConnectException) {// connection refused
-                    return false;
-                }
-                if (exception instanceof InterruptedIOException) {// timeout
-                    return true;
-                }
-                if (exception instanceof UnknownHostException) {// target host unreachable
-                    return false;
-                }
-                if (exception instanceof SSLException) {// SSL handshake exception
-                    return false;
-                }
-                HttpClientContext clientContext = HttpClientContext.adapt(context);
-                HttpRequest request = clientContext.getRequest();
-                // retry if the request is idempotent
-                if (!(request instanceof HttpEntityEnclosingRequest)) {
-                    return true;
-                }
-                return false;
-            }
-        };
+    public RequestConfig getRequestConfig() {
+        return RequestConfig.custom().setConnectTimeout(connectTimeout)
+                .setConnectionRequestTimeout(connectionRequestTimeout)
+                .setSocketTimeout(socketTimeout)
+                .setStaleConnectionCheckEnabled(staleConnectionCheckEnabled).build();
     }
 }
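The refactor above folds the separate connection-manager, builder, and retry-handler beans into a single httpClient bean, and builds the RequestConfig inline; the new SSL setup trusts self-signed certificates and disables hostname verification. A minimal usage sketch of the two remaining beans, assuming the standard HttpClient 4.x API — the HttpGet/EntityUtils calls below are illustrative and not part of this commit:

    import org.apache.http.client.config.RequestConfig;
    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.util.EntityUtils;

    public class HttpClientPoolUsageSketch {
        // httpClient and requestConfig stand in for the beans defined above (injected by Spring in real code).
        public byte[] get(CloseableHttpClient httpClient, RequestConfig requestConfig, String url) throws Exception {
            HttpGet get = new HttpGet(url);
            get.setConfig(requestConfig); // apply the timeouts from the requestConfig bean
            try (CloseableHttpResponse response = httpClient.execute(get)) {
                return EntityUtils.toByteArray(response.getEntity());
            }
        }
    }

Note that HttpClientBuilder.setSslcontext is deprecated in HttpClient 4.5 in favor of setSSLContext; the behavior is the same either way here.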
KafkaConsumerConfig.java
@@ -10,7 +10,6 @@ import org.springframework.context.annotation.Configuration;
 import org.springframework.kafka.annotation.EnableKafka;
 import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
 import org.springframework.kafka.config.KafkaListenerContainerFactory;
-import org.springframework.kafka.core.ConsumerFactory;
 import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
 import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
 import org.springframework.kafka.listener.ContainerProperties;
@@ -22,7 +21,7 @@ import java.util.Map;
 //@ConfigurationProperties(prefix = "kafka.consumer")
 @NacosConfigurationProperties(prefix = "kafka.consumer", dataId = "${nacos.config.data-id}", groupId = "${nacos.config.group}", type = ConfigType.YAML, autoRefreshed = true)
 @EnableKafka
-public class KafkaConsumerConfig {
+public class KafkaConsumer {
 
     private String servers;
     private boolean enable_auto_commit;
@@ -88,7 +87,7 @@ public class KafkaConsumerConfig {
     @Bean
     public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
         ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
-        factory.setConsumerFactory(consumerFactory());
+        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfigs()));
         factory.setConcurrency(concurrency);
         factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
         factory.setBatchListener(batch_listener);// enable batch consumption
@@ -96,11 +95,6 @@ public class KafkaConsumerConfig {
         return factory;
     }
 
-    private ConsumerFactory<String, String> consumerFactory() {
-        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
-    }
-
-
     private Map<String, Object> consumerConfigs() {
         Map<String, Object> propsMap = new HashMap<>();
         propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
@@ -112,9 +106,11 @@ public class KafkaConsumerConfig {
         propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
         propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, group_id);
         propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, auto_offset_reset);
-        propsMap.put("security.protocol", "SASL_PLAINTEXT");
-        propsMap.put("sasl.mechanism", "PLAIN");
-        propsMap.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username="+sasl_username+" password="+sasl_password+";");
+        if(servers.contains("9094")){
+            propsMap.put("security.protocol", "SASL_PLAINTEXT");
+            propsMap.put("sasl.mechanism", "PLAIN");
+            propsMap.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username="+sasl_username+" password="+sasl_password+";");
+        }
         return propsMap;
     }
 
@@ -122,5 +118,4 @@ public class KafkaConsumerConfig {
     public KafkaConsumerListener listener() {
         return new KafkaConsumerListener();
     }
-
 }
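Two things change above: the consumer factory is built inline, and the SASL properties are applied only when the bootstrap servers use port 9094. As written, the sasl.jaas.config value is concatenated without quotes around the username and password, whereas the Kafka documentation shows these PlainLoginModule option values quoted. A hedged sketch of the same lines with quoting — an editor's suggestion, not part of this commit; sasl_username and sasl_password here are plain parameters standing in for the fields of the class above:

    import java.util.Map;

    public class SaslJaasSketch {
        // Builds the same sasl.* entries as above, with the PlainLoginModule
        // options quoted as shown in the Kafka documentation.
        public static void addSaslProps(Map<String, Object> propsMap, String sasl_username, String sasl_password) {
            propsMap.put("security.protocol", "SASL_PLAINTEXT");
            propsMap.put("sasl.mechanism", "PLAIN");
            propsMap.put("sasl.jaas.config", String.format(
                    "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";",
                    sasl_username, sasl_password));
        }
    }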
ThreadPoolFactory.java
@@ -6,8 +6,7 @@ import com.alibaba.nacos.api.config.annotation.NacosConfigurationProperties;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
+import java.util.concurrent.*;
 
 @Configuration
 //@ConfigurationProperties(prefix = "thread")
@@ -25,7 +24,9 @@ public class ThreadPoolFactory {
     }
 
     @Bean(name = "threadPool")
-    public ExecutorService getThreadPool() {
-        return Executors.newFixedThreadPool(maxSize);
+    public ThreadPoolExecutor getThreadPool() {
+        return new ThreadPoolExecutor(
+                maxSize, maxSize,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(maxSize*2));
     }
 }
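getThreadPool now returns an explicit ThreadPoolExecutor whose queue is bounded at maxSize * 2, instead of Executors.newFixedThreadPool, whose queue is unbounded. With no handler argument the pool uses the default AbortPolicy, which throws RejectedExecutionException once all maxSize threads are busy and the queue is full; the Kafka listener later in this commit spins on getActiveCount() before submitting for the same reason. An alternative sketch using CallerRunsPolicy, which pushes back on the submitting thread instead — an editor's suggestion under the same sizing, not part of the commit:

    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class ThreadPoolSketch {
        // Same sizing as the bean above; when the bounded queue is full the submitting
        // thread runs the task itself, so submissions slow down instead of throwing.
        public static ThreadPoolExecutor bounded(int maxSize) {
            return new ThreadPoolExecutor(
                    maxSize, maxSize,
                    0L, TimeUnit.MILLISECONDS,
                    new LinkedBlockingQueue<>(maxSize * 2),
                    new ThreadPoolExecutor.CallerRunsPolicy());
        }
    }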
KafkaConsumerListener.java
@@ -9,6 +9,7 @@ import com.zdjizhi.syncfile.core.SyncFiles;
 import com.zdjizhi.syncfile.entity.Source;
 import com.zdjizhi.syncfile.entity.SysFileSync;
 import com.zdjizhi.syncfile.monitor.MonitorProperties;
+import com.zdjizhi.syncfile.utils.HttpUtil;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.kafka.annotation.KafkaListener;
@@ -17,6 +18,7 @@ import org.springframework.stereotype.Component;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.ThreadPoolExecutor;
 
 @Component
 public class KafkaConsumerListener {
@@ -26,6 +28,10 @@ public class KafkaConsumerListener {
     SyncFiles syncFiles;
     @Autowired
     MonitorProperties monitorProperties;
+    @Autowired
+    HttpUtil httpUtil;
+    @Autowired
+    ThreadPoolExecutor threadPool;
 
     @KafkaListener(topics = {"${kafka.consumer.topic}"}, containerFactory = "kafkaListenerContainerFactory")
     public void listen(List<ConsumerRecord<?, ?>> records, Acknowledgment ack) {
@@ -36,19 +42,26 @@ public class KafkaConsumerListener {
                 JSONObject jsonObj = (JSONObject) JSON.parse(record.value().toString());
                 SysFileSync sysFileSync = JSON.toJavaObject(jsonObj, SysFileSync.class);
                 if (sysFileSync != null) {
-                    List<Source> sourceList = sysFileSync.getSourceList();
-                    if(sourceList.size() < 1){
-                        log.error("kafka data error, sourceList is null. kafka data: "+record.value().toString());
+                    List<Source> sourceList = sysFileSync.getSource_list();
+                    if (sourceList.size() < 1) {
+                        log.error("kafka data error, sourceList is null. kafka data: " + record.value().toString());
                         monitorProperties.addFileSyncError();
-                    }else {
+                    } else {
                         fileList.add(sourceList);
                     }
-                }else {
-                    log.error("parse kafka data error. kafka data: "+record.value().toString());
+                } else {
+                    log.error("parse kafka data error. kafka data: " + record.value().toString());
                     monitorProperties.addFileSyncError();
                 }
+                monitorProperties.addKafkaRecordCount();
             }
-            syncFiles.syncFiles(fileList);
+            while (true) {
+                if (threadPool.getActiveCount() < threadPool.getMaximumPoolSize()) {
+                    threadPool.submit(() -> syncFiles.syncFiles(fileList));
+                    break;
+                }
+            }
+            ack.acknowledge();
         } catch (Exception e) {
             log.error("consume kafka data error.", e);
             monitorProperties.addFileSyncError();
SyncFiles.java
@@ -2,17 +2,13 @@ package com.zdjizhi.syncfile.core;
 
 import cn.hutool.log.Log;
 import cn.hutool.log.LogFactory;
-import com.zdjizhi.syncfile.config.ThreadPoolFactory;
 import com.zdjizhi.syncfile.entity.Source;
 import com.zdjizhi.syncfile.monitor.MonitorProperties;
 import com.zdjizhi.syncfile.utils.HttpUtil;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
-import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
 
 @Component
 public class SyncFiles {
@@ -21,62 +17,33 @@ public class SyncFiles {
     @Autowired
     private HttpUtil httpUtil;
     @Autowired
-    private ExecutorService threadPool;
-    @Autowired
-    private ThreadPoolFactory threadPoolFactory;
-    @Autowired
     MonitorProperties monitorProperties;
 
     public void syncFiles(List<List<Source>> fileList) {
-        List<Callable<Boolean>> callableList = new ArrayList<>();
         try {
             for (List<Source> sourceList : fileList) {
-                callableList.add(() -> {
-                    boolean status = false;
-                    try {
-                        for (Source source : sourceList) {
-                            String source_oss_path = source.getSource_oss_path();
-                            String destination_oss_path = source.getDestination_oss_path();
-                            if (source_oss_path != null && !"".equals(source_oss_path)
-                                    && destination_oss_path != null && !"".equals(destination_oss_path)) {
-                                byte[] file = httpUtil.httpGetFile(source_oss_path);
-                                if (file != null) {
-                                    boolean isSuccess = httpUtil.httpPostFile(destination_oss_path, file);
-                                    if (!isSuccess) {
-                                        log.error("Sync file failed, post oss file error. destination_oss_path: {}", destination_oss_path);
-                                        monitorProperties.addPostFileErrorCount();
-                                        return false;
-                                    } else {
-                                        status = true;
-                                    }
-                                } else {
-                                    log.error("Sync file failed, get hos file error. source_oss_path: {}", source_oss_path);
-                                    monitorProperties.addDownloadFileErrorCount();
-                                    return false;
-                                }
-                            } else {
-                                log.error("Sync file failed, source_oss_path or destination_oss_path is incorrect. source_oss_path: {} ,destination_oss_path: {}", source_oss_path, destination_oss_path);
-                                monitorProperties.addFileSyncError();
-                                return false;
-                            }
-                        }
-                    } catch (Exception e) {
-                        log.error("Sync file failed.", e);
-                        monitorProperties.addFileSyncError();
-                        status = false;
-                    }
-                    return status;
-                });
-                if (callableList.size() == threadPoolFactory.getMaxSize()) {
-                    threadPool.invokeAll(callableList);
-                    callableList.clear();
-                }
-            }
-            if (callableList.size() > 0) {
-                threadPool.invokeAll(callableList);
-                callableList.clear();
-            }
-        } catch (InterruptedException e) {
+                for (Source source : sourceList) {
+                    String source_oss_path = source.getSource_oss_path();
+                    String destination_oss_path = source.getDestination_oss_path();
+                    if (source_oss_path != null && !"".equals(source_oss_path) && destination_oss_path != null && !"".equals(destination_oss_path)) {
+                        byte[] file = httpUtil.httpGetFile(source_oss_path);
+                        if (file != null) {
+                            boolean isSuccess = httpUtil.httpPostFile(destination_oss_path, file);
+                            if (!isSuccess) {
+                                log.error("Sync file failed, post oss file error. destination_oss_path: {}", destination_oss_path);
+                                monitorProperties.addPostFileErrorCount();
+                            }
+                        } else {
+                            log.error("Sync file failed, get hos file error. source_oss_path: {}", source_oss_path);
+                            monitorProperties.addDownloadFileErrorCount();
+                        }
+                    } else {
+                        log.error("Sync file failed, source_oss_path or destination_oss_path is incorrect. source_oss_path: {} ,destination_oss_path: {}", source_oss_path, destination_oss_path);
+                        monitorProperties.addFileSyncError();
+                    }
+                }
+            }
+        } catch (Exception e) {
             log.error("Sync files failed.", e);
             monitorProperties.addFileSyncError();
         }
Source.java
@@ -5,6 +5,11 @@ public class Source {
     private String source_oss_path;
     private String destination_oss_path;
 
+    public Source(String source_oss_path, String destination_oss_path) {
+        this.source_oss_path = source_oss_path;
+        this.destination_oss_path = destination_oss_path;
+    }
+
     public String getSource_oss_path() {
         return source_oss_path;
     }
SysFileSync.java
@@ -3,18 +3,18 @@ package com.zdjizhi.syncfile.entity;
 import java.util.List;
 
 public class SysFileSync {
-    private List<Source> sourceList;
+    private List<Source> source_list;
     private long common_log_id;
     private long common_recv_time;
     private String common_schema_type;
     private long processing_time;
 
-    public List<Source> getSourceList() {
-        return sourceList;
+    public List<Source> getSource_list() {
+        return source_list;
     }
 
-    public void setSourceList(List<Source> sourceList) {
-        this.sourceList = sourceList;
+    public void setSource_list(List<Source> source_list) {
+        this.source_list = source_list;
     }
 
     public long getCommon_log_id() {
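The rename from sourceList/getSourceList to source_list/getSource_list changes the JSON property name that fastjson derives from the accessors, so the entity now binds to a payload keyed source_list, presumably matching what the upstream producer emits. A small sketch of the resulting serialization, using the Source constructor added above — the path values are placeholders:

    import com.alibaba.fastjson.JSON;
    import java.util.Collections;

    public class SysFileSyncJsonSketch {
        public static void main(String[] args) {
            SysFileSync sync = new SysFileSync();
            sync.setSource_list(Collections.singletonList(new Source("source/path", "destination/path")));
            // After the rename the serialized key is "source_list" (previously "sourceList").
            System.out.println(JSON.toJSONString(sync));
        }
    }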
LogChartMetricsFilter.java
@@ -57,12 +57,15 @@ public class LogChartMetricsFilter implements Filter {
         Long hosError = monitorProperties.getHosError();
         Long ossError = monitorProperties.getOssError();
 
+        Long kafkaRecordCount = monitorProperties.getKafkaRecordCount();
+
         dashboardMap.put("downloadFileSuccessCount", downloadFileSuccessCount);
         dashboardMap.put("downloadFileErrorCount",downloadFileErrorCount);
         dashboardMap.put("postFileSuccessCount", postFileSuccessCount);
         dashboardMap.put("postFileErrorCount", postFileErrorCount);
         dashboardMap.put("downloadFileSize", downloadFileSize);
         dashboardMap.put("postFileSize", postFileSize);
+        dashboardMap.put("kafkaRecordCount",kafkaRecordCount);
 
         errorTypeMap.put("fileSyncError",fileSyncError);
         errorTypeMap.put("hosError",hosError);
@@ -91,5 +94,4 @@ public class LogChartMetricsFilter implements Filter {
             }
         }
     }
-
 }
MonitorProperties.java
@@ -4,6 +4,8 @@ import org.springframework.context.annotation.Configuration;
 
 @Configuration
 public class MonitorProperties {
+    private static Long kafkaRecordCount = 0L;
+
     private static Long downloadFileSuccessCount = 0L;
     private static Long downloadFileErrorCount = 0L;
     private static Long postFileSuccessCount = 0L;
@@ -15,6 +17,14 @@ public class MonitorProperties {
     private static Long hosError = 0L;
     private static Long ossError = 0L;
 
+    public Long getKafkaRecordCount() {
+        return kafkaRecordCount;
+    }
+
+    public void addKafkaRecordCount() {
+        kafkaRecordCount = kafkaRecordCount + 1;
+    }
+
     public void addDownloadFileSuccessCount() {
         downloadFileSuccessCount = downloadFileSuccessCount + 1;
     }
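The new kafkaRecordCount counter follows the existing pattern: a static Long bumped with count = count + 1. That read-modify-write is not atomic, and this commit also introduces a thread pool, so increments can be lost if several threads update a counter at once. A hedged alternative using AtomicLong — an editor's sketch, not part of the commit:

    import java.util.concurrent.atomic.AtomicLong;

    public class MonitorCountersSketch {
        private static final AtomicLong kafkaRecordCount = new AtomicLong();

        public void addKafkaRecordCount() {
            kafkaRecordCount.incrementAndGet(); // atomic increment, safe across listener and pool threads
        }

        public Long getKafkaRecordCount() {
            return kafkaRecordCount.get();
        }
    }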
HttpUtil.java
@@ -26,7 +26,7 @@ public class HttpUtil {
     @Autowired
     private RequestConfig requestConfig;
     @Autowired
-    MonitorProperties monitorProperties;
+    private MonitorProperties monitorProperties;
 
     public byte[] httpGetFile(String url) {
         byte[] data = null;