1. Fix: policy logs could not be sent for HTTP/2 traffic
2. Fix: hjack (hijack) hits could not be processed
@@ -31,7 +31,7 @@ struct pangu_logger
    unsigned int en_sendlog;
    unsigned int en_sendlog_meta;
    unsigned int en_sendlog_body;

    unsigned int local_ip_nr;
    void* global_logger;
    rd_kafka_t *kafka_handle;
@@ -56,7 +56,7 @@ static unsigned int get_ip_by_eth_name(const char *ifname)
    unsigned int ip;

    sockfd = socket(AF_INET, SOCK_DGRAM, 0);
    if (-1 == sockfd)
    {
        goto error;
    }
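For context, the pattern above is the standard SIOCGIFADDR ioctl for reading an interface's IPv4 address. The following is a minimal, self-contained sketch of that pattern; it mirrors standard Linux APIs, not the project's exact get_ip_by_eth_name() implementation, and the helper name ip_of_interface is illustrative.

#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <net/if.h>
#include <netinet/in.h>
#include <arpa/inet.h>

static unsigned int ip_of_interface(const char *ifname)
{
    struct ifreq ifr;
    int sockfd = socket(AF_INET, SOCK_DGRAM, 0);
    if (sockfd == -1)
        return INADDR_NONE;

    memset(&ifr, 0, sizeof(ifr));
    strncpy(ifr.ifr_name, ifname, IFNAMSIZ - 1);

    /* SIOCGIFADDR fills ifr_addr with the interface's IPv4 address. */
    if (ioctl(sockfd, SIOCGIFADDR, &ifr) == -1) {
        close(sockfd);
        return INADDR_NONE;
    }
    close(sockfd);
    return ((struct sockaddr_in *)&ifr.ifr_addr)->sin_addr.s_addr;
}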
@@ -81,7 +81,7 @@ error:
static rd_kafka_t * create_kafka_handle(const char* brokerlist)
{
    char kafka_errstr[1024];
    rd_kafka_t *handle=NULL;
    rd_kafka_conf_t *rdkafka_conf = NULL;

    rdkafka_conf = rd_kafka_conf_new();
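A hedged sketch of how a librdkafka producer handle is typically built from a broker list, in the spirit of create_kafka_handle(); the only configuration key shown is "bootstrap.servers", and the project's real settings may differ.

#include <stdio.h>
#include <librdkafka/rdkafka.h>

static rd_kafka_t *make_producer(const char *brokerlist)
{
    char errstr[1024];
    rd_kafka_conf_t *conf = rd_kafka_conf_new();

    if (rd_kafka_conf_set(conf, "bootstrap.servers", brokerlist,
                          errstr, sizeof(errstr)) != RD_KAFKA_CONF_OK) {
        fprintf(stderr, "conf error: %s\n", errstr);
        rd_kafka_conf_destroy(conf);
        return NULL;
    }

    /* rd_kafka_new() takes ownership of conf on success only. */
    rd_kafka_t *rk = rd_kafka_new(RD_KAFKA_PRODUCER, conf, errstr, sizeof(errstr));
    if (rk == NULL) {
        fprintf(stderr, "producer error: %s\n", errstr);
        rd_kafka_conf_destroy(conf);
        return NULL;
    }
    return rk;
}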
@@ -133,7 +133,7 @@ struct pangu_logger* pangu_log_handle_create(const char* profile, const char* s
    {
        return instance;
    }

    MESA_load_profile_string_def(profile, section, "NIC_NAME",nic_name,sizeof(nic_name),"eth0");
    instance->local_ip_nr=get_ip_by_eth_name(nic_name);
    if(instance->local_ip_nr==INADDR_NONE)
@@ -143,7 +143,7 @@ struct pangu_logger* pangu_log_handle_create(const char* profile, const char* s
    }

    inet_ntop(AF_INET,&(instance->local_ip_nr),instance->local_ip_str,sizeof(instance->local_ip_str));

    MESA_load_profile_int_def(profile, section, "ENTRANCE_ID",&(instance->entry_id),0);
    ret=MESA_load_profile_string_def(profile, section,"KAFKA_BROKERLIST", instance->brokerlist, sizeof(instance->brokerlist), NULL);
    if(ret<0)
@@ -155,17 +155,21 @@ struct pangu_logger* pangu_log_handle_create(const char* profile, const char* s
        instance->kafka_handle=create_kafka_handle(instance->brokerlist);
        if(instance->kafka_handle==NULL)
        {
            TFE_LOG_ERROR(local_logger,"Pangu log init failed. Cannot create lafka handle with brokerlist: %s.", instance->brokerlist);
            goto error_out;
        }
    }

    MESA_load_profile_string_def(profile, section,"KAFKA_TOPIC", instance->topic_name, sizeof(instance->topic_name), "POLICY-EVENT-LOG");

    TFE_LOG_INFO(local_logger, "Pangu kafka brokerlist : %s", instance->brokerlist);
    TFE_LOG_INFO(local_logger, "Pangu kafka topic : %s", instance->topic_name);

    instance->kafka_topic = rd_kafka_topic_new(instance->kafka_handle,instance->topic_name, NULL);
    log_file_upload_para=cache_evbase_parameter_new(profile, section, local_logger);
    instance->log_file_upload_instance=cache_evbase_instance_new(log_file_upload_para, local_logger);
    pthread_mutex_init(&(instance->mutex), NULL);
    return instance;

error_out:
    free(instance);
    return NULL;
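The topic handle above is created with a NULL topic configuration, which in librdkafka means the handle's defaults are used. A small sketch of that lifecycle and the usual teardown order is below; the helper names open_topic/close_producer are illustrative, not part of the project.

#include <librdkafka/rdkafka.h>

static rd_kafka_topic_t *open_topic(rd_kafka_t *rk, const char *name)
{
    /* NULL topic configuration inherits the producer's defaults. */
    return rd_kafka_topic_new(rk, name, NULL);
}

static void close_producer(rd_kafka_t *rk, rd_kafka_topic_t *rkt)
{
    rd_kafka_flush(rk, 10 * 1000);   /* wait up to 10s for in-flight messages */
    rd_kafka_topic_destroy(rkt);
    rd_kafka_destroy(rk);
}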
@@ -185,11 +189,12 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
    char src_ip_str[MAX(INET6_ADDRSTRLEN,INET_ADDRSTRLEN)] = {0};
    char dst_ip_str[MAX(INET6_ADDRSTRLEN,INET_ADDRSTRLEN)] = {0};

    const char *app_proto[]= {"unkonw","http1.0", "http2.0"};

    struct json_spec req_fields[]={ {"cookie", TFE_HTTP_COOKIE},
                                    {"referer", TFE_HTTP_REFERER},
                                    {"user_agent", TFE_HTTP_USER_AGENT} };

    struct json_spec resp_fields[]={ {"content_type", TFE_HTTP_CONT_TYPE},
                                     {"content_len", TFE_HTTP_CONT_LENGTH} };
@@ -203,7 +208,7 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
    cJSON_AddNumberToObject(common_obj, "start_time", cur_time);
    cJSON_AddNumberToObject(common_obj, "end_time", cur_time);
    cJSON_AddNumberToObject(common_obj, "recv_time", cur_time);
    cJSON_AddStringToObject(common_obj, "app_proto", app_proto[http->major_version]);

    switch(addr->addrtype)
    {
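The "app_proto" field is picked by indexing the table with http->major_version, which is where HTTP/2 traffic gets its label. A hedged sketch of a bounds-safe lookup is below; the field name major_version follows the diff, while the guard and the proto_label helper are an illustration, not necessarily the project's actual fix.

#include <stddef.h>

static const char *proto_label(unsigned int major_version)
{
    static const char *app_proto[] = { "unknown", "http1.0", "http2.0" };
    size_t n = sizeof(app_proto) / sizeof(app_proto[0]);

    /* Fall back to the "unknown" slot rather than reading out of bounds. */
    return (major_version < n) ? app_proto[major_version] : app_proto[0];
}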
@@ -237,6 +242,7 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
    cJSON_AddNumberToObject(common_obj, "entrance_id", handle->entry_id);
    cJSON_AddNumberToObject(common_obj, "device_id", 0);
    cJSON_AddStringToObject(common_obj, "url", http->req->req_spec.url);
    cJSON_AddStringToObject(common_obj, "host", http->req->req_spec.host);
    for(size_t i=0;i<sizeof(req_fields)/sizeof(struct json_spec);i++)
    {
        tmp_val=tfe_http_std_field_read(http->req, req_fields[i].field_id);
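The loop above is a table-driven copy: each json_spec entry maps an internal field id to a JSON key. The sketch below shows that shape end to end; struct json_spec, the opaque message type, and the tfe_http_std_field_read() prototype (assumed here to return a C string or NULL) are taken from the diff and may differ from the project's real definitions, and the cJSON header path may also vary by install.

#include <stddef.h>
#include "cJSON.h"                 /* header path may differ */

/* Assumed shapes, inferred from the diff. */
struct json_spec {
    const char *json_key;
    int         field_id;
};
struct tfe_http_msg;               /* opaque here */
const char *tfe_http_std_field_read(const struct tfe_http_msg *msg, int field_id);

static void add_http_fields(cJSON *obj, const struct tfe_http_msg *msg,
                            const struct json_spec *fields, size_t nfields)
{
    for (size_t i = 0; i < nfields; i++) {
        const char *val = tfe_http_std_field_read(msg, fields[i].field_id);
        if (val != NULL)           /* skip headers that are absent */
            cJSON_AddStringToObject(obj, fields[i].json_key, val);
    }
}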
@@ -268,9 +274,9 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
        snprintf(cont_type_whole, sizeof(cont_type_whole), "Content-Type:%s", cont_type_val);
        meta.std_hdr[0]=cont_type_whole;
    }
    tmp=cache_evbase_upload_once_evbuf(handle->log_file_upload_instance, NULL,
                                       log_msg->req_body,
                                       &meta,
                                       log_file_upload_path, sizeof(log_file_upload_path));
    if(tmp==0)
    {
@@ -293,9 +299,9 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
        snprintf(cont_type_whole, sizeof(cont_type_whole), "Content-Type:%s", cont_type_val);
        meta.std_hdr[0]=cont_type_whole;
    }
    tmp=cache_evbase_upload_once_evbuf(handle->log_file_upload_instance, NULL,
                                       log_msg->resp_body,
                                       &meta,
                                       log_file_upload_path, sizeof(log_file_upload_path));

    if(tmp==0)
@@ -312,7 +318,7 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
    for(size_t i=0; i<log_msg->result_num; i++)
    {
        TFE_LOG_DEBUG(handle->local_logger, "URL: %s , hit cfg_id: %d service: %d",
                      http->req->req_spec.url,
                      log_msg->result[i].config_id,
@@ -331,7 +337,7 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
        TFE_LOG_DEBUG(handle->local_logger, "%s", log_payload);

        kafka_status = rd_kafka_produce(handle->kafka_topic, RD_KAFKA_PARTITION_UA, RD_KAFKA_MSG_F_COPY,
                                        log_payload, strlen(log_payload), NULL, 0, NULL);
        free(log_payload);
        cJSON_Delete(per_hit_obj);
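On the produce path, RD_KAFKA_MSG_F_COPY lets the payload be freed immediately after the call, and a failed enqueue is reported through rd_kafka_last_error(). A hedged sketch of that step follows; the produce_log wrapper is illustrative, and the rd_kafka_poll() call is the usual way to serve delivery reports, which may happen elsewhere in the project.

#include <stdio.h>
#include <string.h>
#include <librdkafka/rdkafka.h>

static int produce_log(rd_kafka_t *rk, rd_kafka_topic_t *rkt, char *log_payload)
{
    int rc = rd_kafka_produce(rkt, RD_KAFKA_PARTITION_UA, RD_KAFKA_MSG_F_COPY,
                              log_payload, strlen(log_payload),
                              NULL, 0,     /* no key */
                              NULL);       /* no per-message opaque */
    if (rc == -1)
        fprintf(stderr, "produce failed: %s\n",
                rd_kafka_err2name(rd_kafka_last_error()));

    rd_kafka_poll(rk, 0);   /* non-blocking: serve delivery reports */
    return rc;
}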
@@ -340,7 +346,7 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
            TFE_LOG_ERROR(handle->local_logger, "Kafka produce failed: %s", rd_kafka_err2name(rd_kafka_last_error()));
        }
        send_cnt++;
    }
    }

    cJSON_Delete(common_obj);
    return send_cnt;