Add debug logging when sending HTTP business logs; unify the format of the HTTP header flags. (原文: 增加HTTP业务发送日志的调试日志,统一HTTP头部标志的形式。)

This commit is contained in:
luqiuwen
2019-01-29 14:46:46 +06:00
parent f6c1ea0e28
commit 39ac1dede9
2 changed files with 5 additions and 2 deletions

View File

@@ -1225,7 +1225,8 @@ static void cache_read_on_succ(future_result_t * result, void * user)
tfe_http_std_field_write(ctx->cached_response, TFE_HTTP_CONT_TYPE, meta->content_type);
tfe_http_std_field_write(ctx->cached_response, TFE_HTTP_LAST_MODIFIED, meta->last_modified);
tfe_http_std_field_write(ctx->cached_response, TFE_HTTP_ETAG, meta->etag);
tfe_http_nonstd_field_write(ctx->cached_response, "X-Cache-Lookup", "Hit From TFE");
tfe_http_nonstd_field_write(ctx->cached_response, "X-TG-Cache-Lookup", "HIT");
snprintf(temp, sizeof(temp), "%lu", meta->content_length);
tfe_http_std_field_write(ctx->cached_response, TFE_HTTP_CONT_LENGTH, temp);

View File

@@ -294,12 +294,14 @@ int pangu_send_log(struct pangu_logger* handle, const struct pangu_log* log_msg)
{
continue;
}
per_hit_obj=cJSON_Duplicate(common_obj, 1);
cJSON_AddNumberToObject(per_hit_obj, "cfg_id", log_msg->result[i].config_id);
cJSON_AddNumberToObject(per_hit_obj, "service", log_msg->result[i].service_id);
log_payload = cJSON_Print(per_hit_obj);
fprintf(stderr, "%s\n", log_payload);
TFE_LOG_DEBUG(handle->local_logger, "%s", log_payload);
kafka_status = rd_kafka_produce(handle->kafka_topic, RD_KAFKA_PARTITION_UA, RD_KAFKA_MSG_F_COPY,
log_payload, strlen(log_payload), NULL, 0, NULL);
free(log_payload);