1. Add a pure-API log upload keyword, plus targeted-verification keywords and test cases for the full-flow log fields
@@ -247,7 +247,6 @@ def logapiverify(schemauerl,logurl, token, starttime, endtime,logtype):
def loglistverify(logurl, schemauerl, token, starttime, endtime, logtype, filtervalue):
    a = schema(schemauerl, token)
    fields = a["data"]["fields"]
    print(fields)
    url = logurl  # "http://192.168.44.72:8080/v1/log/list"
    headers = {"Content-Type": "application/json",
               "Authorization": token}
@@ -258,12 +257,85 @@ def loglistverify(logurl, schemauerl, token, starttime, endtime, logtype, filter
        "fields": fields,
        "filter": filtervalue
    }
    # print(json.dumps(data))
    response1 = requests.post(url=url, data=json.dumps(data), headers=headers)
    code = response1.json()["code"]
    assert code == 200
    print(response1.json()["code"])
    return response1.json()

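# --- Minimal usage sketch (not part of the original change) -------------------
# Shows how loglistverify might be driven from a test case. The host, port,
# token, time window and the field name in the filter are hypothetical
# placeholders, not values taken from this commit.
if __name__ == "__main__":
    HOST = "192.168.44.72"                                   # assumed test host
    TOKEN = "Bearer <token>"                                 # assumed auth token
    LOG_URL = "http://%s:8080/v1/log/list" % HOST
    SCHEMA_URL = "http://%s:8080/v1/log/schema?logType=event" % HOST  # assumed schema endpoint
    body = loglistverify(LOG_URL, SCHEMA_URL, TOKEN,
                         starttime="1672502400", endtime="1672588800",
                         logtype="event",
                         filtervalue="common_src_ip = '10.0.0.1'")    # hypothetical field
    print(body)
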
# Targeted verification: loop over every field returned in the log list and query with it
def loglistverifys(logurl, schemaurl, token, starttime, endtime, logtype, datajson):
    nullkey = []
    data = datajson
    keylist = LogResponseVAL.getKeys(data)
    a = schema(schemaurl, token)
    fields = a["data"]["fields"]
    for i in keylist:
        conditions = data[i]
        for field in fields:
            name = field["name"]
            if field["doc"] is None or field["doc"]["visibility"] is None:
                if i == name:
                    if conditions is not None and conditions != "":
                        if field["type"] == "string":
                            if conditions[0] == "'" and conditions[-1] == "'":
                                filtervalue = i + " = " + conditions
                                VasserValue = i + " = " + conditions[1:-1]
                            else:
                                filtervalue = i + " = " + "'" + conditions + "'"
                                VasserValue = i + " = " + conditions
                        else:
                            if i in ("common_recv_time", "common_start_time", "common_end_time", "common_processing_time"):
                                timeArray = time.strptime(conditions, "%Y-%m-%d %H:%M:%S")
                                timeStamp = str(int(time.mktime(timeArray)))
                                filtervalue = i + " = " + timeStamp
                                VasserValue = filtervalue
                            else:
                                filtervalue = i + " = " + str(conditions)
                                VasserValue = filtervalue
                        print("filtervalue", filtervalue)
                        # Query the log list with the extracted filter condition
                        responsebody = loglistverify(logurl, schemaurl, token, starttime, endtime, logtype,
                                                     filtervalue)
                        filterlist = [VasserValue]
                        print(VasserValue)
                        LogResponseVAL.FieldValidation(responsebody, filterlist)
                    else:
                        nullkey.append(i)  # collect every field whose value is None or ""
    return nullkey

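# --- Sketch (not part of the original change) ----------------------------------
# The branching above (quote string values, convert the four common_*_time fields
# to an epoch timestamp, stringify everything else) is repeated in several
# keywords below. A shared helper could look roughly like this; build_filter and
# TIME_FIELDS are hypothetical names, shown only as a possible refactor.
import time

TIME_FIELDS = {"common_recv_time", "common_start_time",
               "common_end_time", "common_processing_time"}

def build_filter(name, value, field_type):
    """Build an equality filter expression for one schema field (sketch)."""
    if field_type == "string":
        text = str(value)
        if text.startswith("'") and text.endswith("'"):
            text = text[1:-1]                      # tolerate already-quoted values
        return "%s = '%s'" % (name, text)
    if name in TIME_FIELDS:
        # "YYYY-mm-dd HH:MM:SS" -> epoch seconds, mirroring the conversion above
        epoch = int(time.mktime(time.strptime(value, "%Y-%m-%d %H:%M:%S")))
        return "%s = %d" % (name, epoch)
    return "%s = %s" % (name, value)
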
# Loop over multiple records with the variables set as common parameters; if a field
# has no value in one iteration, try it again in the next iteration
def logAllFieldsListInterface(logurl, schemaurl, token, starttime, endtime, logtype, datajson, lognumber, logcycles):
    datalist = datajson["data"]["list"]
    keylist = []
    number = 0
    print(lognumber)
    print(type(lognumber))
    print(logcycles)
    print(type(logcycles))
    for i in range(0, len(datalist), int(lognumber)):  # take lognumber list elements per pass
        number += 1
        nullkeylist = []
        ret = datalist[i:i + int(lognumber)]
        for data in ret:
            nullkey = loglistverifys(logurl, schemaurl, token, starttime, endtime, logtype, data)
            nullkeylist.append(nullkey)
        print(nullkeylist)
        for j in nullkeylist:
            # intersect the keys that came back empty for each record
            if len(keylist) == 0:
                keylist = j
            else:
                # intersection of the two lists
                keylist = list(set(keylist).intersection(set(j)))
        if len(keylist) == 0 or number >= int(logcycles):
            break
    print("Fields with no value in any of the processed data:", keylist)

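# --- Worked example (not part of the original change) --------------------------
# Illustrates the intersection step above: a field is reported as "never
# populated" only if it is empty in every processed record. The field names
# below are hypothetical.
empty_per_record = [["fieldA", "fieldB", "fieldC"],   # empty fields in record 1
                    ["fieldB", "fieldC"],             # empty fields in record 2
                    ["fieldC", "fieldD"]]             # empty fields in record 3
always_empty = set(empty_per_record[0])
for record in empty_per_record[1:]:
    always_empty &= set(record)
print(sorted(always_empty))   # ['fieldC'] -- only fieldC is empty everywhere
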
# Time-distribution query for event logs and communication logs; log retrieval
# condition check (filter content validation)
def distributed_query(logurl, token):
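    # The body of distributed_query falls outside this hunk. Judging from how it
    # is called below (a fully built URL plus a token), a GET-based implementation
    # might look roughly like the sketch that follows; the response handling is an
    # assumption, not the author's code.
    # def distributed_query_sketch(logurl, token):
    #     headers = {"Content-Type": "application/json", "Authorization": token}
    #     response = requests.get(url=logurl, headers=headers)
    #     assert response.json()["code"] == 200   # assumed success convention, as in loglistverify
    #     return response.json()
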
@@ -291,23 +363,54 @@ def LogRetrieve(schemaurl,host,port,token,logType,datajson):
            if i == name:
                if field["type"] == "string":
                    filter = "logType=" + logType + "&" + "filter=" + i + "=" + "'" + str1 + "'"
                    Logurl = "http://" + host + ":" + port + "/v1/interface/gateway/sql/galaxy/log/filter/validation?" + filter
                    print(Logurl)
                    responsebody = distributed_query(Logurl, token)
                else:
                    if i in ("common_recv_time", "common_start_time", "common_end_time", "common_processing_time"):
                        timeArray = time.strptime(conditions, "%Y-%m-%d %H:%M:%S")
                        timeStamp = str(int(time.mktime(timeArray)))
                        filter = "logType=" + logType + "&" + "filter=" + i + "=" + timeStamp
                        Logurl = "http://" + host + ":" + port + "/v1/interface/gateway/sql/galaxy/log/filter/validation?" + filter
                        print(Logurl)
                        responsebody = distributed_query(Logurl, token)
                    else:
                        filter = "logType=" + logType + "&" + "filter=" + i + "=" + str(number)
                        Logurl = "http://" + host + ":" + port + "/v1/interface/gateway/sql/galaxy/log/filter/validation?" + filter
                        print(Logurl)
                        responsebody = distributed_query(Logurl, token)
    Logurl = "http://" + host + ":" + port + "/v1/interface/gateway/sql/galaxy/log/filter/validation?" + filter
    print(Logurl)
    responsebody = distributed_query(Logurl, token)

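# --- Sketch (not part of the original change) ----------------------------------
# The validation URL above is built by plain string concatenation, so filter
# values containing spaces, quotes or '&' are sent unescaped. A standard-library
# alternative is shown below; build_validation_url is a hypothetical helper name,
# and whether the service accepts URL-encoded filters is an assumption.
from urllib.parse import urlencode

def build_validation_url(host, port, log_type, filter_expr):
    query = urlencode({"logType": log_type, "filter": filter_expr})
    return "http://%s:%s/v1/interface/gateway/sql/galaxy/log/filter/validation?%s" % (host, port, query)

# e.g. build_validation_url("192.168.44.72", "8080", "event", "common_src_ip = '10.0.0.1'")
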
# Log retrieval condition check with a complex SQL filter
def LogRetrieveSql(schemaurl, host, port, token, logType, datajson):
    data = datajson["data"]["list"][0]
    keylist = LogResponseVAL.getKeys(data)
    sqllist = random.sample(keylist, 4)
    number = 45585
    str1 = random.choice('abcdefghijklmnopqrstuvwxyz')
    print(sqllist)
    a = schema(schemaurl, token)
    filterlist = []
    fields = a["data"]["fields"]
    for i in sqllist:
        conditions = data[i]
        for field in fields:
            name = field["name"]
            if i == name:
                if field["type"] == "string":
                    if conditions == "" or conditions is None:
                        conditions = str1
                    filter = i + "=" + "'" + conditions + "'"
                else:
                    if i in ("common_recv_time", "common_start_time", "common_end_time", "common_processing_time"):
                        timeArray = time.strptime(conditions, "%Y-%m-%d %H:%M:%S")
                        timeStamp = str(int(time.mktime(timeArray)))
                        filter = i + "=" + timeStamp
                    else:
                        if conditions == "" or conditions is None:
                            conditions = number
                        filter = i + "=" + str(conditions)
                print(filter)
                filterlist.append(filter)
    sqlfilter = "((" + filterlist[0] + " OR " + filterlist[1] + ") AND " + filterlist[2] + ") OR " + filterlist[3]
    _filter = "logType=" + logType + "&" + "filter=" + sqlfilter
    Logurl = "http://" + host + ":" + port + "/v1/interface/gateway/sql/galaxy/log/filter/validation?" + _filter
    print(Logurl)
    responsebody = distributed_query(Logurl, token)
    print(sqlfilter)

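# --- Worked example (not part of the original change) --------------------------
# Shape of the composed SQL filter above, with hypothetical field names/values:
filterlist = ["common_src_ip='a'", "common_dst_port=45585",
              "common_recv_time=1672502400", "common_app='b'"]
sqlfilter = "((" + filterlist[0] + " OR " + filterlist[1] + ") AND " + filterlist[2] + ") OR " + filterlist[3]
print(sqlfilter)
# ((common_src_ip='a' OR common_dst_port=45585) AND common_recv_time=1672502400) OR common_app='b'
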
# Compute the time distribution for raw log retrieval
def timedistribution(logurl, token, starttime, endtime, logtype, granularity, filtervalue):