Commit e3743108 authored by Nikolay Stanchev

[ Issue #56 ] - Refactored VerifySimResults to use pytest

parent de91fa74
The original standalone script (removed in this commit):

import sys
import urllib.parse
import urllib.request

queryReference = {
    "cpu_usage" : "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"cpu_usage\"",
    "ipendpoint_route" : "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"ipendpoint_route\"",
    "mpegdash_service" : "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"mpegdash_service\"",
    "net_port_io" : "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"net_port_io\"",
    "vm_res_alloc" : "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"vm_res_alloc\""
}

resultReference = {
    "cpu_usage" : "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu_usage\",\"columns\":[\"time\",\"count_cpu_active_time\",\"count_cpu_idle_time\",\"count_cpu_usage\"],\"values\":[[\"1970-01-01T00:00:00Z\",7200,7200,7200]]}]}]}",
    "ipendpoint_route" : "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"ipendpoint_route\",\"columns\":[\"time\",\"count_http_requests_fqdn_m\",\"count_network_fqdn_latency\"],\"values\":[[\"1970-01-01T00:00:00Z\",7200,7200]]}]}]}",
    "mpegdash_service" : "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"mpegdash_service\",\"columns\":[\"time\",\"count_avg_response_time\",\"count_peak_response_time\",\"count_requests\"],\"values\":[[\"1970-01-01T00:00:00Z\",7200,7200,7200]]}]}]}",
    "net_port_io" : "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"net_port_io\",\"columns\":[\"time\",\"count_RX_BYTES_PORT_M\",\"count_TX_BYTES_PORT_M\"],\"values\":[[\"1970-01-01T00:00:00Z\",7200,7200]]}]}]}",
    "vm_res_alloc" : "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"vm_res_alloc\",\"columns\":[\"time\",\"count_cpu\",\"count_memory\",\"count_storage\"],\"values\":[[\"1970-01-01T00:00:00Z\",12,12,12]]}]}]}"
}

def checkResult( query, queryResult ):
    result = False

    if query != None and queryResult != None:
        if ( query in resultReference ):
            if ( resultReference[query] == queryResult ):
                print ( "Result correct" )
                result = True
            else:
                print ( "Incorrect result for query: " + query )
                print ( "Expected = " + resultReference[query] )
                print ( "Result = " + queryResult )
        else:
            print( "Could not find query result for: " + query )
    else:
        print( "Could not check result: invalid parameters" )

    return result

def sendInfluxQuery( url, query ):
    query = urllib.parse.urlencode( {'q': query} )
    query = query.encode( 'ascii' )
    req = urllib.request.Request( url + '/query ', query )
    result = urllib.request.urlopen( req )

    return result.read().decode("utf-8").strip()

# Entry point
# ---------------------------------------------------------------------------------------
testFailed = False

for key in list( queryReference ):
    query = queryReference[key]
    result = sendInfluxQuery( "http://localhost:8086", query )

    if checkResult( key, result ) == False:
        testFailed = True
        break

if testFailed :
    print( "Failed simulation result test" )
    sys.exit( 1 )
else:
    print( "Test succeeded" )
    sys.exit( 0 )

The refactored pytest module (added in this commit):

from urllib.parse import urlencode
from urllib.request import Request, urlopen


class TestSimulation(object):
    """
    A 'testing' class used to group all the tests related to the simulation data
    """

    # a class variable storing references between keywords and queries, which will be tested
    query_reference = {
        "cpu_usage": "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"cpu_usage\"",
        "ipendpoint_route": "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"ipendpoint_route\"",
        "mpegdash_service": "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"mpegdash_service\"",
        "net_port_io": "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"net_port_io\"",
        "vm_res_alloc": "SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"vm_res_alloc\""
    }

    # a class variable storing references between keywords and results expected when executing the queries under test
    result_reference = {
        "cpu_usage": "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu_usage\",\"columns\":[\"time\",\"count_cpu_active_time\",\"count_cpu_idle_time\",\"count_cpu_usage\"],\"values\":[[\"1970-01-01T00:00:00Z\",7200,7200,7200]]}]}]}",
        "ipendpoint_route": "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"ipendpoint_route\",\"columns\":[\"time\",\"count_http_requests_fqdn_m\",\"count_network_fqdn_latency\"],\"values\":[[\"1970-01-01T00:00:00Z\",7200,7200]]}]}]}",
        "mpegdash_service": "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"mpegdash_service\",\"columns\":[\"time\",\"count_avg_response_time\",\"count_peak_response_time\",\"count_requests\"],\"values\":[[\"1970-01-01T00:00:00Z\",7200,7200,7200]]}]}]}",
        "net_port_io": "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"net_port_io\",\"columns\":[\"time\",\"count_RX_BYTES_PORT_M\",\"count_TX_BYTES_PORT_M\"],\"values\":[[\"1970-01-01T00:00:00Z\",7200,7200]]}]}]}",
        "vm_res_alloc": "{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"vm_res_alloc\",\"columns\":[\"time\",\"count_cpu\",\"count_memory\",\"count_storage\"],\"values\":[[\"1970-01-01T00:00:00Z\",12,12,12]]}]}]}"
    }

    def test_simulation(self):
        """
        This is the entry point of the test. This method will be found and executed when the module is run using pytest
        """

        for query_key in self.query_reference:
            query = self.query_reference[query_key]
            query_result = self.send_query("http://localhost:8086", query)

            self.check_result(query_key, query_result)
            print("Successfully passed test for '{0}' query".format(query_key))

    def check_result(self, query, query_result):
        """
        A test for a single query result given the query reference and the actual result from executing it

        :param query: the executed query reference, which is under test
        :param query_result: the result from the executed query
        """

        assert query is not None, "The query argument must be an existing object."
        assert query_result is not None, "The query result argument must be an existing object."

        assert query in self.query_reference, "The query reference {0} is not found in the queries under test.".format(query)

        assert self.result_reference[query] == query_result, "Simulation test failure"

    @staticmethod
    def send_query(url, query):
        """
        An auxiliary static method to send a query to a url and retrieve the result

        :param url: the target url to which the query is sent to - a string containing a valid URL address
        :param query: the query to be executed on the given URL
        :return: the result of the executed query
        """

        query = urlencode({"q": query}).encode("ascii")
        request = Request("{0}/query".format(url), query)
        result = urlopen(request)

        return result.read().decode("utf-8").strip()
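Because the refactored module follows pytest's discovery conventions (a Test*-prefixed class containing test_*-prefixed methods in a test_*.py file), it is collected and run by pytest rather than executed as a script, and a failed assertion is reported per test instead of through sys.exit(1) as in the original. The sketch below is a minimal, hedged example of invoking it programmatically; the file name test_simulation.py and the reachable InfluxDB instance at http://localhost:8086 are assumptions, not part of this commit.

# A minimal sketch of running the refactored test module programmatically.
# Assumption: the module above is saved as "test_simulation.py" (any name
# matching pytest's default test_*.py pattern would be collected the same way).
import pytest

# "-s" disables output capturing so the print() calls inside test_simulation are visible
exit_code = pytest.main(["-s", "test_simulation.py"])

The same run can of course be started from the command line with the pytest executable; the programmatic form is shown only to keep the example in Python.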