Skip to content
Snippets Groups Projects
Commit cf0d7202 authored by Nikolay Stanchev's avatar Nikolay Stanchev
Browse files

[ Issue #56 ] - Code refactoring to include influxdb library

parent e983a001
No related branches found
No related tags found
No related merge requests found
#!/usr/bin/python3
from urllib.parse import urlencode
from urllib.request import Request, urlopen
from influxdb import InfluxDBClient
import pytest
from StreamingSim import run_simulation_fixture
......@@ -12,13 +11,18 @@ class TestSimulation(object):
"""
@pytest.mark.parametrize("query, expected_result", [
    ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"cpu_usage\"",
     {"time": "1970-01-01T00:00:00Z", "count_cpu_active_time": 7200, "count_cpu_idle_time": 7200, "count_cpu_usage": 7200}),
    ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"ipendpoint_route\"",
     {"time": "1970-01-01T00:00:00Z", "count_http_requests_fqdn_m": 7200, "count_network_fqdn_latency": 7200}),
    ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"mpegdash_service\"",
     {"time": "1970-01-01T00:00:00Z", "count_avg_response_time": 7200, "count_peak_response_time": 7200, "count_requests": 7200}),
    ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"net_port_io\"",
     {"time": "1970-01-01T00:00:00Z", "count_RX_BYTES_PORT_M": 7200, "count_TX_BYTES_PORT_M": 7200}),
    ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"vm_res_alloc\"",
     {"time": "1970-01-01T00:00:00Z", "count_cpu": 12, "count_memory": 12, "count_storage": 12})
])
def test_simulation(self, query, expected_result, get_db_client, run_simulation_fixture):
    """
    This is the entry point of the test. This method will be found and executed when the module is run using pytest;
    pytest executes it once per (query, expected_result) pair declared in the parametrize decorator above.

    :param query: the InfluxDB count query under test
    :param expected_result: the dictionary expected as the first (and only) result point of the query
    :param get_db_client: fixture providing an InfluxDBClient connected to the test database
    :param run_simulation_fixture: the imported fixture to use to generate the testing data - the return value of the fixture is not needed in this case
    """

    print("\n")  # prints a blank line for formatting purposes

    # the raise_errors=False argument is given so that we could actually test that the DB didn't return
    # any errors instead of raising an exception
    query_result = get_db_client.query(query, raise_errors=False)

    # test the error attribute of the result is None, that is no error is returned from executing the DB query
    assert query_result.error is None, "An error was encountered while executing query {0}.".format(query)

    # get the dictionary of result points; the next() function just gets the first element of the
    # query results iterator (we only expect one item in the iterator)
    actual_result = next(query_result.get_points())

    assert expected_result == actual_result, "Simulation test failure"

    print("Successfully passed test for the following query: {0}".format(query))
@pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='class')
def get_db_client(self, request):
    """
    Class-scoped fixture returning an InfluxDB client bound to the parametrised test database.

    :param request: the pytest fixture request object carrying the database name parameter
    :return: an InfluxDBClient connected to the local InfluxDB instance
    """

    db_name = request.param['database']
    return InfluxDBClient(host='localhost', port=8086, database=db_name, timeout=10)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment