Commit b4c41bbf authored by Nikolay Stanchev

Updated the integration test of the aggregator to use the CLMC service directly instead of running the aggregator as a thread
parent bcc866d9
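
The change replaces the in-process AggregatorThread with HTTP calls to the aggregator API that the CLMC service exposes on port 9080. Below is a minimal sketch of that control flow, using the endpoints and payload keys that appear in the diff; the host and database values are illustrative, since the test derives them from the simulator's InfluxDB URL.

# Sketch only: drives the CLMC service aggregator API the way the updated test does.
# Host, database name, and report period are illustrative values, not part of the diff.
import requests

clmc_host = "localhost"  # the test derives this from the simulator's DB URL

# configure the aggregator: report period (seconds) and target InfluxDB database
r = requests.put("http://{0}:9080/aggregator/config".format(clmc_host),
                 json={"aggregator_report_period": 5,
                       "aggregator_database_name": "CLMCMetrics",
                       "aggregator_database_url": "http://{0}:8086".format(clmc_host)})
assert r.status_code == 200

# start the aggregator, let it process measurements, then stop it
r = requests.put("http://{0}:9080/aggregator/control".format(clmc_host), json={"action": "start"})
assert r.status_code == 200
# ... simulation runs and data is aggregated here ...
r = requests.put("http://{0}:9080/aggregator/control".format(clmc_host), json={"action": "stop"})
assert r.status_code == 200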
@@ -28,7 +28,6 @@ import pkg_resources
 from influxdb import InfluxDBClient
 from clmctest.monitoring.StreamingSim import Sim
 from clmctest.monitoring.E2ESim import Simulator
-from clmcservice.aggregation.aggregator import AggregatorThread


 @pytest.fixture(scope="module")
@@ -36,8 +35,9 @@ def streaming_sim_config():
     """
     Reads the service configuration deployed for the streaming simulation test.

-    :return: the python object representing the read YAML file
+    :return: the python object representing the read JSON file
     """

     rspec = pkg_resources.resource_filename('clmctest', 'rspec.json')
     print("\nrspec file: {0}".format(rspec))
@@ -96,17 +96,3 @@ def e2e_simulator(streaming_sim_config):
     influx_url = "http://" + streaming_sim_config[0]['ip_address'] + ":8086"

     return Simulator(database_url=influx_url)
-
-
-@pytest.fixture(scope="module")
-def e2e_aggregator(streaming_sim_config):
-    """
-    A fixture to obtain an instance of the Aggregator class with the configuration parameters.
-
-    :param streaming_sim_config: the configuration object
-
-    :return: an instance of the Aggregator class
-    """
-
-    influx_url = "http://" + streaming_sim_config[0]['ip_address'] + ":8086"
-    return AggregatorThread(database_url=influx_url)
@@ -25,7 +25,8 @@
 import pytest
 import random
 import time
-import threading
+import requests
+import urllib.parse


 class TestE2ESimulation(object):
@@ -34,47 +35,49 @@ class TestE2ESimulation(object):
     """

     @pytest.fixture(scope='class', autouse=True)
-    def run_simulator(self, e2e_simulator, e2e_aggregator):
+    def run_simulator(self, e2e_simulator):
         """
         A fixture, which runs the simulation before running the tests.

         :param e2e_simulator: the simulator for the end-to-end data
-        :param e2e_aggregator: the aggregator which merges the network and service measurements
         """

         random.seed(0)  # Seed random function so we can reliably test for average queries

-        print("Starting aggregator...")
-        event = threading.Event()
-        e2e_aggregator.add_event_lock(event)
-        e2e_aggregator.start()
+        # Configure the aggregator through the CLMC service
+        influx_url = urllib.parse.urlparse(e2e_simulator.db_url)
+        aggregator_control_url = "http://{0}:9080/aggregator/control".format(influx_url.hostname)
+        aggregator_config_url = "http://{0}:9080/aggregator/config".format(influx_url.hostname)

-        event.wait()  # wait until the aggregator thread has set the event lock (it has reached its run method and is ready to start)
+        print("Configuring aggregator with request to {0} ...".format(aggregator_config_url))
+        r = requests.put(aggregator_config_url, json={"aggregator_report_period": 5, "aggregator_database_name": e2e_simulator.db_name, "aggregator_database_url": e2e_simulator.db_url})
+        assert r.status_code == 200

         print("Running simulation, please wait...")
         e2e_simulator.run()

-        print("Waiting for INFLUX to finish receiving simulation data...")
-        time.sleep(e2e_simulator.SIMULATION_LENGTH+10)  # wait for data to finish arriving at the INFLUX database
+        print("Starting aggregator with request to {0}...".format(aggregator_control_url))
+        r = requests.put(aggregator_control_url, json={"action": "start"})  # start the aggregator through the CLMC service
+        assert r.status_code == 200

+        print("Waiting for INFLUX to finish receiving data...")
+        time.sleep(e2e_simulator.SIMULATION_LENGTH)  # wait for data to finish arriving at the INFLUX database
         print("... simulation data fixture finished")

-        print("... stopping aggregator")
-        e2e_aggregator.stop()
+        print("... stopping aggregator with request to {0}...".format(aggregator_control_url))
+        r = requests.put(aggregator_control_url, json={"action": "stop"})  # stop the aggregator through the CLMC service
+        assert r.status_code == 200

-    @pytest.mark.parametrize("query, expected_result", [
+    @pytest.mark.parametrize("query, expected_result, equal_comparison", [
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."network_delays"',
-         {"time": "1970-01-01T00:00:00Z", "count_latency": 120, "count_bandwidth": 120}),
+         {"time": "1970-01-01T00:00:00Z", "count_latency": 120, "count_bandwidth": 120}, True),
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."service_delays"',
-         {"time": "1970-01-01T00:00:00Z", "count_response_time": 24, "count_request_size": 24, "count_response_size": 24}),
+         {"time": "1970-01-01T00:00:00Z", "count_response_time": 24, "count_request_size": 24, "count_response_size": 24}, True),
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."e2e_delays"',
-         {"time": "1970-01-01T00:00:00Z", "count_delay_forward": 46, "count_delay_reverse": 46, "count_delay_service": 46,
-          "count_avg_request_size": 46, "count_avg_response_size": 46, "count_avg_bandwidth": 46}),
-        ('SELECT mean(*) FROM "CLMCMetrics"."autogen"."e2e_delays"',
-         {"time": "1970-01-01T00:00:00Z", "mean_delay_forward": 7.856884057971015, "mean_delay_reverse": 13.391304347826088, "mean_delay_service": 25,
-          'mean_avg_request_size': 10485760, 'mean_avg_response_size': 1024, 'mean_avg_bandwidth': 104857600}),
+         {"time": "1970-01-01T00:00:00Z", "count_delay_forward": 40, "count_delay_reverse": 40, "count_delay_service": 40,
+          "count_avg_request_size": 40, "count_avg_response_size": 40, "count_avg_bandwidth": 40}, False),
         ])
-    def test_simulation(self, influx_db, query, expected_result):
+    def test_simulation(self, influx_db, query, expected_result, equal_comparison):
         """
         This is the entry point of the test. This method will be found and executed when the module is run using pytest
@@ -95,12 +98,9 @@ class TestE2ESimulation(object):
         # get the dictionary of result points; the next() function just gets the first element of the query results generator (we only expect one item in the generator)
         actual_result = next(query_result.get_points())

-        for key in expected_result:
-            print("expected_result == actual_result {0}, {1}".format(expected_result.get(key), actual_result.get(key)))
-            if type(expected_result.get(key)) == float:
-                assert expected_result.get(key) == pytest.approx(actual_result.get(key), 0.3)  # approximate only when comparing float values
-            else:
-                assert expected_result.get(key) == actual_result.get(key), "E2E Simulation test failure"
+        # check if we want to compare for equality or for '>='
+        if equal_comparison:
+            assert expected_result == actual_result, "E2E Simulation test failure"
+        else:
+            for key in expected_result:
+                assert actual_result[key] >= expected_result[key], "E2E Simulation test failure"
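
A note on the new equal_comparison flag: the first two parametrized queries still expect exact counts, while the e2e_delays case only asserts a lower bound, presumably because the aggregator now runs out of process on its own report period, so the exact number of aggregated points present when the query executes is timing-dependent. A minimal, self-contained sketch of that comparison pattern follows; the check_result helper is hypothetical, not part of the commit.

# Sketch of the comparison pattern introduced above; check_result is a hypothetical helper.
def check_result(expected, actual, equal_comparison):
    if equal_comparison:
        # deterministic measurements: the result row must match exactly
        assert expected == actual
    else:
        # timing-dependent measurements: require at least the expected counts
        for key in expected:
            assert actual[key] >= expected[key]

check_result({"count_delay_forward": 40}, {"count_delay_forward": 46}, equal_comparison=False)  # passes: 46 >= 40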