Commit d7b13f03 authored by Nikolay Stanchev

[Issue #61 - added tests for mean configuration model values]

parent 6181c32b
@@ -257,7 +257,7 @@ class Sim(object):
             agent_url = urllib.parse.urlparse(ip_endpoint["agent_url"])
             agent_db_client = InfluxDBClient(host=agent_url.hostname, port=agent_url.port, database=self.influx_db_name, timeout=10)
             delay_time = self._changeVMState(agent_db_client, sim_time, ip_endpoint, config_delay_dist['connected'][0],
-                                             config_delay_dist['connected'][0] * config_delay_dist['connected'][1], 0.25,
+                                             config_delay_dist['connected'][0] * config_delay_dist['connected'][1], 0.7,
                                              'connected', 'unplaced')
             max_delay = max(delay_time, max_delay)
         sim_time += max_delay
@@ -331,8 +331,8 @@ class Sim(object):
         :return: the delay time
         """
-        # assert total delay time is at least 0
-        total_delay_time = max(random.normalvariate(mu, sigma), 0.0)
+        # assert total delay time is at least 1 second
+        total_delay_time = max(random.normalvariate(mu, sigma), 1.0)
         # part of the total delay time is the transition state period
         transition_time = trans_ratio*total_delay_time
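For context, the delay model that these two changes tune can be sketched as follows: a total state-change delay is drawn from a normal distribution and floored at one second, and a fixed ratio of it (0.7 for the 'connected' transition after this commit) is attributed to the transition state. This is a minimal sketch with hypothetical parameter values; the real _changeVMState also writes the resulting state measurements to InfluxDB.

import random

def split_state_delay(mu, sigma, trans_ratio):
    # total delay is at least 1 second, matching the change above
    total_delay_time = max(random.normalvariate(mu, sigma), 1.0)
    # a fixed share of the total delay is spent in the transition state
    transition_time = trans_ratio * total_delay_time
    # the remainder is assumed here to be reported against the target state
    return total_delay_time, transition_time, total_delay_time - transition_time

# hypothetical values: mean 10s, sigma 10s * 0.2, 70% of the delay spent transitioning
print(split_state_delay(10, 10 * 0.2, 0.7))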
@@ -376,8 +376,10 @@ class Sim(object):
 def run_simulation(generate=True, sTime=3600):
     """
-    A method which runs the data generation simulator
+    A method which runs the data generation simulator.
     :param generate: True for generating data, False for deleting the DB (optional argument, if not given, default value True is used)
+    :param sTime: the number of 'seconds' the simulation will run
     """
     global INFLUX_DB_NAME
@@ -423,7 +425,7 @@ if __name__ == "__main__":
         elif opt in ('-t', '--time'):
             simTime = arg
-    if genOpt == True:
+    if genOpt:
         print("Running simulation to generate data")
         print("Time period for this simulation: " + str(simTime) + " seconds")
     else:
...
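To illustrate the updated entry point, it can also be invoked directly from Python. The module name simulator used below is an assumption (the file path is not shown in this excerpt); only the signature run_simulation(generate=True, sTime=3600) comes from the diff.

from simulator import run_simulation  # module name is assumed

# generate two hours' worth of simulated monitoring data
run_simulation(generate=True, sTime=7200)

# delete the simulation database instead of generating data
run_simulation(generate=False)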
@@ -33,7 +33,6 @@ class TestSimulation(object):
          "count_avg_booted": 3607, "count_connecting": 3607, "count_avg_connecting": 3607, "count_connected": 3607, "count_avg_connected": 3607, "count_cpus": 3607, "count_memory": 3607, "count_storage": 3607}),
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."mpegdash_service_config" WHERE ipendpoint=\'adaptive_streaming_I1_apache1\'',
          {"time": "1970-01-01T00:00:00Z", "count_avg_running": 3604, "count_avg_starting": 3604, "count_avg_stopped": 3604, "count_avg_stopping": 3604, "count_running": 3604, "count_starting": 3604, "count_stopped": 3604, "count_stopping": 3604}),
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."mpegdash_service_config" WHERE ipendpoint=\'adaptive_streaming_I1_apache2\'',
          {"time": "1970-01-01T00:00:00Z", "count_avg_running": 3604, "count_avg_starting": 3604, "count_avg_stopped": 3604, "count_avg_stopping": 3604, "count_running": 3604, "count_starting": 3604, "count_stopped": 3604, "count_stopping": 3604}),
@@ -56,9 +55,53 @@ class TestSimulation(object):
             # test the error attribute of the result is None, that is no error is returned from executing the DB query
             assert query_result.error is None, "An error was encountered while executing query {0}.".format(query)
-            # get the dictionary of result points; the next() function just gets the first element of the query results iterator (we only expect one item in the iterator)
+            # get the dictionary of result points; the next() function just gets the first element of the query results generator (we only expect one item in the generator)
             actual_result = next(query_result.get_points())
             assert expected_result == actual_result, "Simulation test failure"
             print("Successfully passed test for the following query: {0}".format(query))
+    @pytest.mark.parametrize("query, field", [
+        ('SELECT mean("placing") as "mean_transition_placing" FROM "CLMCMetrics"."autogen"."endpoint_config" where "placing" <> 0 and "placed" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache1\'',
+         'mean_transition_placing'),
+        ('SELECT mean("placing") as "mean_target_placing" FROM "CLMCMetrics"."autogen"."endpoint_config" where "placing" <> 0 and "unplaced" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache1\'',
+         'mean_target_placing'),
+        ('SELECT mean("booting") as "mean_transition_booting" FROM "CLMCMetrics"."autogen"."endpoint_config" where "booting" <> 0 and "booted" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache1\'',
+         'mean_transition_booting'),
+        ('SELECT mean("booting") as "mean_target_booting" FROM "CLMCMetrics"."autogen"."endpoint_config" where "booting" <> 0 and "placed" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache2\'',
+         'mean_target_booting'),
+        ('SELECT mean("connecting") as "mean_transition_connecting" FROM "CLMCMetrics"."autogen"."endpoint_config" where "connecting" <> 0 and "connected" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache2\'',
+         'mean_transition_connecting'),
+        ('SELECT mean("connecting") as "mean_target_connecting" FROM "CLMCMetrics"."autogen"."endpoint_config" where "connecting" <> 0 and "booted" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache2\'',
+         'mean_target_connecting'),
+    ])
+    def test_mean_config_queries(self, influx_db, query, field):
+        """
+        Test queries for mean values in the configuration states model.
+        - 'mean_transition_{state}' - the mean time spent in the given state in cases where this was the actual transition state,
+          e.g. 'mean_transition_placing' refers to the mean time spent in state 'placing' in transitions such as 'placing' -> 'placed'
+        - 'mean_target_{state}' - the mean time spent in the given state in cases where this was the actual target state,
+          e.g. 'mean_target_placing' refers to the mean time spent in state 'placing' in transitions such as 'unplaced' -> 'placing'
+        :param influx_db: influx db client
+        :param query: query under test
+        :param field: the field id to fetch
+        """
+        # pytest automatically goes through all queries under test, declared in the parameters decorator
+        print("\n")  # prints a blank line for formatting purposes
+        # the raise_errors=False argument is given so that we can test that the DB didn't return an error instead of having an exception raised
+        query_result = influx_db.query(query, raise_errors=False)
+        # test the error attribute of the result is None, that is no error is returned from executing the DB query
+        assert query_result.error is None, "An error was encountered while executing query {0}.".format(query)
+        # get the dictionary of result points; the next() function just gets the first element of the query results generator (we only expect one item in the generator)
+        result = next(query_result.get_points()).get(field)
+        assert float(result) >= 0.0, "Test failure. Reported mean values cannot be negative."
+        print("Successfully passed test for the following query: {0}".format(query))