From c5e5e6a7e5228bde1499c16e31cd08b586d839c5 Mon Sep 17 00:00:00 2001
From: Simon Crowle <sgc@it-innovation.soton.ac.uk>
Date: Wed, 28 Mar 2018 15:26:24 +0100
Subject: [PATCH] Refactor the simulation configuration (time period) into a
 fixture in the TestSimulation class

The conftest module now simply provides a module-scoped fixture that returns an
initialised simulator object; the simulation itself (with its one-hour time
period) is run by a class-scoped fixture inside TestSimulation.
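
For reference, the resulting fixture chain is sketched below. This is a minimal
illustration of the intent rather than the exact code in the diff that follows;
the import path of Sim is an assumption, and streaming_sim_config is the
existing YAML-backed fixture already defined in conftest.py.

    # conftest.py (module scope): only build and reset the simulator
    import pytest
    from clmctest.monitoring.StreamingSim import Sim  # assumed import path

    @pytest.fixture(scope="module")
    def simulator(streaming_sim_config):
        influx_url = "http://" + streaming_sim_config['hosts'][0]['ip_address'] + ":8086"
        db_name = streaming_sim_config['hosts'][1]['database_name']
        agent1_url = "http://" + streaming_sim_config['hosts'][1]['ip_address'] + ":8186"
        agent2_url = "http://" + streaming_sim_config['hosts'][2]['ip_address'] + ":8186"
        sim = Sim(influx_url, db_name, agent1_url, agent2_url)
        sim.reset()  # start every test module from an empty database
        return sim

    # test_simresults.py (class scope): the test class owns the time period
    import time
    import pytest

    class TestSimulation(object):

        @pytest.fixture(scope='class')
        def run_simulator(self, simulator):
            simulator.run(3600)  # simulate one hour of metrics
            time.sleep(10)       # allow InfluxDB to finish ingesting the data

        def test_simulation(self, run_simulator, influx_db, query, expected_result):
            ...  # each parametrised query is executed against influx_db here

Because simulator is module-scoped and run_simulator is class-scoped, the
one-hour simulation is executed only once for the whole set of parametrised
queries rather than once per query.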
---
 clmctest/monitoring/conftest.py        | 42 +++++++-------------------
 clmctest/monitoring/test_simresults.py | 11 +++++--
 2 files changed, 20 insertions(+), 33 deletions(-)

diff --git a/clmctest/monitoring/conftest.py b/clmctest/monitoring/conftest.py
index c351aa8..0952948 100644
--- a/clmctest/monitoring/conftest.py
+++ b/clmctest/monitoring/conftest.py
@@ -22,23 +22,6 @@ def streaming_sim_config():
         data_loaded = yaml.load(stream)
     return data_loaded
 
-@pytest.fixture(scope="module")
-def streaming_sim_params(streaming_sim_config):
-    """
-    Uses attributes from the local streaming_sim_config and creates a dictionary of simulation parameters
-    """
-
-    sim_params = {}
-    sim_params["INFLUX_DB_URL"]  = "http://" + streaming_sim_config['hosts'][0]['ip_address'] + ":8086"
-    sim_params["INFLUX_DB_NAME"] = streaming_sim_config['hosts'][1]['database_name'] # Note: could this be specified in the clmc-service instead?
-    sim_params["AGENT1_URL"]     = "http://" + streaming_sim_config['hosts'][1]['ip_address'] + ":8186"
-    sim_params["AGENT2_URL"]     = "http://" + streaming_sim_config['hosts'][2]['ip_address'] + ":8186"
-
-    sim_params["SIMULATION_TIME_SEC"] = 60 * 60
-
-    return sim_params
-
-
 @pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='module')
 def influx_db(streaming_sim_config, request):
     """
@@ -49,24 +32,21 @@ def influx_db(streaming_sim_config, request):
     :return: the created Influx DB client
     """
 
-    return InfluxDBClient(host=streaming_sim_config['hosts'][0]['ip_address'], port=8086, database=request.param['database'], timeout=10)
+    return InfluxDBClient(host=streaming_sim_config['hosts'][0]['ip_address'], port='8086', database=request.param['database'], timeout=10)
 
-@pytest.fixture(scope='module')
-def run_simulation_fixture(streaming_sim_params):
-    """
-    A fixture, which checks if the the DB has been created, if not it runs the simulator with a 10 seconds timeout after that
-    """
+@pytest.fixture(scope="module")
+def simulator(streaming_sim_config):
+    """ Returns an initialised simulator object, built from the streaming simulation configuration, with its database reset. """
+
+    influx_url     = "http://" + streaming_sim_config['hosts'][0]['ip_address'] + ":8086"
+    influx_db_name = streaming_sim_config['hosts'][1]['database_name']
+    agent1_url     = "http://" + streaming_sim_config['hosts'][1]['ip_address'] + ":8186"
+    agent2_url     = "http://" + streaming_sim_config['hosts'][2]['ip_address'] + ":8186"
+    simulator = Sim(influx_url, influx_db_name, agent1_url, agent2_url)
 
-    simulator = Sim( streaming_sim_params['INFLUX_DB_URL'], streaming_sim_params['INFLUX_DB_NAME'], streaming_sim_params['AGENT1_URL'], streaming_sim_params['AGENT2_URL'])
     dbs = simulator.db_client.get_list_database()
     dbs = [db.get("name") for db in dbs]
 
-    # This check needed to be disabled as the CLMCMetrics database is always created when
-    # the test starts, irrespective of whether this is the 1st time or not
-    # if INFLUX_DB_NAME not in dbs:
     simulator.reset()
-    simulator.run(streaming_sim_params['SIMULATION_TIME_SEC'])
 
-    print("10 seconds timeout is given so that the data could properly be inserted into the database.")
-    import time
-    time.sleep(10)
+    return simulator
diff --git a/clmctest/monitoring/test_simresults.py b/clmctest/monitoring/test_simresults.py
index dd8344f..6631a41 100644
--- a/clmctest/monitoring/test_simresults.py
+++ b/clmctest/monitoring/test_simresults.py
@@ -1,12 +1,19 @@
 #!/usr/bin/python3
 
 import pytest
+import time
 
 
 class TestSimulation(object):
     """
     A testing class used to group all the tests related to the simulation data
     """
+    @pytest.fixture(scope='class')
+    def run_simulator(self, simulator):
+        simulator.run(3600)  # simulation time period of one hour, in seconds
+
+        print("Waiting for InfluxDB to finish receiving the simulation data...")
+        time.sleep(10)  # wait for the data to finish arriving at the InfluxDB database
 
     @pytest.mark.parametrize("query, expected_result", [
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."cpu_usage"',
@@ -25,7 +32,8 @@ class TestSimulation(object):
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."mpegdash_service_config" WHERE ipendpoint=\'adaptive_streaming_I1_apache2\'',
          {"time" : "1970-01-01T00:00:00Z", "count_avg_running" : 3604, "count_avg_starting" : 3604, "count_avg_stopped" : 3604, "count_avg_stopping" : 3604, "count_running" : 3604, "count_starting" : 3604, "count_stopped" : 3604, "count_stopping" : 3604}),
     ])
-    def test_simulation(self, query, expected_result, influx_db, run_simulation_fixture):
+    # requesting run_simulator guarantees the simulation has been executed (and ingested) before the queries run
+    def test_simulation(self, run_simulator, influx_db, query, expected_result):
         """
         This is the entry point of the test. This method will be found and executed when the module is ran using pytest
 
@@ -36,7 +44,6 @@ class TestSimulation(object):
         """
 
         # pytest automatically goes through all queries under test, declared in the parameters decorator
-
         print("\n")  # prints a blank line for formatting purposes
 
         # the raise_errors=False argument is given so that we could actually test that the DB didn't return any errors instead of raising an exception
-- 
GitLab