diff --git a/clmctest/monitoring/StreamingSim.py b/clmctest/monitoring/StreamingSim.py
index ff901f7756afad6b3949636e292b5f080d08f3e0..54deb8f7d87a7b0302e144bf3a03148df5d9f093 100644
--- a/clmctest/monitoring/StreamingSim.py
+++ b/clmctest/monitoring/StreamingSim.py
@@ -80,7 +80,7 @@ class Sim(object):
 
         # Simulation configuration of the media component (MC) state changes
         # "MC state", [average (sec), stddev]
-        mc_config_delay_dist = { "stopped":[1, 0.68], "starting": [5, 0.68], "running": [1, 0.68], "stopping": [2, 0.68]}
+        mc_config_delay_dist = {"stopped": [1, 0.68], "starting": [5, 0.68], "running": [1, 0.68], "stopping": [2, 0.68]}
 
         print("\nSimulation started. Generating data...")
 
@@ -257,7 +257,7 @@ class Sim(object):
             agent_url = urllib.parse.urlparse(ip_endpoint["agent_url"])
             agent_db_client = InfluxDBClient(host=agent_url.hostname, port=agent_url.port, database=self.influx_db_name, timeout=10)
             delay_time = self._changeVMState(agent_db_client, sim_time, ip_endpoint, config_delay_dist['connected'][0],
-                                             config_delay_dist['connected'][0] * config_delay_dist['connected'][1], 0.25,
+                                             config_delay_dist['connected'][0] * config_delay_dist['connected'][1], 0.7,
                                              'connected', 'unplaced')
             max_delay = max(delay_time, max_delay)
         sim_time += max_delay
@@ -331,8 +331,8 @@ class Sim(object):
         :return: the delay time
         """
 
-        # assert total delay time is at least 0
-        total_delay_time = max(random.normalvariate(mu, sigma), 0.0)
+        # ensure the total delay time is at least 1 second
+        total_delay_time = max(random.normalvariate(mu, sigma), 1.0)
 
         # part of the total delay time is the transition state period
         transition_time = trans_ratio*total_delay_time
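
For reference, a minimal sketch (not part of the patch) of the delay handling these hunks adjust. It assumes, from the surrounding code, that the second element of each distribution entry is a relative standard deviation and that whatever remains of the total delay after the transition share is reported against the next state; the helper name and the sample numbers are illustrative only.

import random

def sample_state_delay(mean, rel_stddev, trans_ratio):
    # sigma is the mean scaled by a relative stddev, e.g. 5 * 0.68
    total_delay = max(random.normalvariate(mean, mean * rel_stddev), 1.0)  # floored at 1 simulated second
    transition_time = trans_ratio * total_delay       # share reported in the transition state
    next_state_time = total_delay - transition_time   # assumed remainder, reported in the next state
    return total_delay, transition_time, next_state_time

# e.g. with the 0.7 ratio now used for the 'connected' transition above
print(sample_state_delay(5, 0.68, 0.7))
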
@@ -363,11 +363,11 @@ class Sim(object):
         
         # Report time in transition (and add the same as average)
         mc_states[transition_state] = transition_time
-        mc_states["avg_" +transition_state] = transition_time
+        mc_states["avg_" + transition_state] = transition_time
 
         # Report time remaining in the next state (adding the same as the average)
         mc_states[next_state] = next_state_time
-        mc_states["avg_" +next_state] = next_state_time
+        mc_states["avg_" + next_state] = next_state_time
 
         agent_db_client.write_points(lp.generate_mc_service_config(mc_measurement, mc_states, sim_time))
 
@@ -376,8 +376,10 @@ class Sim(object):
 
 def run_simulation(generate=True, sTime=3600):
     """
-    A method which runs the data generation simulator
+    A method which runs the data generation simulator.
+
     :param generate: True for generating data, False for deleting the DB (optional argument, if not given, default value True is used)
+    :param sTime: the number of simulated seconds for which the simulation will run (optional argument, default value 3600)
     """
 
     global INFLUX_DB_NAME
@@ -409,24 +411,24 @@ if __name__ == "__main__":
 
     # Try get some options
     try:
-        opts, args = getopt.getopt( sys.argv[1:], "c:t:", ['clear','time='])
+        opts, args = getopt.getopt(sys.argv[1:], "ct:", ['clear', 'time='])  # '-c'/'--clear' is a flag; only '-t'/'--time' takes a value
 
     except getopt.GetoptError:
-        print( 'StreamingSim.py -c -t <seconds>' )
+        print('usage: StreamingSim.py [-c|--clear] [-t <seconds>|--time=<seconds>]')
         sys.exit(2)
     
     # Apply options, if any
     for opt, arg in opts:
-        if opt in ( '-c','--clear' ):
+        if opt in ('-c', '--clear'):
             genOpt = False
         
-        elif opt in ('-t','--time'):
-            simTime = arg
+        elif opt in ('-t', '--time'):
+            simTime = int(arg)  # getopt yields strings; the simulation expects an integer number of seconds
 
-    if genOpt == True:
-        print( "Running simulation to generate data" )
-        print( "Time period for this simulation: " + str(simTime) + " seconds" )
+    if genOpt:
+        print("Running simulation to generate data")
+        print("Time period for this simulation: " + str(simTime) + " seconds")
     else:
-        print( "Clearing simulation data" )
+        print("Clearing simulation data")
 
-    run_simulation( genOpt, simTime )
+    run_simulation(genOpt, simTime)
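
For orientation, the entry point reformatted above can also be driven directly from Python. A minimal sketch, assuming the import path implied by the file header and that the package is importable in the environment:

# Hedged usage sketch; the import path is inferred from the diff header and may differ.
from clmctest.monitoring.StreamingSim import run_simulation

run_simulation(generate=True, sTime=3600)   # generate 3600 simulated seconds of data
run_simulation(generate=False)              # clear the simulation database instead

# Equivalent command-line invocations, per the option handling above:
#   python StreamingSim.py -t 3600     (generate data for 3600 seconds)
#   python StreamingSim.py --clear     (clear the simulation data)
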
diff --git a/clmctest/monitoring/conftest.py b/clmctest/monitoring/conftest.py
index 97eaf82c1f3b9789de5ebb2cc5db0cbd26d90f09..8d1ed6202174af4730e1618b30678408cb2ff925 100644
--- a/clmctest/monitoring/conftest.py
+++ b/clmctest/monitoring/conftest.py
@@ -36,15 +36,15 @@ def influx_db(streaming_sim_config, request):
 
 
 @pytest.fixture(scope="module")
-def simulator( streaming_sim_config ):
+def simulator(streaming_sim_config):
 
     influx_url     = "http://" + streaming_sim_config['hosts'][0]['ip_address'] + ":8086"
     influx_db_name = streaming_sim_config['hosts'][1]['database_name']
     agent1_url     = "http://" + streaming_sim_config['hosts'][1]['ip_address'] + ":8186"
     agent2_url     = "http://" + streaming_sim_config['hosts'][2]['ip_address'] + ":8186"
 
-    simulator = Sim( influx_url, influx_db_name, agent1_url, agent2_url )
+    simulator = Sim(influx_url, influx_db_name, agent1_url, agent2_url)
 
     simulator.reset()
 
-    return simulator
\ No newline at end of file
+    return simulator
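
The fixture above only reads a few keys from streaming_sim_config. A hedged sketch of the minimal shape it relies on; the addresses are placeholders, the real rig configuration will contain more fields, and the database name is taken from the test queries:

# Placeholder values; only the keys the fixture dereferences are shown.
streaming_sim_config = {
    "hosts": [
        {"ip_address": "203.0.113.10"},                                   # hosts[0] -> InfluxDB host (port 8086)
        {"ip_address": "203.0.113.11", "database_name": "CLMCMetrics"},   # hosts[1] -> agent 1 (port 8186) and DB name
        {"ip_address": "203.0.113.12"},                                   # hosts[2] -> agent 2 (port 8186)
    ]
}

influx_url = "http://" + streaming_sim_config['hosts'][0]['ip_address'] + ":8086"  # "http://203.0.113.10:8086"
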
diff --git a/clmctest/monitoring/test_simresults.py b/clmctest/monitoring/test_simresults.py
index fca8c0fa69031b0df3309b5850ddc54100725b7c..7dc2e60bc38009ecc0a5cc444ccb2528b160306c 100644
--- a/clmctest/monitoring/test_simresults.py
+++ b/clmctest/monitoring/test_simresults.py
@@ -9,11 +9,11 @@ class TestSimulation(object):
     A testing class used to group all the tests related to the simulation data
     """
     @pytest.fixture(scope='class', autouse=True)
-    def run_simulator( self, simulator ):
-        simulator.run( 3600 )
-      
-        print( "Waiting for INFLUX to finish receiving simulation data..." )
-        time.sleep( 10 ) # wait for data to finish arriving at the INFLUX database
+    def run_simulator(self, simulator):
+        simulator.run(3600)
+
+        print("Waiting for INFLUX to finish receiving simulation data...")
+        time.sleep(10)  # wait for data to finish arriving at the INFLUX database
 
     @pytest.mark.parametrize("query, expected_result", [
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."cpu_usage"',
@@ -33,12 +33,11 @@ class TestSimulation(object):
           "count_avg_booted": 3607, "count_connecting": 3607, "count_avg_connecting": 3607, "count_connected": 3607, "count_avg_connected": 3607, "count_cpus": 3607, "count_memory": 3607, "count_storage": 3607}),
 
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."mpegdash_service_config" WHERE ipendpoint=\'adaptive_streaming_I1_apache1\'',
-
          {"time": "1970-01-01T00:00:00Z", "count_avg_running": 3604, "count_avg_starting": 3604, "count_avg_stopped": 3604, "count_avg_stopping": 3604, "count_running": 3604, "count_starting": 3604, "count_stopped": 3604, "count_stopping": 3604}),
         ('SELECT count(*) FROM "CLMCMetrics"."autogen"."mpegdash_service_config" WHERE ipendpoint=\'adaptive_streaming_I1_apache2\'',
          {"time": "1970-01-01T00:00:00Z", "count_avg_running": 3604, "count_avg_starting": 3604, "count_avg_stopped": 3604, "count_avg_stopping": 3604, "count_running": 3604, "count_starting": 3604, "count_stopped": 3604, "count_stopping": 3604}),
     ])
-    def test_simulation( self, influx_db, query, expected_result ):
+    def test_simulation(self, influx_db, query, expected_result):
         """
         This is the entry point of the test. This method will be found and executed when the module is run using pytest
 
@@ -56,9 +55,53 @@ class TestSimulation(object):
         # test the error attribute of the result is None, that is no error is returned from executing the DB query
         assert query_result.error is None, "An error was encountered while executing query {0}.".format(query)
 
-        # get the dictionary of result points; the next() function just gets the first element of the query results iterator (we only expect one item in the iterator)
+        # get the dictionary of result points; the next() function just gets the first element of the query results generator (we only expect one item in the generator)
         actual_result = next(query_result.get_points())
 
         assert expected_result == actual_result, "Simulation test failure"
 
         print("Successfully passed test for the following query: {0}".format(query))
+
+    @pytest.mark.parametrize("query, field", [
+        ('SELECT mean("placing") as "mean_transition_placing" FROM "CLMCMetrics"."autogen"."endpoint_config" where "placing" <> 0 and "placed" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache1\'',
+         'mean_transition_placing'),
+        ('SELECT mean("placing") as "mean_target_placing" FROM "CLMCMetrics"."autogen"."endpoint_config" where "placing" <> 0 and "unplaced" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache1\'',
+         'mean_target_placing'),
+        ('SELECT mean("booting") as "mean_transition_booting" FROM "CLMCMetrics"."autogen"."endpoint_config" where "booting" <> 0 and "booted" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache1\'',
+         'mean_transition_booting'),
+        ('SELECT mean("booting") as "mean_target_booting" FROM "CLMCMetrics"."autogen"."endpoint_config" where "booting" <> 0 and "placed" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache2\'',
+         'mean_target_booting'),
+        ('SELECT mean("connecting") as "mean_transition_connecting" FROM "CLMCMetrics"."autogen"."endpoint_config" where "connecting" <> 0 and "connected" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache2\'',
+         'mean_transition_connecting'),
+        ('SELECT mean("connecting") as "mean_target_connecting" FROM "CLMCMetrics"."autogen"."endpoint_config" where "connecting" <> 0 and "booted" <> 0 and "ipendpoint"=\'adaptive_streaming_I1_apache2\'',
+         'mean_target_connecting'),
+    ])
+    def test_mean_config_queries(self, influx_db, query, field):
+        """
+        Test queries for mean values in the configuration states model.
+
+        - 'mean_transition_{state}' - the mean time spent in the given state when it acted as the transition state,
+            e.g. 'mean_transition_placing' refers to the mean time spent in state 'placing' during transitions such as 'placing' -> 'placed'
+
+        - 'mean_target_{state}' - the mean time spent in the given state when it acted as the target state,
+            e.g. 'mean_target_placing' refers to the mean time spent in state 'placing' during transitions such as 'unplaced' -> 'placing'
+
+        :param influx_db: influx db client
+        :param query: query under test
+        :param field: the field id to fetch
+        """
+
+        # pytest automatically goes through all queries under test, declared in the parametrize decorator
+        print("\n")  # prints a blank line for formatting purposes
+
+        # the raise_errors=False argument is given so that we could actually test that the DB didn't return any errors instead of raising an exception
+        query_result = influx_db.query(query, raise_errors=False)
+
+        # test the error attribute of the result is None, that is no error is returned from executing the DB query
+        assert query_result.error is None, "An error was encountered while executing query {0}.".format(query)
+
+        # get the dictionary of result points; the next() function just gets the first element of the query results generator (we only expect one item in the generator)
+        result = next(query_result.get_points()).get(field)
+        assert float(result) >= 0.0, "Test failure. Reported mean values cannot be negative."
+
+        print("Successfully passed test for the following query: {0}".format(query))