Commit b6af85e7 authored by Nikolay Stanchev

[ Issue #56 ] - refactored the rspec test

parent fb33da19
conftest.py
@@ -2,13 +2,29 @@
 import pytest
 import yaml
+from influxdb import InfluxDBClient


-@pytest.fixture(scope="module",
-                params=[{'config1': {'rspec': 'test/streaming-sim/rspec.yml', 'id': 'myid'}}])
+@pytest.fixture(scope="module", params=[{'config1': {'rspec': 'test/streaming-sim/rspec.yml'}}])
 def streaming_sim_config(request):
-    """Returns the service configuration deployed for the streaming simulation test. In future this needs to be a parameterised fixture shared with other rspec.yml based tests"""
-    print(request.param['config1']['rspec'])
-    print(request.param['config1']['id'])
+    """
+    Reads the service configuration deployed for the streaming simulation test.
+
+    :param request: access the parameters of the fixture
+    :return: the python object representing the read YAML file
+    """
+
     with open(request.param['config1']['rspec'], 'r') as stream:
         data_loaded = yaml.load(stream)
-        return data_loaded
\ No newline at end of file
+
+    return data_loaded
+
+
+@pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='module')
+def get_db_client(request):
+    """
+    Creates an Influx DB client for the CLMC metrics database
+
+    :param request: access the parameters of the fixture
+    :return: the created Influx DB client
+    """
+
+    return InfluxDBClient(host='localhost', port=8086, database=request.param['database'], timeout=10)
\ No newline at end of file
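For orientation only (not part of this commit): a minimal sketch of how pytest would inject the two module-scoped fixtures above into a test module placed next to this conftest.py. The test names and the ping()/hosts checks below are illustrative assumptions, not code from the repository.

def test_db_is_reachable(get_db_client):
    # pytest resolves the argument name against conftest.py and passes in the
    # module-scoped InfluxDBClient; ping() returns the InfluxDB version string
    # when the server is reachable.
    assert get_db_client.ping() is not None


def test_config_defines_hosts(streaming_sim_config):
    # streaming_sim_config is the parsed rspec.yml; the 'hosts' key mirrors the
    # assertions used in the rspec tests further down this commit.
    assert len(streaming_sim_config['hosts']) > 0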
 #!/usr/bin/python3

 import pytest
-import os
+from subprocess import run
+from platform import system


 def test_service_names(streaming_sim_config):
-    print(streaming_sim_config['hosts'][0]['name'])
-    assert streaming_sim_config['hosts'][0]['name'] == 'clmc-service'
-    assert streaming_sim_config['hosts'][1]['name'] == 'ipendpoint1'
-    assert streaming_sim_config['hosts'][2]['name'] == 'ipendpoint2'
+    """
+    Tests the service names in the configuration.
+
+    :param streaming_sim_config: the configuration fixture collected from conftest.py
+    """
+
+    assert streaming_sim_config['hosts'][0]['name'] == 'clmc-service', "Invalid service name: {0}".format(streaming_sim_config['hosts'][0]['name'])
+    assert streaming_sim_config['hosts'][1]['name'] == 'ipendpoint1', "Invalid service name: {0}".format(streaming_sim_config['hosts'][1]['name'])
+    assert streaming_sim_config['hosts'][2]['name'] == 'ipendpoint2', "Invalid service name: {0}".format(streaming_sim_config['hosts'][2]['name'])


 def test_ping(streaming_sim_config):
-    """This test will only run on linux due to using os.system library"""
-    for x in streaming_sim_config['hosts']:
-        print(x['ip_address'])
-        response = os.system("ping -c 1 " + x['ip_address'])
-        assert response == 0
\ No newline at end of file
+    """
+    Pings each service to test for liveliness
+
+    :param streaming_sim_config: the configuration fixture collected from conftest.py
+    """
+
+    print("\n")  # blank line printed for formatting purposes
+
+    ping_count = 1
+    system_dependent_param = "-n" if system().lower() == "windows" else "-c"
+
+    for service in streaming_sim_config['hosts']:
+        command = ["ping", "{0} {1}".format(system_dependent_param, ping_count), service['ip_address']]
+        assert run(command).returncode == 0, "Service ping test failed for {0}".format(service['name'])
+        print("\nSuccessfully passed ping test for service: {0}\n".format(service['name']))
 #!/usr/bin/python3

-from influxdb import InfluxDBClient
 import pytest
 from StreamingSim import run_simulation_fixture
@@ -11,15 +10,15 @@ class TestSimulation(object):
"""
@pytest.mark.parametrize("query, expected_result", [
("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"cpu_usage\"",
('SELECT count(*) FROM "CLMCMetrics"."autogen"."cpu_usage"',
{"time": "1970-01-01T00:00:00Z", "count_cpu_active_time": 7200, "count_cpu_idle_time": 7200, "count_cpu_usage": 7200}),
("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"ipendpoint_route\"",
('SELECT count(*) FROM "CLMCMetrics"."autogen"."ipendpoint_route"',
{"time": "1970-01-01T00:00:00Z", "count_http_requests_fqdn_m": 7200, "count_network_fqdn_latency": 7200}),
("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"mpegdash_service\"",
('SELECT count(*) FROM "CLMCMetrics"."autogen"."mpegdash_service"',
{"time": "1970-01-01T00:00:00Z", "count_avg_response_time": 7200, "count_peak_response_time": 7200, "count_requests": 7200}),
("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"net_port_io\"",
('SELECT count(*) FROM "CLMCMetrics"."autogen"."net_port_io"',
{"time": "1970-01-01T00:00:00Z", "count_RX_BYTES_PORT_M": 7200, "count_TX_BYTES_PORT_M": 7200}),
("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"vm_res_alloc\"",
('SELECT count(*) FROM "CLMCMetrics"."autogen"."vm_res_alloc"',
{"time": "1970-01-01T00:00:00Z", "count_cpu": 12, "count_memory": 12, "count_storage": 12})
])
def test_simulation(self, query, expected_result, get_db_client, run_simulation_fixture):
@@ -28,6 +27,7 @@ class TestSimulation(object):
         :param query: the query to execute (value obtained from the pytest parameter decorator)
         :param expected_result: the result expected from executing the query (value obtained from the pytest parameter decorator)
+        :param get_db_client: the influx db client fixture - imported from conftest.py
         :param run_simulation_fixture: the imported fixture used to generate the testing data - the return value of the fixture is not needed in this case
         """
@@ -47,7 +47,3 @@ class TestSimulation(object):
         assert expected_result == actual_result, "Simulation test failure"

         print("Successfully passed test for the following query: {0}".format(query))
-
-    @pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='class')
-    def get_db_client(self, request):
-        return InfluxDBClient(host='localhost', port=8086, database=request.param['database'], timeout=10)
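The body of test_simulation between the two hunks above is not shown in this diff. For illustration only, one way the actual_result compared in the assertion could be derived with the influxdb client library; the helper name and structure are assumptions, not the repository's code.

from influxdb import InfluxDBClient


def count_measurement(db_client, query):
    # InfluxDBClient.query() returns a ResultSet; get_points() yields one dict per row,
    # so a count(*) query produces a single dictionary shaped like the expected_result
    # values in the parametrize decorator (including the "time" key).
    points = list(db_client.query(query).get_points())
    return points[0] if points else None

With a helper like this, actual_result = count_measurement(get_db_client, query) would be compared against expected_result exactly as in the assertion shown above.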