diff --git a/README.md b/README.md
index 4739ab358961acac0889245b2c1e4e4b60d953e2..f12d18fc94d7d85dd6bb512159686252ad4a514d 100644
--- a/README.md
+++ b/README.md
@@ -91,15 +91,19 @@ SSH into the CLMC server
 
 `vagrant --fixture=streaming-sim -- ssh clmc-service`
 
+Then go to the 'vagrant' directory.
+
+`cd /vagrant`
+
 The next step is to generate the test data, which could be done in two ways.
 
 First option is to run a python script to generate the test data sets
 
-`python3 /vagrant/test/streaming-sim/StreamingSim.py`
+`python3 test/streaming-sim/StreamingSim.py`
 
 This script could also be used to clear the generated data by using the '-c' option
 
-`python3 /vagrant/test/streaming-sim/StreamingSim.py -c`
+`python3 test/streaming-sim/StreamingSim.py -c`
 
 The second option is to directly run the testing module, which will detect if the data was generated, and if not, will automatically
 generate the data before executing the tests. Keep in mind that if the test data is being generated using this way, a 10 seconds timeout
@@ -108,7 +112,7 @@
 using the first option, only the tests would be executed.
 
 The command for running the testing module is
 
-`pytest -s /vagrant/test/streaming-sim/test_simresults.py`
+`pytest -s test/streaming-sim/test_simresults.py`
 
 The `-s` option in the command is used to output prints used in the test code and is, therefore, optional.
diff --git a/test/streaming-sim/conftest.py b/test/streaming-sim/conftest.py
index e5268d03bcac35b1a6277448ce453a76298a7502..a1b0c145cc890a2e3dca9a2bcca1ac954f8d55d5 100644
--- a/test/streaming-sim/conftest.py
+++ b/test/streaming-sim/conftest.py
@@ -20,11 +20,13 @@ def streaming_sim_config(request):
 
 
 @pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='module')
-def get_db_client(request):
+def get_db_client(streaming_sim_config, request):
     """
     Creates an Influx DB client for the CLMC metrics database
+
+    :param streaming_sim_config: the fixture returning the yaml configuration
     :param request: access the parameters of the fixture
     :return: the created Influx DB client
     """
 
-    return InfluxDBClient(host='localhost', port=8086, database=request.param['database'], timeout=10)
\ No newline at end of file
+    return InfluxDBClient(host=streaming_sim_config['hosts'][0]['ip_address'], port=8086, database=request.param['database'], timeout=10)
diff --git a/test/streaming-sim/test_rspec.py b/test/streaming-sim/test_rspec.py
index 86ef0db715ee2fd5acff7a0e5d0c5b05a05a1552..ecce587eab36aab6873b6c10c1c3924bcee93717 100644
--- a/test/streaming-sim/test_rspec.py
+++ b/test/streaming-sim/test_rspec.py
@@ -2,20 +2,24 @@
 
 from subprocess import run
 from platform import system
+import pytest
 
 
-def test_service_names(streaming_sim_config):
+@pytest.mark.parametrize("service_name", [
+    'clmc-service',
+    'ipendpoint1',
+    'ipendpoint2'
+])
+def test_service_names(streaming_sim_config, service_name):
     """
     Tests the service names in the configuration.
 
     :param streaming_sim_config: the configuration fixture collected from conftest.py
+    :param service_name: the service name to test
     """
 
-    assert streaming_sim_config['hosts'][0]['name'] == 'clmc-service', "Invalid service name: {0}".format(streaming_sim_config['hosts'][0]['name'])
-    assert streaming_sim_config['hosts'][1]['name'] == 'ipendpoint1', "Invalid service name: {0}".format(streaming_sim_config['hosts'][1]['name'])
-    assert streaming_sim_config['hosts'][2]['name'] == 'ipendpoint2', "Invalid service name: {0}".format(streaming_sim_config['hosts'][2]['name'])
-
-    print("\nSuccessfully passed service names configuration test\n")
+    assert any(s['name'] == service_name for s in streaming_sim_config['hosts']), "{0} not in list of hosts".format(service_name)
+    print("\nSuccessfully passed configuration test for service name {0}\n".format(service_name))
 
 
 def test_ping(streaming_sim_config):
@@ -32,5 +36,5 @@
 
     for service in streaming_sim_config['hosts']:
         command = ["ping", system_dependent_param, str(ping_count), service['ip_address']]
-        assert run(command).returncode == 0, "Service ping test failed for {0}".format(service['name'])
+        assert run(command).returncode == 0, "Service ping test failed for {0} with ip address {1}".format(service['name'], service['ip_address'])
         print("\nSuccessfully passed ping test for service: {0}\n".format(service['name']))
diff --git a/test/streaming/test_rspec.py b/test/streaming/test_rspec.py
index 51af8a94d6cc6545ac53afdf47fba634e8ad48f1..0bbea5403b59178f609661eb2f4fd280822b5b74 100644
--- a/test/streaming/test_rspec.py
+++ b/test/streaming/test_rspec.py
@@ -2,21 +2,25 @@
 
 from subprocess import run
 from platform import system
+import pytest
 
 
-def test_service_names(streaming_config):
+@pytest.mark.parametrize("service_name", [
+    'clmc-service',
+    'nginx1',
+    'nginx2',
+    'loadtest-streaming'
+])
+def test_service_names(streaming_config, service_name):
     """
     Tests the service names in the configuration.
 
     :param streaming_config: the configuration fixture collected from conftest.py
+    :param service_name: the service name to test
     """
 
-    assert streaming_config['hosts'][0]['name'] == 'clmc-service', "Invalid service name: {0}".format(streaming_config['hosts'][0]['name'])
-    assert streaming_config['hosts'][1]['name'] == 'nginx1', "Invalid service name: {0}".format(streaming_config['hosts'][1]['name'])
-    assert streaming_config['hosts'][2]['name'] == 'nginx2', "Invalid service name: {0}".format(streaming_config['hosts'][2]['name'])
-    assert streaming_config['hosts'][3]['name'] == 'loadtest-streaming', "Invalid service name: {0}".format(streaming_config['hosts'][3]['name'])
-
-    print("\nSuccessfully passed service names configuration test\n")
+    assert any(s['name'] == service_name for s in streaming_config['hosts']), "{0} not in list of hosts".format(service_name)
+    print("\nSuccessfully passed configuration test for service name {0}\n".format(service_name))
 
 
 def test_ping(streaming_config):
@@ -33,5 +37,5 @@
 
     for service in streaming_config['hosts']:
         command = ["ping", system_dependent_param, str(ping_count), service['ip_address']]
-        assert run(command).returncode == 0, "Service ping test failed for {0}".format(service['name'])
+        assert run(command).returncode == 0, "Service ping test failed for {0} with ip address {1}".format(service['name'], service['ip_address'])
         print("\nSuccessfully passed ping test for service: {0}\n".format(service['name']))
diff --git a/test/telegraf-agents/conftest.py b/test/telegraf-agents/conftest.py
index fedd6f9292b5a7d3061f336eabbd9f8fb2b614e0..b096dd4d6875bed42e3c2842148c7b2f6db7a32d 100644
--- a/test/telegraf-agents/conftest.py
+++ b/test/telegraf-agents/conftest.py
@@ -4,13 +4,29 @@ import pytest
 import yaml
 from influxdb import InfluxDBClient
 
-@pytest.fixture(scope="module")
-def telegraf_agent_config():
-    """Returns the service configuration deployed for the telegraf conf tests."""
-    with open("test/telegraf-agents/rspec.yml", 'r') as stream:
+
+@pytest.fixture(scope="module", params=[{'config': {'rspec': 'test/telegraf-agents/rspec.yml'}}])
+def telegraf_agent_config(request):
+    """
+    Reads the service configuration deployed for the telegraf agents tests.
+
+    :param request: access the parameters of the fixture
+    :return: the python object representing the read YAML file
+    """
+
+    with open(request.param['config']['rspec'], 'r') as stream:
         data_loaded = yaml.load(stream)
     return data_loaded
 
+
 @pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='module')
 def influxdb(telegraf_agent_config, request):
-    return InfluxDBClient(telegraf_agent_config['hosts'][0]['ip_address'], 8086, request.param['database'])
\ No newline at end of file
+    """
+    Creates an Influx DB client for the CLMC metrics database
+
+    :param telegraf_agent_config: the fixture returning the yaml configuration
+    :param request: access the parameters of the fixture
+    :return: the created Influx DB client
+    """
+
+    return InfluxDBClient(host=telegraf_agent_config['hosts'][0]['ip_address'], port=8086, database=request.param['database'], timeout=10)
diff --git a/test/telegraf-agents/test_rspec.py b/test/telegraf-agents/test_rspec.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a7c7ee441210077edbd4d455427ce0d47e152a1
--- /dev/null
+++ b/test/telegraf-agents/test_rspec.py
@@ -0,0 +1,43 @@
+#!/usr/bin/python3
+
+from subprocess import run
+from platform import system
+import pytest
+
+
+@pytest.mark.parametrize("service_name", [
+    'clmc-service',
+    'apache',
+    'nginx',
+    'mongo',
+    'ffmpeg',
+    'ipendpoint'
+])
+def test_service_name(telegraf_agent_config, service_name):
+    """
+    Tests the service names in the configuration.
+
+    :param telegraf_agent_config: the configuration fixture collected from conftest.py
+    :param service_name: the service name to test
+    """
+
+    assert any(s['name'] == service_name for s in telegraf_agent_config['hosts']), "{0} not in list of hosts".format(service_name)
+    print("\nSuccessfully passed configuration test for service name {0}\n".format(service_name))
+
+
+def test_ping(telegraf_agent_config):
+    """
+    Pings each service to test for liveness
+
+    :param telegraf_agent_config: the configuration fixture collected from conftest.py
+    """
+
+    print("\n")  # blank line printed for formatting purposes
+
+    ping_count = 1
+    system_dependent_param = "-n" if system().lower() == "windows" else "-c"
+
+    for service in telegraf_agent_config['hosts']:
+        command = ["ping", system_dependent_param, str(ping_count), service['ip_address']]
+        assert run(command).returncode == 0, "Service ping test failed for {0} with ip address {1}".format(service['name'], service['ip_address'])
+        print("\nSuccessfully passed ping test for service: {0}\n".format(service['name']))
diff --git a/test/telegraf-agents/test_telegraf_agents.py b/test/telegraf-agents/test_telegraf_agents.py
index 6e710dffd18f40780f58538eb9c96c322db32813..2f35d0187668cad52177384f3bf6b60755c9a0ec 100644
--- a/test/telegraf-agents/test_telegraf_agents.py
+++ b/test/telegraf-agents/test_telegraf_agents.py
@@ -1,24 +1,7 @@
 #!/usr/bin/python3
-import os
+
 import pytest
-from influxdb import InfluxDBClient
 
-@pytest.mark.parametrize("service_name", [
-    ('clmc-service'),
-    ('apache'),
-    ('nginx'),
-    ('mongo'),
-    ('ffmpeg'),
-    ('ipendpoint'),
-    ])
-def test_service_name(telegraf_agent_config, service_name):
-    assert any(s['name'] == service_name for s in telegraf_agent_config['hosts']), "{0} not in list of hosts".format(service_name)
-
-def test_ping(telegraf_agent_config):
-    """This test will only run on linux as the process call is not portable, there's a better way"""
-    for host in telegraf_agent_config['hosts']:
-        response = os.system("ping -c 1 " + host['ip_address'])
-        assert response == 0, "Could not ping {0} on ip address {1}".format(host['name'], host['ip_address'])
 
 @pytest.mark.parametrize("measurement, query, expected_result", [
     ('nginx', 'SELECT mean("requests") AS "mean" FROM "CLMCMetrics"."autogen"."nginx"', 0),
@@ -37,25 +20,26 @@ def test_all_inputs(influxdb, measurement, query, expected_result):
     actual_result = points[0]['mean']
     assert actual_result > expected_result, "actual result {0} is not > expected result {1} for query {2}".format(actual_result, str(expected_result), query)
 
+
 @pytest.mark.parametrize("ipendpoint, measurements", [
     ('id1', [{'measurement': 'cpu', 'query': 'query', 'result': 'result'} , {'measurement': 'nginx', 'query': 'query', 'result': 'result'}, {'measurement': 'mongo', 'query': 'query', 'result': 'result'}]),
-    ('id2', [{'measurement': 'cpu', 'query': 'query', 'result': 'result'} , {'measurement': 'nginx', 'query': 'query', 'result': 'result'}])
-    ])
+    ('id2', [{'measurement': 'cpu', 'query': 'query', 'result': 'result'} , {'measurement': 'nginx', 'query': 'query', 'result': 'result'}])
+])
 def test_multiple_inputs_on_a_service(influxdb, ipendpoint, measurements):
     """This test checks that a service configured with multiple input plugins as separate telegraf config files generates measurements in the database
     """
     # for each item in the measurement list run the query and test the result
     assert 1
 
-@pytest.mark.parametrize("query, expected_result",
-                         [('filter query', 0),
-                          ('filter query', 0),
-                          ('filter query', 0)
-                          ])
+
+@pytest.mark.parametrize("query, expected_result", [
+    ('filter query', 0),
+    ('filter query', 0),
+    ('filter query', 0)
+])
 def test_global_tag_filtering(influxdb, query, expected_result):
     """Tests that the global tags are inserted correctly into the global configuration using the install CLMC script
     """
     # run query
     # check result
-    assert 1
-
+    assert 1