Commit 417be55b authored by Nikolay Stanchev
Refactored system-dependent ping test for the telegraf-agents package

parent ef85b8f4
@@ -91,15 +91,19 @@ SSH into the CLMC server

`vagrant --fixture=streaming-sim -- ssh clmc-service`

Then go to the 'vagrant' directory.

`cd /vagrant`

The next step is to generate the test data, which can be done in two ways.

The first option is to run a Python script that generates the test data sets:

-`python3 /vagrant/test/streaming-sim/StreamingSim.py`
+`python3 test/streaming-sim/StreamingSim.py`

This script can also be used to clear the generated data via the '-c' option:

-`python3 /vagrant/test/streaming-sim/StreamingSim.py -c`
+`python3 test/streaming-sim/StreamingSim.py -c`

The second option is to run the testing module directly; it detects whether the data has already been generated and, if not, generates it automatically before executing the tests. Keep in mind that if the test data is generated this way, a 10-second timeout
@@ -108,7 +112,7 @@ using the first option, only the tests would be executed.

The command for running the testing module is:

-`pytest -s /vagrant/test/streaming-sim/test_simresults.py`
+`pytest -s test/streaming-sim/test_simresults.py`

The `-s` option makes pytest show the print output produced by the test code and is therefore optional.
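As a general pytest note (not specific to this change), the standard `-k` option can narrow the run to tests whose names match an expression, which is convenient when re-running a single failing check:

`pytest -s -k "<test name pattern>" test/streaming-sim/test_simresults.py`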
@@ -20,11 +20,13 @@ def streaming_sim_config(request):

@pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='module')
-def get_db_client(request):
+def get_db_client(streaming_sim_config, request):
    """
    Creates an InfluxDB client for the CLMC metrics database.

+    :param streaming_sim_config: the fixture returning the yaml configuration
    :param request: access the parameters of the fixture
    :return: the created InfluxDB client
    """

-    return InfluxDBClient(host='localhost', port=8086, database=request.param['database'], timeout=10)
\ No newline at end of file
+    return InfluxDBClient(host=streaming_sim_config['hosts'][0]['ip_address'], port=8086, database=request.param['database'], timeout=10)
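For context, a minimal sketch of how a test in the same package could consume this fixture; pytest injects the returned `InfluxDBClient`, already bound to the `CLMCMetrics` database named in the fixture params. The test below is illustrative only and not part of this commit:

```python
# Illustrative sketch only: a test consuming the parametrized get_db_client fixture.
def test_clmc_metrics_database_exists(get_db_client):
    # get_list_database() returns dicts such as {'name': 'CLMCMetrics'}
    databases = get_db_client.get_list_database()
    assert any(db['name'] == 'CLMCMetrics' for db in databases), "CLMCMetrics database is missing"
```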
@@ -2,20 +2,24 @@

from subprocess import run
from platform import system
import pytest


-def test_service_names(streaming_sim_config):
+@pytest.mark.parametrize("service_name", [
+    'clmc-service',
+    'ipendpoint1',
+    'ipendpoint2'
+])
+def test_service_names(streaming_sim_config, service_name):
    """
    Tests the service names in the configuration.

    :param streaming_sim_config: the configuration fixture collected from conftest.py
+    :param service_name: the service name to test
    """

-    assert streaming_sim_config['hosts'][0]['name'] == 'clmc-service', "Invalid service name: {0}".format(streaming_sim_config['hosts'][0]['name'])
-    assert streaming_sim_config['hosts'][1]['name'] == 'ipendpoint1', "Invalid service name: {0}".format(streaming_sim_config['hosts'][1]['name'])
-    assert streaming_sim_config['hosts'][2]['name'] == 'ipendpoint2', "Invalid service name: {0}".format(streaming_sim_config['hosts'][2]['name'])
-    print("\nSuccessfully passed service names configuration test\n")
+    assert any(s['name'] == service_name for s in streaming_sim_config['hosts']), "{0} not in list of hosts".format(service_name)
+    print("\nSuccessfully passed configuration test for service name {0}\n".format(service_name))
def test_ping(streaming_sim_config):
@@ -32,5 +36,5 @@ def test_ping(streaming_sim_config):

    for service in streaming_sim_config['hosts']:
        command = ["ping", system_dependent_param, str(ping_count), service['ip_address']]
-        assert run(command).returncode == 0, "Service ping test failed for {0}".format(service['name'])
+        assert run(command).returncode == 0, "Service ping test failed for {0} with ip address {1}".format(service['name'], service['ip_address'])
        print("\nSuccessfully passed ping test for service: {0}\n".format(service['name']))
@@ -2,21 +2,25 @@

from subprocess import run
from platform import system
import pytest


-def test_service_names(streaming_config):
+@pytest.mark.parametrize("service_name", [
+    'clmc-service',
+    'nginx1',
+    'nginx2',
+    'loadtest-streaming'
+])
+def test_service_names(streaming_config, service_name):
    """
    Tests the service names in the configuration.

    :param streaming_config: the configuration fixture collected from conftest.py
+    :param service_name: the service name to test
    """

-    assert streaming_config['hosts'][0]['name'] == 'clmc-service', "Invalid service name: {0}".format(streaming_config['hosts'][0]['name'])
-    assert streaming_config['hosts'][1]['name'] == 'nginx1', "Invalid service name: {0}".format(streaming_config['hosts'][1]['name'])
-    assert streaming_config['hosts'][2]['name'] == 'nginx2', "Invalid service name: {0}".format(streaming_config['hosts'][2]['name'])
-    assert streaming_config['hosts'][3]['name'] == 'loadtest-streaming', "Invalid service name: {0}".format(streaming_config['hosts'][3]['name'])
-    print("\nSuccessfully passed service names configuration test\n")
+    assert any(s['name'] == service_name for s in streaming_config['hosts']), "{0} not in list of hosts".format(service_name)
+    print("\nSuccessfully passed configuration test for service name {0}\n".format(service_name))
def test_ping(streaming_config):
@@ -33,5 +37,5 @@ def test_ping(streaming_config):

    for service in streaming_config['hosts']:
        command = ["ping", system_dependent_param, str(ping_count), service['ip_address']]
-        assert run(command).returncode == 0, "Service ping test failed for {0}".format(service['name'])
+        assert run(command).returncode == 0, "Service ping test failed for {0} with ip address {1}".format(service['name'], service['ip_address'])
        print("\nSuccessfully passed ping test for service: {0}\n".format(service['name']))
@@ -4,13 +4,29 @@ import pytest

import yaml
from influxdb import InfluxDBClient


-@pytest.fixture(scope="module")
-def telegraf_agent_config():
-    """Returns the service configuration deployed for the telegraf conf tests."""
-    with open("test/telegraf-agents/rspec.yml", 'r') as stream:
+@pytest.fixture(scope="module", params=[{'config': {'rspec': 'test/telegraf-agents/rspec.yml'}}])
+def telegraf_agent_config(request):
+    """
+    Reads the service configuration deployed for the telegraf agents tests.
+
+    :param request: access the parameters of the fixture
+    :return: the python object representing the read YAML file
+    """
+
+    with open(request.param['config']['rspec'], 'r') as stream:
        data_loaded = yaml.load(stream)

    return data_loaded


@pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='module')
def influxdb(telegraf_agent_config, request):
-    return InfluxDBClient(telegraf_agent_config['hosts'][0]['ip_address'], 8086, request.param['database'])
\ No newline at end of file
+    """
+    Creates an InfluxDB client for the CLMC metrics database.
+
+    :param telegraf_agent_config: the fixture returning the yaml configuration
+    :param request: access the parameters of the fixture
+    :return: the created InfluxDB client
+    """
+
+    return InfluxDBClient(host=telegraf_agent_config['hosts'][0]['ip_address'], port=8086, database=request.param['database'], timeout=10)
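As an aside, the `params` list is what makes this fixture easy to extend; a hypothetical second entry (the `CLMCTest` database below is purely illustrative and does not exist in this repo) would make every test that depends on the fixture run once per database:

```python
# Hypothetical variant, not part of this commit: with two parameter sets, each
# dependent test runs once against each database.
@pytest.fixture(params=[{'database': 'CLMCMetrics'}, {'database': 'CLMCTest'}], scope='module')
def influxdb(telegraf_agent_config, request):
    return InfluxDBClient(host=telegraf_agent_config['hosts'][0]['ip_address'], port=8086,
                          database=request.param['database'], timeout=10)
```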
#!/usr/bin/python3

from subprocess import run
from platform import system
import pytest


@pytest.mark.parametrize("service_name", [
    'clmc-service',
    'apache',
    'nginx',
    'mongo',
    'ffmpeg',
    'ipendpoint'
])
def test_service_name(telegraf_agent_config, service_name):
    """
    Tests the service names in the configuration.

    :param telegraf_agent_config: the configuration fixture collected from conftest.py
    :param service_name: the service name to test
    """

    assert any(s['name'] == service_name for s in telegraf_agent_config['hosts']), "{0} not in list of hosts".format(service_name)
    print("\nSuccessfully passed configuration test for service name {0}\n".format(service_name))


def test_ping(telegraf_agent_config):
    """
    Pings each service to test for liveliness.

    :param telegraf_agent_config: the configuration fixture collected from conftest.py
    """

    print("\n")  # blank line printed for formatting purposes

    ping_count = 1
    system_dependent_param = "-n" if system().lower() == "windows" else "-c"

    for service in telegraf_agent_config['hosts']:
        command = ["ping", system_dependent_param, str(ping_count), service['ip_address']]
        assert run(command).returncode == 0, "Service ping test failed for {0} with ip address {1}".format(service['name'], service['ip_address'])
        print("\nSuccessfully passed ping test for service: {0}\n".format(service['name']))
#!/usr/bin/python3

-import os
import pytest
from influxdb import InfluxDBClient


-@pytest.mark.parametrize("service_name", [
-    ('clmc-service'),
-    ('apache'),
-    ('nginx'),
-    ('mongo'),
-    ('ffmpeg'),
-    ('ipendpoint'),
-])
-def test_service_name(telegraf_agent_config, service_name):
-    assert any(s['name'] == service_name for s in telegraf_agent_config['hosts']), "{0} not in list of hosts".format(service_name)
-
-
-def test_ping(telegraf_agent_config):
-    """This test will only run on linux as the process call is not portable, there's a better way"""
-    for host in telegraf_agent_config['hosts']:
-        response = os.system("ping -c 1 " + host['ip_address'])
-        assert response == 0, "Could not ping {0} on ip address {1}".format(host['name'], host['ip_address'])


@pytest.mark.parametrize("measurement, query, expected_result", [
    ('nginx', 'SELECT mean("requests") AS "mean" FROM "CLMCMetrics"."autogen"."nginx"', 0),
@@ -37,25 +20,26 @@ def test_all_inputs(influxdb, measurement, query, expected_result):

    actual_result = points[0]['mean']
    assert actual_result > expected_result, "actual result {0} is not > expected result {1} for query {2}".format(actual_result, str(expected_result), query)


@pytest.mark.parametrize("ipendpoint, measurements", [
    ('id1', [{'measurement': 'cpu', 'query': 'query', 'result': 'result'}, {'measurement': 'nginx', 'query': 'query', 'result': 'result'}, {'measurement': 'mongo', 'query': 'query', 'result': 'result'}]),
    ('id2', [{'measurement': 'cpu', 'query': 'query', 'result': 'result'}, {'measurement': 'nginx', 'query': 'query', 'result': 'result'}])
])
def test_multiple_inputs_on_a_service(influxdb, ipendpoint, measurements):
    """This test checks that a service configured with multiple input plugins as separate telegraf config files generates measurements in the database
    """

    # for each item in the measurement list run the query and test the result
    assert 1


-@pytest.mark.parametrize("query, expected_result",
-    [('filter query', 0),
-    ('filter query', 0),
-    ('filter query', 0)
-    ])
+@pytest.mark.parametrize("query, expected_result", [
+    ('filter query', 0),
+    ('filter query', 0),
+    ('filter query', 0)
+])
def test_global_tag_filtering(influxdb, query, expected_result):
    """Tests that the global tags are inserted correctly into the global configuration using the install CLMC script
    """

    # run query
    # check result
    assert 1
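For orientation, the two placeholder bodies above (`assert 1`) could eventually follow the same query-then-check pattern as `test_all_inputs`. The sketch below is purely illustrative; the real filter queries and expected values are still to be written, and the same `influxdb` fixture and parametrize decorator as above are assumed:

```python
# Illustrative sketch only: how test_global_tag_filtering might be filled in
# once real queries replace the 'filter query' placeholders.
def test_global_tag_filtering(influxdb, query, expected_result):
    points = list(influxdb.query(query).get_points())
    assert len(points) > 0, "query {0} returned no results".format(query)
    # compare the returned values against expected_result once the query is defined
```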