#!/usr/bin/python3

from influxdb import InfluxDBClient
import pytest
from StreamingSim import run_simulation_fixture


class TestSimulation(object):
    """
    A testing class used to group all the tests related to the simulation data
    """

    @pytest.mark.parametrize("query, expected_result", [
        ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"cpu_usage\"",
         {"time": "1970-01-01T00:00:00Z", "count_cpu_active_time": 7200, "count_cpu_idle_time": 7200, "count_cpu_usage": 7200}),
        ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"ipendpoint_route\"",
         {"time": "1970-01-01T00:00:00Z", "count_http_requests_fqdn_m": 7200, "count_network_fqdn_latency": 7200}),
        ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"mpegdash_service\"",
         {"time": "1970-01-01T00:00:00Z", "count_avg_response_time": 7200, "count_peak_response_time": 7200, "count_requests": 7200}),
        ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"net_port_io\"",
         {"time": "1970-01-01T00:00:00Z", "count_RX_BYTES_PORT_M": 7200, "count_TX_BYTES_PORT_M": 7200}),
        ("SELECT count(*) FROM \"CLMCMetrics\".\"autogen\".\"vm_res_alloc\"",
         {"time": "1970-01-01T00:00:00Z", "count_cpu": 12, "count_memory": 12, "count_storage": 12})
    ])
    def test_simulation(self, query, expected_result, get_db_client, run_simulation_fixture):
        """
        This is the entry point of the test. This method will be found and executed when the module is ran using pytest

        :param query: the query to execute (value obtained from the pytest parameter decorator)
        :param expected_result: the result expected from executing the query (value obtained from the pytest parameter decorator)
        :param run_simulation_fixture: the imported fixture to use to generate the testing data - the return value of the fixture is not needed in this case
        """

        # pytest automatically runs this test once for each (query, expected_result) pair declared in the parametrize decorator

        print("\n")  # prints a blank line for formatting purposes

        # raise_errors=False is passed so that we can assert that the DB returned no error, instead of having the client raise an exception
        query_result = get_db_client.query(query, raise_errors=False)

        # test the error attribute of the result is None, that is no error is returned from executing the DB query
        assert query_result.error is None, "An error was encountered while executing query {0}.".format(query)

        # get the dictionary of result points; the next() function just gets the first element of the query results iterator (we only expect one item in the iterator)
        actual_result = next(query_result.get_points())
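        # for the cpu_usage query, for instance, actual_result is expected to be a dictionary of the form
        # {"time": "1970-01-01T00:00:00Z", "count_cpu_active_time": 7200, ...}, matching the values in the parametrize decorator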

        assert expected_result == actual_result, "Simulation test failure for query: {0}".format(query)

        print("Successfully passed test for the following query: {0}".format(query))

    @pytest.fixture(params=[{'database': 'CLMCMetrics'}], scope='class')
    def get_db_client(self, request):
        """A class-scoped fixture returning an InfluxDB client connected to the database named in the fixture parameters."""
        return InfluxDBClient(host='localhost', port=8086, database=request.param['database'], timeout=10)
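
# A minimal example of how this module can be run with pytest, assuming an InfluxDB instance is
# reachable on localhost:8086 as configured in get_db_client above (the module path below is
# illustrative - adjust it to wherever this file lives in the repository):
#
#     pytest -s path/to/this_module.py
#
# The optional -s flag disables pytest's stdout capturing, so the print statements in the test
# above are shown while the tests run.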