From c213a5281e5bdc0a17bc47c5cd329a5e0ac04357 Mon Sep 17 00:00:00 2001
From: Nikolay Stanchev <ns17@it-innovation.soton.ac.uk>
Date: Wed, 20 Feb 2019 10:31:54 +0000
Subject: [PATCH] Refactor responses of the Graph API

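The build endpoint no longer echoes the request body back to the
caller; it now returns only the target database name together with a
"graph" object holding the generated request UUID and the queried time
range. Illustrative example of the new build response (field values
are placeholders taken from the test fixtures):

    {
        "database": "test_sfc",
        "graph": {
            "uuid": "graph_mock_uuid1",
            "time_range": {"from": 1528386860000000000,
                           "to": 1528389860000000000}
        }
    }

The delete endpoint response drops the echoed UUID and now contains
only the number of deleted nodes, e.g. {"deleted": 4}. The tests are
adjusted accordingly and patch uuid4 so that the generated graph IDs
are deterministic across the build and delete test methods.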
---
 src/service/clmcservice/graphapi/tests.py | 71 +++++++++--------------
 src/service/clmcservice/graphapi/views.py |  9 ++-
 2 files changed, 31 insertions(+), 49 deletions(-)

diff --git a/src/service/clmcservice/graphapi/tests.py b/src/service/clmcservice/graphapi/tests.py
index ac62ee4..7eab647 100644
--- a/src/service/clmcservice/graphapi/tests.py
+++ b/src/service/clmcservice/graphapi/tests.py
@@ -24,20 +24,21 @@
 
 from json import dumps
 import pytest
+from unittest.mock import patch
 from pyramid import testing
 from clmcservice.graphapi.views import GraphAPI
 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound
 
 
-graph_1_id = None
-graph_2_id = None
-
-
 class TestGraphAPI(object):
     """
     A pytest-implementation test for the Graph API endpoints.
     """
 
+    # fixed mock UUIDs returned by the patched uuid4 in the build test and reused in the delete test
+    graph_1_test_id = "graph_mock_uuid1"
+    graph_2_test_id = "graph_mock_uuid2"
+
     @pytest.fixture(autouse=True)
     def app_config(self):
         """
@@ -95,15 +96,15 @@ class TestGraphAPI(object):
             error_raised = True
         assert error_raised, error_msg
 
-    def test_build(self, db_testing_data):
+    @patch('clmcservice.graphapi.views.uuid4')
+    def test_build(self, uuid_mock, db_testing_data):
         """
         Tests the graph build API endpoint - it makes 2 API calls and checks that the expected graph was created (the influx data that's being used is reported to InfluxDB in the conftest file)
 
+        :param uuid_mock: mock object replacing the behaviour of the uuid4 function so that the generated graph IDs are deterministic
         :param db_testing_data: pair of time stamps - the from-to range of the generated influx test data, test database name and the graph db client object (this is a fixture from conftest)
         """
 
-        global graph_1_id, graph_2_id  # these variables are used to store the ID of the graphs that were created during the execution of this test method; they are reused later when testing the delete method
-
         from_timestamp, to_timestamp, graph_db = db_testing_data
 
         ue_nodes = set([node["name"] for node in graph_db.nodes.match("UserEquipment")])
@@ -115,7 +116,7 @@ class TestGraphAPI(object):
         switch_nodes = set([node["name"] for node in graph_db.nodes.match("Switch")])
         assert switch_nodes == set("127.0.0." + str(i) for i in range(1, 7)), "Switch nodes must have been created by the db_testing_data fixture"
 
-        # test with invalid URL parameters naming
+        # test with an invalid request body
         service_functions = dict(nginx={"measurement_name": "nginx", "response_time_field": "mean(avg_processing_time)",
                                         "request_size_field": "mean(avg_request_size)", "response_size_field": "mean(avg_response_size)"},
                                  minio={"measurement_name": "minio_http", "response_time_field": "mean(total_processing_time)/mean(total_requests_count)",
@@ -129,6 +130,7 @@ class TestGraphAPI(object):
             GraphAPI(request).build_temporal_graph()
 
         # Create a valid build request and send it to the API endpoint
+        uuid_mock.return_value = self.graph_1_test_id
         service_functions = dict(nginx={"measurement_name": "nginx", "response_time_field": "mean(avg_processing_time)",
                                         "request_size_field": "mean(avg_request_size)", "response_size_field": "mean(avg_response_size)"},
                                  minio={"measurement_name": "minio_http", "response_time_field": "mean(total_processing_time)/mean(total_requests_count)",
@@ -140,16 +142,15 @@ class TestGraphAPI(object):
         request = testing.DummyRequest()
         request.body = body.encode(request.charset)
         response = GraphAPI(request).build_temporal_graph()
+
         graph_subresponse = response.pop("graph")
-        # remove the "from" and "to" keys, these will be returned in the graph_subresponse
-        build_json_body.pop("from")
-        build_json_body.pop("to")
-        assert response == build_json_body, "Response must contain the request body"
-        assert graph_subresponse.get("uuid") is not None, "Request UUID must be attached to the response."
+
+        assert response == {"database": "test_sfc"}, "Response must contain the database name"
+
+        assert graph_subresponse["uuid"] == self.graph_1_test_id, "Request UUID must be attached to the response."
         assert graph_subresponse["time_range"]["from"] == from_timestamp * 10**9  # timestamp returned in nanoseconds
         assert graph_subresponse["time_range"]["to"] == to_timestamp * 10**9  # timestamp returned in nanoseconds
         request_id = graph_subresponse["uuid"]
-        graph_1_id = request_id
 
         # check that the appropriate nodes have been created
         sfp_names = set([node["name"] for node in graph_db.nodes.match("ServiceFunctionPackage")])
@@ -193,6 +194,7 @@ class TestGraphAPI(object):
             assert endpoint_node["response_size"] == pytest.approx(response_size, 1), "Wrong response size attribute of endpoint node"
 
         # send a new request for a new service function chain instance and check the new subgraph has been created
+        uuid_mock.return_value = self.graph_2_test_id
         service_functions = dict(minio={"measurement_name": "minio_http", "response_time_field": "mean(total_processing_time)/mean(total_requests_count)",
                                         "request_size_field": "mean(total_requests_size)/mean(total_requests_count)", "response_size_field": "mean(total_response_size)/mean(total_requests_count)"},
                                  apache={"measurement_name": "apache", "response_time_field": "mean(avg_processing_time)",
@@ -204,16 +206,15 @@ class TestGraphAPI(object):
         request = testing.DummyRequest()
         request.body = body.encode(request.charset)
         response = GraphAPI(request).build_temporal_graph()
+
         graph_subresponse = response.pop("graph")
-        # remove the "from" and "to" keys, these will be returned in the graph_subresponse
-        build_json_body.pop("from")
-        build_json_body.pop("to")
-        assert response == build_json_body, "Response must contain the request body"
-        assert graph_subresponse.get("uuid") is not None, "Request UUID must be attached to the response."
+
+        assert response == {"database": "test_sfc"}, "Response must contain the database name"
+
+        assert graph_subresponse["uuid"] == self.graph_2_test_id, "Request UUID must be attached to the response."
         assert graph_subresponse["time_range"]["from"] == from_timestamp * 10**9  # timestamp returned in nanoseconds
         assert graph_subresponse["time_range"]["to"] == to_timestamp * 10**9  # timestamp returned in nanoseconds
         request_id = graph_subresponse["uuid"]
-        graph_2_id = request_id
 
         # check the new nodes have been created
         assert graph_db.nodes.match("ServiceFunctionPackage", name="apache").first() is not None, "Service function package apache must have been added to the graph"
@@ -262,8 +263,6 @@ class TestGraphAPI(object):
         :param db_testing_data: pair of time stamps - the from-to range of the generated influx test data, test database name and the graph db client object (this is a fixture from conftest)
         """
 
-        global graph_1_id, graph_2_id
-
         from_timestamp, to_timestamp, graph_db = db_testing_data
 
         request = testing.DummyRequest()
@@ -277,15 +276,15 @@ class TestGraphAPI(object):
 
         # delete the graph associated with graph_1_id
         request = testing.DummyRequest()
-        request.matchdict["graph_id"] = graph_1_id
+        request.matchdict["graph_id"] = self.graph_1_test_id
         response = GraphAPI(request).delete_temporal_graph()
-        assert response == {"uuid": graph_1_id, "deleted": 4}, "Incorrect response when deleting temporal graph"
+        assert response == {"deleted": 4}, "Incorrect response when deleting temporal graph"
 
         # delete the graph associated with graph_2_id
         request = testing.DummyRequest()
-        request.matchdict["graph_id"] = graph_2_id
+        request.matchdict["graph_id"] = self.graph_2_test_id
         response = GraphAPI(request).delete_temporal_graph()
-        assert response == {"uuid": graph_2_id, "deleted": 3}, "Incorrect response when deleting temporal graph"
+        assert response == {"deleted": 3}, "Incorrect response when deleting temporal graph"
 
         assert len(graph_db.nodes.match("Endpoint")) == 0, "All endpoint nodes should have been deleted"
         assert set([node["name"] for node in graph_db.nodes.match("Cluster")]) == set(["DC" + str(i) for i in range(1, 7)]), "Cluster nodes must not be deleted"
@@ -348,15 +347,7 @@ class TestGraphAPI(object):
         request = testing.DummyRequest()
         request.body = body.encode(request.charset)
         response = GraphAPI(request).build_temporal_graph()
-        graph_subresponse = response.pop("graph")
-        # remove the "from" and "to" keys, these will be returned in the graph_subresponse
-        build_json_body.pop("from")
-        build_json_body.pop("to")
-        assert response == build_json_body, "Response must contain the request body"
-        assert graph_subresponse.get("uuid") is not None, "Request UUID must be attached to the response."
-        assert graph_subresponse["time_range"]["from"] == from_timestamp * 10**9  # timestamp returned in nanoseconds
-        assert graph_subresponse["time_range"]["to"] == to_timestamp * 10**9  # timestamp returned in nanoseconds
-        request_id = graph_subresponse["uuid"]
+        request_id = response["graph"]["uuid"]
 
         # test some more error case handling of the RTT API endpoint
         request = testing.DummyRequest()
@@ -426,15 +417,7 @@ class TestGraphAPI(object):
         request = testing.DummyRequest()
         request.body = body.encode(request.charset)
         response = GraphAPI(request).build_temporal_graph()
-        graph_subresponse = response.pop("graph")
-        # remove the "from" and "to" keys, these will be returned in the graph_subresponse
-        build_json_body.pop("from")
-        build_json_body.pop("to")
-        assert response == build_json_body, "Response must contain the request body"
-        assert graph_subresponse.get("uuid") is not None, "Request UUID must be attached to the response."
-        assert graph_subresponse["time_range"]["from"] == from_timestamp * 10**9  # timestamp returned in nanoseconds
-        assert graph_subresponse["time_range"]["to"] == to_timestamp * 10**9  # timestamp returned in nanoseconds
-        request_id = graph_subresponse["uuid"]
+        request_id = response["graph"]["uuid"]
 
         # go through the set of input/output (expected) parameters and assert actual results match with expected ones
         for startpoint, endpoint, forward_latencies, reverse_latencies, response_time, request_size, response_size, rtt, global_tags in (
diff --git a/src/service/clmcservice/graphapi/views.py b/src/service/clmcservice/graphapi/views.py
index 53f0d31..32a32fd 100644
--- a/src/service/clmcservice/graphapi/views.py
+++ b/src/service/clmcservice/graphapi/views.py
@@ -81,11 +81,10 @@ class GraphAPI(object):
         request_id = str(uuid4())
 
         build_temporal_subgraph(request_id, from_timestamp, to_timestamp, json_queries, graph, influx_client)
-        json_queries['graph'] = {"uuid": request_id, "time_range": {"from": from_timestamp, "to": to_timestamp}}
-        json_queries.pop("from")
-        json_queries.pop("to")
 
-        return json_queries
+        json_response = {"database": database_name, "graph": {"uuid": request_id, "time_range": {"from": from_timestamp, "to": to_timestamp}}}
+
+        return json_response
 
     @view_config(route_name='graph_manage', request_method='DELETE')
     def delete_temporal_graph(self):
@@ -103,7 +102,7 @@ class GraphAPI(object):
             raise HTTPNotFound("No subgraph found associated with the request ID {0}".format(graph_id))
 
         number_of_deleted_nodes = delete_temporal_subgraph(graph, graph_id)
-        return {"uuid": graph_id, "deleted": number_of_deleted_nodes}
+        return {"deleted": number_of_deleted_nodes}
 
     @view_config(route_name='graph_algorithms_rtt', request_method='GET')
     def run_rtt_query(self):
-- 
GitLab