diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a2d25f6361bcf84c27f090a318f769c9e29898ae..1c9ae7583d7964c9265a7536352ea8e9348535ad 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -35,8 +35,8 @@ build:tests:
     - python setup.py sdist --dist-dir=$CI_PROJECT_DIR/build
   artifacts:
     paths:
-      - build/clmctest-2.0.2.tar.gz
-      - build/clmcservice-2.0.2.tar.gz
+      - build/clmctest-2.0.3.tar.gz
+      - build/clmcservice-2.0.3.tar.gz
     expire_in: 1 day

 test:all:
@@ -50,10 +50,8 @@ test:all:
     - echo "REPO_PASS=${REPO_PASS}" >> $CI_PROJECT_DIR/reporc
     - sudo scripts/test/fixture.sh create -f src/test/clmctest/rspec.json -r $CI_PROJECT_DIR -c all
     - sudo mkdir /var/lib/lxd/containers/test-runner/rootfs/opt/clmc/build
-    - sudo cp build/clmctest-2.0.2.tar.gz /var/lib/lxd/containers/test-runner/rootfs/opt/clmc/build
-    - sudo cp build/clmcservice-2.0.2.tar.gz /var/lib/lxd/containers/test-runner/rootfs/opt/clmc/build
-    - sudo lxc exec test-runner -- pip3 install /opt/clmc/build/clmctest-2.0.2.tar.gz
-    - sudo lxc exec test-runner -- pip3 install /opt/clmc/build/clmcservice-2.0.2.tar.gz
+    - sudo cp build/clmctest-2.0.3.tar.gz /var/lib/lxd/containers/test-runner/rootfs/opt/clmc/build
+    - sudo lxc exec test-runner -- pip3 install /opt/clmc/build/clmctest-2.0.3.tar.gz
     - sudo lxc exec test-runner -- pytest -s --tb=short -rfp --pyargs clmctest
   when: on_success

diff --git a/src/service/VERSION b/src/service/VERSION
index 3391f8417e5d96317d5c25ca088eab68c25b5f6f..966cd76e5fb78bf3f154ef049c8de0476ed86910 100644
--- a/src/service/VERSION
+++ b/src/service/VERSION
@@ -1 +1 @@
-__version__ = "2.0.2"
\ No newline at end of file
+__version__ = "2.0.3"
\ No newline at end of file
diff --git a/src/service/clmcservice/graphapi/utilities.py b/src/service/clmcservice/graphapi/utilities.py
index b8e736dd6e5eec3b08fbe2baceb7048aa16b27ab..9d8dd6808a5fc16440d65bb128fa6da17fe39f51 100644
--- a/src/service/clmcservice/graphapi/utilities.py
+++ b/src/service/clmcservice/graphapi/utilities.py
@@ -223,14 +223,15 @@ def delete_nodes_with_type(graph, node_type):

     log.info("Deleting {0} nodes.".format(node_type))

-    subgraph = graph.nodes.match(node_type)
-    deleted_nodes = len(subgraph)
-    for node in subgraph:
-        graph.delete(node)
+    # this is the recommended way to delete a number of nodes, rather than deleting them one by one
+    query = "MATCH (node:{0}) DETACH DELETE node RETURN count(node) as count;".format(node_type)
+    log.info("Executing query {0}".format(query))
+    result = graph.run(query)
+    nodes_matched = result.data()[0]["count"]  # we expect exactly one result, which is a dictionary with key 'count'

-    log.info("Deleted {0} {1} nodes.".format(deleted_nodes, node_type))
+    log.info("Deleted {0} {1} nodes.".format(nodes_matched, node_type))

-    return deleted_nodes
+    return nodes_matched


 def build_temporal_subgraph(request_id, from_timestamp, to_timestamp, json_queries, graph, influx_client):
@@ -338,11 +339,11 @@ def delete_temporal_subgraph(graph, subgraph_id):

     log.info("Deleting subgraph associated with ID {0}".format(subgraph_id))

-    subgraph = graph.nodes.match(uuid=subgraph_id)
-    nodes_matched = 0
-    for node in subgraph:
-        graph.delete(node)
-        nodes_matched += 1
+    # this is the recommended way to delete a number of nodes, rather than deleting them one by one
+    query = "MATCH (node {{uuid: '{0}'}}) DETACH DELETE node RETURN count(node) as count;".format(subgraph_id)
+    log.info("Executing query {0}".format(query))
+    result = graph.run(query)
+    nodes_matched = result.data()[0]["count"]  # we expect exactly one result, which is a dictionary with key 'count'

     log.info("Deleted {0} nodes associated with ID {1}".format(nodes_matched, subgraph_id))

@@ -389,7 +390,11 @@ def build_network_graph(graph, switches, links, clusters, ues):
                 new_switches_count += 1

             # create the link between the two nodes
-            find_or_create_edge(graph, "linkedTo", from_node, to_node, latency=latency)
+            edge = find_or_create_edge(graph, "linkedTo", from_node, to_node, latency=latency)
+            if edge["latency"] != latency:
+                log.info("Updating latency for edge {0}, old latency {1}, new latency {2}".format(edge, edge["latency"], latency))
+                edge["latency"] = latency  # make sure that the latency is updated if the edge already existed
+                graph.push(edge)  # update the relationship in the DB

             # check whether the source service router connects a particular cluster or a particular UE
             if create_node_from_mapping(graph, from_node, source, clusters, "Cluster"):
diff --git a/src/service/clmcservice/graphapi/views.py b/src/service/clmcservice/graphapi/views.py
index 8aff5faf612a924f1e643159082692ae1f48fe26..323b78ac2836b86a53b8d8a7f46db4788f1b0882 100644
--- a/src/service/clmcservice/graphapi/views.py
+++ b/src/service/clmcservice/graphapi/views.py
@@ -194,13 +194,15 @@ class GraphAPI(object):
             log.error("Unexpected error occurred while executing RTT cypher query for graph with UUID {0} - {1}".format(graph_id, e))
             raise HTTPBadRequest("The Neo4j cypher query didn't return a valid result for the temporal graph with ID {0}".format(graph_id))

-        sf_node = graph.match(nodes=(None, endpoint_node), r_type="realisedBy").first().start_node
+        sf_edge = graph.match(nodes=(None, endpoint_node), r_type="realisedBy").first()
+        sf_node = sf_edge.start_node if sf_edge is not None else None
         if sf_node is None:
             msg = "No service function found associated with endpoint {0}".format(endpoint_node["name"])
             log.error("Unexpected error: {0}".format(msg))
             raise HTTPBadRequest(msg)

-        sf_package_node = graph.match(nodes=(sf_node, None), r_type="instanceOf").first().end_node
+        sf_package_edge = graph.match(nodes=(sf_node, None), r_type="instanceOf").first()
+        sf_package_node = sf_package_edge.end_node if sf_package_edge is not None else None
         if sf_package_node is None:
             msg = "No service function package found associated with service function {0}".format(sf_node["name"])
             log.error("Unexpected error: {0}".format(msg))
diff --git a/src/service/setup.py b/src/service/setup.py
index 55c5182ebf36855516926a3beefd3b0efdf185ff..d1e8ccfd11293fabd1dc17f4eac42e08b6fccea4 100644
--- a/src/service/setup.py
+++ b/src/service/setup.py
@@ -61,15 +61,15 @@ requires = [
     'zope.sqlalchemy==1.0',
     'psycopg2==2.7.5',
     'influxdb==5.2.0',
-    'py2neo==4.1.3',
+    'py2neo==4.2.0',
     'pyyaml==3.13',
     'tosca-parser==1.1.0',
     'schema==0.6.8',
-    'requests==2.19.1'
+    'requests==2.21.0',
+    'pytest==3.8.1'
 ]

 tests_require = [
-    'pytest==3.8.1',
     'pytest-cov==2.6.0'
 ]

diff --git a/src/test/VERSION b/src/test/VERSION
index 3391f8417e5d96317d5c25ca088eab68c25b5f6f..966cd76e5fb78bf3f154ef049c8de0476ed86910 100644
--- a/src/test/VERSION
+++ b/src/test/VERSION
@@ -1 +1 @@
-__version__ = "2.0.2"
\ No newline at end of file
+__version__ = "2.0.3"
\ No newline at end of file
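
Note (not part of the patch): the bulk-deletion change in utilities.py replaces per-node py2neo deletes with a single Cypher MATCH ... DETACH DELETE ... RETURN count(...) statement. The snippet below is a minimal sketch of the same pattern, assuming py2neo 4.x as pinned in setup.py; it passes the uuid as a query parameter instead of str.format (node labels cannot be parameterised in Cypher, so the label-based variant in delete_nodes_with_type still needs string formatting). The function name and connection details are illustrative only.

# Sketch of the DETACH DELETE + count pattern used in delete_temporal_subgraph,
# rewritten with a query parameter for the uuid value (illustrative names only).
from py2neo import Graph  # py2neo 4.x, as pinned in src/service/setup.py


def delete_nodes_by_uuid(graph, subgraph_id):
    """Delete every node carrying the given uuid property; return how many were removed."""
    query = "MATCH (node {uuid: $uuid}) DETACH DELETE node RETURN count(node) AS count"
    result = graph.run(query, uuid=subgraph_id)
    return result.data()[0]["count"]  # single row with a single 'count' column


if __name__ == "__main__":
    # assumed local Neo4j instance and credentials - adjust for the actual deployment
    neo4j_graph = Graph("bolt://localhost:7687", auth=("neo4j", "admin"))
    print(delete_nodes_by_uuid(neo4j_graph, "some-subgraph-uuid"))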
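
The views.py change addresses the fact that RelationshipMatch.first() returns None when no relationship matches: the old code chained .start_node / .end_node directly onto that result, so a missing relationship raised AttributeError before the "is None" guard could run. A small sketch of the guarded lookup, factored into a hypothetical helper:

# Sketch of the None-safe relationship lookup introduced in views.py (helper name is hypothetical).
def first_related_node(graph, r_type, start=None, end=None, use_start_node=True):
    """Return the start or end node of the first matching relationship, or None if nothing matches."""
    edge = graph.match(nodes=(start, end), r_type=r_type).first()
    if edge is None:
        return None
    return edge.start_node if use_start_node else edge.end_node


# usage mirroring the patched code paths:
# sf_node = first_related_node(graph, "realisedBy", end=endpoint_node, use_start_node=True)
# sf_package_node = first_related_node(graph, "instanceOf", start=sf_node, use_start_node=False)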