diff --git a/src/service/clmcservice/__init__.py b/src/service/clmcservice/__init__.py
index 6b5c6e48116c354697b98c227f0c0e3903791205..c17e71b9506d71736ab7eeeba4ce71af9d4150eb 100644
--- a/src/service/clmcservice/__init__.py
+++ b/src/service/clmcservice/__init__.py
@@ -76,6 +76,7 @@ def main(global_config, **settings):
 
     # add routes of the Alerts Configuration API
     config.add_route('alerts_configuration', '/alerts')
+    config.add_route('alerts_configuration_instance', '/alerts/{sfc_id}/{sfc_instance_id}')
 
-    config.scan()  # This method scans the packages and finds any views related to the routes added in the app configuration
+    config.scan()  # this method scans the packages and finds any views related to the routes added in the app configuration
     return config.make_wsgi_app()
diff --git a/src/service/clmcservice/alertsapi/tests.py b/src/service/clmcservice/alertsapi/tests.py
index a4eb816b7bd3fa23a07e36285fdbd853832825b8..be88402c696ca0a8c809c1722d6235c95dcae649 100644
--- a/src/service/clmcservice/alertsapi/tests.py
+++ b/src/service/clmcservice/alertsapi/tests.py
@@ -36,7 +36,7 @@ from requests import get, delete
 from toscaparser.tosca_template import ToscaTemplate
 
 # CLMC-service imports
-from clmcservice.alertsapi.utilities import adjust_tosca_definitions_import
+from clmcservice.alertsapi.utilities import adjust_tosca_definitions_import, SFEMC
 from clmcservice.alertsapi.alerts_specification_schema import validate_clmc_alerts_specification
 from clmcservice.alertsapi.views import AlertsConfigurationAPI
 from clmcservice import ROOT_DIR
@@ -157,6 +157,7 @@ class TestAlertsConfigurationAPI(object):
             * Send a valid TOSCA Alert Specification to the view responsible for configuring Kapacitor and creating alerts
             * Check that Kapacitor alerts, topics and handlers are created with the correct identifier and arguments
             * Check that the API returns the duplication errors if the same alerts specification is sent
+            * Clean up the registered alerts
 
         :param app_config: fixture for setUp/tearDown of the web service registry
         """
@@ -164,30 +165,15 @@ class TestAlertsConfigurationAPI(object):
         kapacitor_host = self.config.registry.settings["kapacitor_host"]
         kapacitor_port = self.config.registry.settings["kapacitor_port"]
 
-        test_folder = "clmc-validator"
-        alerts_test_data_path = join(dirname(ROOT_DIR), *["resources", "tosca", "test-data", test_folder, "valid"])
-        resources_test_data_path = join(dirname(ROOT_DIR), *["resources", "tosca", "test-data", "resource-spec"])
+        # test all of the test files provided by the path_generator_testfiles function
+        for alert_spec_file_paths, valid_resource_spec_file_paths, invalid_resource_spec_file_paths in path_generator_testfiles():
 
-        # traverse through all files in the clmc-validator/valid folder (expected to be valid TOSCA alert specifications)
-        for alerts_test_file in listdir(alerts_test_data_path):
-            alert_spec_abs_path = join(alerts_test_data_path, alerts_test_file)
-
-            if not isfile(alert_spec_abs_path):
-                continue  # skip directories
-
-            print("Testing file {0} in folder {1}".format(alerts_test_file, test_folder))
-
-            # the respective resource specification consistent with this alert spec. will have the same name, with "alerts"
-            # being replaced by "resources_valid" for valid spec or "resources_invalid" for invalid spec
-            valid_resources_test_file = alerts_test_file.replace("alerts", "resources_valid")
-            invalid_resources_test_file = alerts_test_file.replace("alerts", "resources_invalid")
-            valid_resource_spec_abs_path = join(resources_test_data_path, valid_resources_test_file)
-            invalid_resource_spec_abs_path = join(resources_test_data_path, invalid_resources_test_file)
-
-            print("Test uses resource spec. files {0} and {1}".format(valid_resources_test_file, invalid_resources_test_file))
+            alert_spec_abs_path, alerts_test_file = alert_spec_file_paths  # absolute path and name of the alerts spec. file
+            valid_resource_spec_abs_path, valid_resources_test_file = valid_resource_spec_file_paths  # absolute path and name of a valid resource spec. file
+            invalid_resource_spec_abs_path, invalid_resources_test_file = invalid_resource_spec_file_paths  # absolute path and name of an invalid resource spec. file
 
             with open(alert_spec_abs_path) as alert_spec:
-                # first send an inconsistent resource spec
+                # first send an inconsistent resource spec, expecting bad request
                 with open(invalid_resource_spec_abs_path) as invalid_resource_spec:
                     request = testing.DummyRequest()
                     request.POST['alert-spec'] = FieldStorageMock(alerts_test_file, alert_spec)
@@ -200,11 +186,14 @@ class TestAlertsConfigurationAPI(object):
 
                 # reset the read pointer of the alert specification file since it was already read once
                 alert_spec.seek(0)
+
+                # extract the alert specification data in a structured way
+                sfc, sfc_instance, alerts = extract_alert_configuration_data(alert_spec, self.config.registry.settings["sfemc_fqdn"], self.config.registry.settings["sfemc_port"])
+                alert_spec.seek(0)  # reset the read pointer to the beginning again (the extraction in the previous step had to read the full file)
+
                 # then send a consistent resource spec
                 with open(valid_resource_spec_abs_path) as valid_resource_spec:
                     request = testing.DummyRequest()
-                    sfc, sfc_instance, alert_ids, topic_handlers = extract_alert_spec_data(alert_spec, self.config.registry.settings["sfemc_fqdn"], self.config.registry.settings["sfemc_port"])
-                    alert_spec.seek(0)  # reset the read pointer to the beginning again (the extraction in the previous step had to read the full file)
                     request.POST['alert-spec'] = FieldStorageMock(alerts_test_file, alert_spec)  # a simple mock class is used to mimic the FieldStorage class
                     request.POST['resource-spec'] = FieldStorageMock(valid_resources_test_file, valid_resource_spec)
                     clmc_service_response = AlertsConfigurationAPI(request).post_alerts_specification()
@@ -214,31 +203,37 @@ class TestAlertsConfigurationAPI(object):
             assert "triggers_specification_errors" not in clmc_service_response, "Unexpected error was returned for triggers specification"
             assert "triggers_action_errors" not in clmc_service_response, "Unexpected error was returned for handlers specification"
 
-            # traverse through all alert IDs and check that they are created within Kapacitor
-            for alert_id, alert_type in alert_ids:
+            # traverse through all alerts and check that they are created along with their respective handlers within Kapacitor
+            for alert in alerts:
+
+                alert_id = alert["task"]
+                alert_type = alert["type"]
+                topic_id = alert["topic"]
+
+                # check the Kapacitor task
                 kapacitor_response = get("http://{0}:{1}/kapacitor/v1/tasks/{2}".format(kapacitor_host, kapacitor_port, alert_id))
                 assert kapacitor_response.status_code == 200, "Alert with ID {0} was not created - test file {1}.".format(alert_id, alerts_test_file)
                 kapacitor_response_json = kapacitor_response.json()
                 assert "link" in kapacitor_response_json, "Incorrect response from kapacitor for alert with ID {0} - test file {1}".format(alert_id, alerts_test_file)
                 assert kapacitor_response_json["status"] == "enabled", "Alert with ID {0} was created but is disabled - test file {1}".format(alert_id, alerts_test_file)
                 assert kapacitor_response_json["executing"], "Alert with ID {0} was created and is enabled, but is not executing - test file {1}".format(alert_id, alerts_test_file)
-                assert kapacitor_response_json["type"] == alert_type,  "Alert with ID {0} was created with the wrong type - test file {1}".format(alert_id, alerts_test_file)
+                assert kapacitor_response_json["type"] == alert_type, "Alert with ID {0} was created with the wrong type - test file {1}".format(alert_id, alerts_test_file)
 
-            # check that all topic IDs were registered within Kapacitor
-            # check that all handler IDs were created and each of them is subscribed to the correct topic ID
-            for topic_id in topic_handlers.keys():
+                # check the Kapacitor topic
                 kapacitor_response = get("http://{0}:{1}/kapacitor/v1/alerts/topics/{2}".format(kapacitor_host, kapacitor_port, topic_id))
                 assert kapacitor_response.status_code == 200, "Topic with ID {0} was not created - test file {1}".format(topic_id, alerts_test_file)
                 kapacitor_response_json = kapacitor_response.json()
                 assert kapacitor_response_json["id"] == topic_id, "Topic {0} was created with incorrect ID - test file {1}".format(topic_id, alerts_test_file)
 
-                for handler_id, handler_url in topic_handlers[topic_id]:
+                # check that all handler IDs were created and each of them is subscribed to the correct topic ID
+                for handler_id in alert["handlers"]:
+                    handler_url = alert["handlers"][handler_id]
                     kapacitor_response = get("http://{0}:{1}/kapacitor/v1/alerts/topics/{2}/handlers/{3}".format(kapacitor_host, kapacitor_port, topic_id, handler_id))
                     assert kapacitor_response.status_code == 200, "Handler with ID {0} for topic with ID {1} doesn't exist - test file {2}".format(handler_id, topic_id, alerts_test_file)
                     kapacitor_response_json = kapacitor_response.json()
                     assert kapacitor_response_json["id"] == handler_id, "Incorrect ID of handler {0} in the Kapacitor response - test file {1}".format(handler_id, alerts_test_file)
                     assert kapacitor_response_json["kind"] == "post", "Incorrect kind of handler {0} in the Kapacitor response - test file {1}".format(handler_id, alerts_test_file)
-                    assert kapacitor_response_json["options"]["url"], "Incorrect url of handler {0} in the Kapacitor response - test file {1}".format(handler_id, alerts_test_file)
+                    assert kapacitor_response_json["options"]["url"] == handler_url, "Incorrect url of handler {0} in the Kapacitor response - test file {1}".format(handler_id, alerts_test_file)
 
             # send the same spec again to check that error messages are returned (because of ID duplication)
             with open(alert_spec_abs_path) as alert_spec:
@@ -249,22 +244,23 @@ class TestAlertsConfigurationAPI(object):
             assert (sfc, sfc_instance) == (clmc_service_response["service_function_chain_id"], clmc_service_response["service_function_chain_instance_id"]), \
                 "Incorrect extraction of metadata for file {0}". format(alerts_test_file)
 
-            assert len(clmc_service_response["triggers_specification_errors"]) == len(alert_ids), "Expected errors were not returned for triggers specification"
-            handlers_count = sum([len(topic_handlers[topic]) for topic in topic_handlers])
+            assert len(clmc_service_response["triggers_specification_errors"]) == len(alerts), "Expected errors were not returned for triggers specification"
+            handlers_count = sum([len(alert["handlers"]) for alert in alerts])
             assert len(clmc_service_response["triggers_action_errors"]) == handlers_count, "Expected errors were not returned for handlers specification"
 
-            clear_kapacitor_alerts(alert_ids, topic_handlers, kapacitor_host, kapacitor_port)
+            clean_kapacitor_alerts(alerts, kapacitor_host, kapacitor_port)
 
-    def test_alerts_config_api_delete(self, app_config):
+    def test_alerts_config_api_get(self, app_config):
         """
-        Tests the DELETE API endpoint of the alerts configuration API responsible for deleting alerts specifications.
+        Tests the GET API endpoint of the alerts configuration API responsible for fetching registered alerts for a specific SFC instance.
 
         Test steps are:
             * Traverse all valid TOSCA Alerts Specifications and TOSCA Resource Specifications in the
                 src/service/clmcservice/resources/tosca/test-data/clmc-validator/valid and src/service/clmcservice/resources/tosca/test-data/tosca-parser/valid folders
             * Send a valid TOSCA Alert Specification to the view responsible for configuring Kapacitor and creating alerts
-            * Send a valid TOSCA Alert Specification to the view responsible for deleting the created alerts if they exist
-            * Check that all Kapacitor resources (task, topic, handler) have been deleted
+            * Send a valid TOSCA Alert Specification to the view responsible for getting the created alerts
+            * Check that all alerts and Kapacitor resources (task, topic, handler urls) have been correctly returned
+            * Clean up the alerts
 
         :param app_config: fixture for setUp/tearDown of the web service registry
         """
@@ -272,31 +268,80 @@ class TestAlertsConfigurationAPI(object):
         kapacitor_host = self.config.registry.settings["kapacitor_host"]
         kapacitor_port = self.config.registry.settings["kapacitor_port"]
 
-        test_folder = "clmc-validator"
-        alerts_test_data_path = join(dirname(ROOT_DIR), *["resources", "tosca", "test-data", test_folder, "valid"])
-        resources_test_data_path = join(dirname(ROOT_DIR), *["resources", "tosca", "test-data", "resource-spec"])
+        # test all of the test files provided by the path_generator_testfiles function, ignoring the last result, which is invalid resource spec. files
+        for alert_spec_file_paths, valid_resource_spec_file_paths, _ in path_generator_testfiles():
+
+            alert_spec_abs_path, alerts_test_file = alert_spec_file_paths  # absolute path and name of the alerts spec. file
+            resource_spec_abs_path, resources_test_file = valid_resource_spec_file_paths  # absolute path and name of a valid resource spec. file
+
+            with open(alert_spec_abs_path) as alert_spec:
+
+                # extract alert configuration data
+                sfc, sfc_instance, alerts = extract_alert_configuration_data(alert_spec, self.config.registry.settings["sfemc_fqdn"], self.config.registry.settings["sfemc_port"])
+                alert_spec.seek(0)  # reset the read pointer to the beginning again (the extraction in the previous step had to read the full file)
+
+                # send a GET request for registered alerts, expecting an empty list, alerts have not been created yet
+                request = testing.DummyRequest()
+                request.matchdict["sfc_id"] = sfc
+                request.matchdict["sfc_instance_id"] = sfc_instance
+                response = AlertsConfigurationAPI(request).get_alerts()
+                assert response == [], "Incorrect response when fetching registered alerts, expecting empty list"
+
+                # send valid alert and resource spec to create the alerts to be fetched afterwards through a GET request
+                with open(resource_spec_abs_path) as resource_spec:
+                    request = testing.DummyRequest()
+                    request.POST['alert-spec'] = FieldStorageMock(alerts_test_file, alert_spec)  # a simple mock class is used to mimic the FieldStorage class
+                    request.POST['resource-spec'] = FieldStorageMock(resources_test_file, resource_spec)
+                    AlertsConfigurationAPI(request).post_alerts_specification()
+
+            # send a GET request for registered alerts, expecting the newly created alerts
+            request = testing.DummyRequest()
+            request.matchdict["sfc_id"] = sfc
+            request.matchdict["sfc_instance_id"] = sfc_instance
+            response = AlertsConfigurationAPI(request).get_alerts()
+
+            # restructure the extracted alerts data to be comparable with the response of the clmc service
+            expected_alerts = map(lambda alert_object: {
+                "policy": alert_object["policy"], "trigger": alert_object["trigger"],
+                "task_identifier": alert_object["task"], "topic_identifier": alert_object["topic"],
+                "task_api_endpoint": "/kapacitor/v1/tasks/{0}".format(alert_object["task"]),
+                "topic_api_endpoint": "/kapacitor/v1/alerts/topics/{0}".format(alert_object["topic"]),
+                "topic_handlers_api_endpoint": "/kapacitor/v1/alerts/topics/{0}/handlers".format(alert_object["topic"])
+            }, alerts)
+            expected_alerts = sorted(expected_alerts, key=lambda x: x["trigger"])
+            assert sorted(response, key=lambda x: x["trigger"]) == expected_alerts, "Incorrect result returned from a GET alerts request"
+
+            clean_kapacitor_alerts(alerts, kapacitor_host, kapacitor_port)
 
-        # traverse through all files in the clmc-validator/valid folder (expected to be valid TOSCA alert specifications)
-        for alerts_test_file in listdir(alerts_test_data_path):
-            alert_spec_abs_path = join(alerts_test_data_path, alerts_test_file)
+    def test_alerts_config_api_delete(self, app_config):
+        """
+        Tests the DELETE API endpoint of the alerts configuration API responsible for deleting alerts specifications.
+
+        Test steps are:
+            * Traverse all valid TOSCA Alerts Specifications and TOSCA Resource Specifications in the
+                src/service/clmcservice/resources/tosca/test-data/clmc-validator/valid and src/service/clmcservice/resources/tosca/test-data/tosca-parser/valid folders
+            * Send a valid TOSCA Alert Specification to the view responsible for configuring Kapacitor and creating alerts
+            * Send a valid TOSCA Alert Specification to the view responsible for deleting the created alerts if they exist
+            * Check that all Kapacitor resources (task, topic, handler) have been deleted
 
-            if not isfile(alert_spec_abs_path):
-                continue  # skip directories
+        :param app_config: fixture for setUp/tearDown of the web service registry
+        """
 
-            print("Testing file {0} in folder {1}".format(alerts_test_file, test_folder))
+        kapacitor_host = self.config.registry.settings["kapacitor_host"]
+        kapacitor_port = self.config.registry.settings["kapacitor_port"]
+        sfemc_fqdn = self.config.registry.settings["sfemc_fqdn"]
+        sfemc_port = self.config.registry.settings["sfemc_port"]
 
-            # the respective resource specification consistent with this alert spec. will have the same name, with "alerts"
-            # being replaced by "resources_valid" for valid spec or "resources_invalid" for invalid spec
-            resources_test_file = alerts_test_file.replace("alerts", "resources_valid")
-            resource_spec_abs_path = join(resources_test_data_path, resources_test_file)
+        # test all of the test files provided by the path_generator_testfiles function, ignoring the last result, which is invalid resource spec. files
+        for alert_spec_file_paths, valid_resource_spec_file_paths, _ in path_generator_testfiles():
 
-            print("Test uses resource spec. file {0}".format(resources_test_file))
+            alert_spec_abs_path, alerts_test_file = alert_spec_file_paths  # absolute path and name of the alerts spec. file
+            resource_spec_abs_path, resources_test_file = valid_resource_spec_file_paths  # absolute path and name of a valid resource spec. file
 
             with open(alert_spec_abs_path) as alert_spec:
 
-                alerts, handlers = extract_tosca_spec_data(alert_spec)
-                alert_spec.seek(0)  # reset the read pointer to the beginning again (the extraction in the previous step had to read the full file)
-                _, _, alert_ids, topic_handlers = extract_alert_spec_data(alert_spec, self.config.registry.settings["sfemc_fqdn"], self.config.registry.settings["sfemc_port"])
+                # extract the alert specification data, ignore SFC and SFC instance IDs
+                _, _, alerts = extract_alert_configuration_data(alert_spec, sfemc_fqdn, sfemc_port)
                 alert_spec.seek(0)  # reset the read pointer to the beginning again (the extraction in the previous step had to read the full file)
 
                 # send valid alert and resource spec to create the alerts to be deleted afterwards
@@ -306,29 +351,64 @@ class TestAlertsConfigurationAPI(object):
                     request.POST['resource-spec'] = FieldStorageMock(resources_test_file, resource_spec)
                     AlertsConfigurationAPI(request).post_alerts_specification()
 
+            # ensure that these resources (tasks, topics, handlers) exist in Kapacitor
+            # traverse through all alerts and check that everything is created
+            for alert in alerts:
+                # check the alert task
+                alert_id = alert["task"]
+                kapacitor_response = get("http://{0}:{1}/kapacitor/v1/tasks/{2}".format(kapacitor_host, kapacitor_port, alert_id))
+                assert kapacitor_response.status_code == 200, "Alert with ID {0} was not created - test file {1}.".format(alert_id, alerts_test_file)
+
+                # check the alert topic
+                topic_id = alert["topic"]
+                kapacitor_response = get("http://{0}:{1}/kapacitor/v1/alerts/topics/{2}".format(kapacitor_host, kapacitor_port, topic_id))
+                assert kapacitor_response.status_code == 200, "Topic with ID {0} was not created - test file {1}".format(topic_id, alerts_test_file)
+
+                # check handlers
+                for handler_id in alert["handlers"]:
+                    kapacitor_response = get("http://{0}:{1}/kapacitor/v1/alerts/topics/{2}/handlers/{3}".format(kapacitor_host, kapacitor_port, topic_id, handler_id))
+                    assert kapacitor_response.status_code == 200, "Handler with ID {0} for topic with ID {1} was not created - test file {2}".format(handler_id, topic_id, alerts_test_file)
+
+            with open(alert_spec_abs_path) as alert_spec:
                 # now send the alert spec for deletion and check that everything is deleted in Kapacitor
-                alert_spec.seek(0)  # reset the read pointer since the file has already been read once
                 request = testing.DummyRequest()
                 request.params['alert-spec'] = FieldStorageMock(alerts_test_file, alert_spec)
                 response = AlertsConfigurationAPI(request).delete_alerts_specification()
 
+            # restructure the extracted alerts data to be comparable with the response of the clmc service
+            deleted_alerts = map(lambda alert_object: {"policy": alert_object["policy"], "trigger": alert_object["trigger"]}, alerts)
+            deleted_alerts = sorted(deleted_alerts, key=lambda x: x["trigger"])
+            deleted_handlers = []
+            for alert in alerts:
+                policy = alert["policy"]
+                trigger = alert["trigger"]
+                for handler_id in alert["handlers"]:
+                    handler_url = alert["handlers"][handler_id]
+                    if handler_url.startswith("http://{0}:{1}/sfemc/event".format(sfemc_fqdn, sfemc_port)):
+                        handler_url = SFEMC
+                    deleted_handlers.append({"policy": policy, "trigger": trigger, "handler": handler_url})
+            deleted_handlers = sorted(deleted_handlers, key=lambda x: (x["trigger"], x["handler"]))
+
             # assert the response is what's expected containing the deleted alerts and handlers
             assert response.keys() == {"deleted_alerts", "deleted_handlers"}, "Incorrect response format"
-            assert sorted(response["deleted_alerts"], key=lambda x: x["trigger"]) == alerts, "Incorrect result for deleted alerts"
-            assert sorted(response["deleted_handlers"], key=lambda x: x["handler"]) == handlers, "Incorrect result for deleted handlers"
-            # ensure that these resource (tasks, topics, handlers) do not exist in Kapacitor anymore
+            assert sorted(response["deleted_alerts"], key=lambda x: x["trigger"]) == deleted_alerts, "Incorrect result for deleted alerts"
+            assert sorted(response["deleted_handlers"], key=lambda x: (x["trigger"], x["handler"])) == deleted_handlers, "Incorrect result for deleted handlers"
 
-            # traverse through all alert IDs and check that they are deleted from Kapacitor
-            for alert_id, alert_type in alert_ids:
+            # ensure that these resources (tasks, topics, handlers) do not exist in Kapacitor anymore
+            # traverse through all alerts and check that everything is deleted from Kapacitor
+            for alert in alerts:
+                # check the alert task
+                alert_id = alert["task"]
                 kapacitor_response = get("http://{0}:{1}/kapacitor/v1/tasks/{2}".format(kapacitor_host, kapacitor_port, alert_id))
                 assert kapacitor_response.status_code == 404, "Alert with ID {0} was not deleted - test file {1}.".format(alert_id, alerts_test_file)
 
-            # check that all topic IDs and handler IDs were deleted from Kapacitor
-            for topic_id in topic_handlers:
+                # check the alert topic
+                topic_id = alert["topic"]
                 kapacitor_response = get("http://{0}:{1}/kapacitor/v1/alerts/topics/{2}".format(kapacitor_host, kapacitor_port, topic_id))
                 assert kapacitor_response.status_code == 404, "Topic with ID {0} was not deleted - test file {1}".format(topic_id, alerts_test_file)
 
-                for handler_id, handler_url in topic_handlers[topic_id]:
+                # check handlers
+                for handler_id in alert["handlers"]:
                     kapacitor_response = get("http://{0}:{1}/kapacitor/v1/alerts/topics/{2}/handlers/{3}".format(kapacitor_host, kapacitor_port, topic_id, handler_id))
                     assert kapacitor_response.status_code == 404, "Handler with ID {0} for topic with ID {1} was not deleted - test file {2}".format(handler_id, topic_id, alerts_test_file)
 
@@ -355,7 +435,39 @@ class FieldStorageMock(object):
         self.file = file
 
 
-def extract_alert_spec_data(alert_spec, sfemc_fqdn, sfemc_port):
+def path_generator_testfiles():
+    """
+    A utility function which returns a generator object for traversing the valid CLMC alert specs and their respective valid/invalid resource specs
+
+    :return: a generator object that yields a triple of file paths (valid_alert_spec_path, valid_resource_spec_path, invalid_resource_spec_path)
+    """
+
+    test_folder = "clmc-validator"
+    alerts_test_data_path = join(dirname(ROOT_DIR), *["resources", "tosca", "test-data", test_folder, "valid"])
+    resources_test_data_path = join(dirname(ROOT_DIR), *["resources", "tosca", "test-data", "resource-spec"])
+
+    # traverse through all files in the clmc-validator/valid folder (expected to be valid TOSCA alert specifications)
+    for alerts_test_file in listdir(alerts_test_data_path):
+        alert_spec_abs_path = join(alerts_test_data_path, alerts_test_file)
+
+        if not isfile(alert_spec_abs_path):
+            continue  # skip directories
+
+        print("Testing file {0} in folder {1}".format(alerts_test_file, test_folder))
+
+        # the respective resource specification consistent with this alert spec. will have the same name, with "alerts"
+        # being replaced by "resources_valid" for valid spec or "resources_invalid" for invalid spec
+        valid_resources_test_file = alerts_test_file.replace("alerts", "resources_valid")
+        invalid_resources_test_file = alerts_test_file.replace("alerts", "resources_invalid")
+        valid_resource_spec_abs_path = join(resources_test_data_path, valid_resources_test_file)
+        invalid_resource_spec_abs_path = join(resources_test_data_path, invalid_resources_test_file)
+
+        print("Test uses resource spec. files {0} and {1}".format(valid_resources_test_file, invalid_resources_test_file))
+
+        yield (alert_spec_abs_path, alerts_test_file), (valid_resource_spec_abs_path, valid_resources_test_file), (invalid_resource_spec_abs_path, invalid_resources_test_file)
+
+
+def extract_alert_configuration_data(alert_spec, sfemc_fqdn, sfemc_port):
     """
     A utility function to extract the expected alert, handler and topic identifiers (Kapacitor resources) from a given alert specification.
 
@@ -363,7 +475,7 @@ def extract_alert_spec_data(alert_spec, sfemc_fqdn, sfemc_port):
     :param sfemc_fqdn: FQDN of SFEMC
     :param sfemc_port: port number of SFEMC
 
-    :return: a tuple containing sfc_id and sfc_instance_id along with a list and a dictionary of generated IDs (alert IDs (list), topic IDs linked to handler IDs (dict))
+    :return: a tuple of sfc_id and sfc_instance_id, along with a list of alert objects, each containing policy ID, trigger ID, handlers, generated task/topic ID and alert type
     """
 
     version = splitext(alert_spec.name)[0].split("-")[-1]  # take the ending number of the alert spec file
@@ -374,8 +486,7 @@ def extract_alert_spec_data(alert_spec, sfemc_fqdn, sfemc_port):
     # sfc, sfc_instance = tosca_tpl.tpl["metadata"]["sfc"], tosca_tpl.tpl["metadata"]["sfci"]
     sfc, sfc_instance = tosca_tpl.tpl["metadata"]["servicefunctionchain"], "{0}_1".format(tosca_tpl.tpl["metadata"]["servicefunctionchain"])
 
-    alert_ids = []  # saves all alert IDs in a list
-    topic_handlers = {}  # saves all topics in a dictionary, each topic is linked to a list of handler pairs (a handler pair consists of handler id and handler url)
+    alerts = []  # saves every alert object
 
     for policy in tosca_tpl.policies:
         policy_id = policy.name
@@ -386,52 +497,23 @@ def extract_alert_spec_data(alert_spec, sfemc_fqdn, sfemc_port):
 
             topic_id = "{0}\n{1}\n{2}\n{3}".format(sfc, sfc_instance, policy_id, trigger_id)
             topic_id = AlertsConfigurationAPI.get_hash(topic_id)
-            topic_handlers[topic_id] = []
 
             alert_id = topic_id
             alert_type = get_alert_type(event_type, alert_period_integer)
-            alert_ids.append((alert_id, alert_type))
 
+            alert_handlers = {}
             for handler_url in trigger.trigger_tpl["action"]["implementation"]:
 
-                if handler_url == "flame_sfemc":
+                if handler_url == SFEMC:
                     handler_url = "http://{0}:{1}/sfemc/event/{2}/{3}/{4}".format(sfemc_fqdn, sfemc_port, sfc, policy_id, "trigger_id_{0}".format(version))
 
                 handler_id = "{0}\n{1}\n{2}\n{3}\n{4}".format(sfc, sfc_instance, policy_id, trigger_id, handler_url)
                 handler_id = AlertsConfigurationAPI.get_hash(handler_id)
-                topic_handlers[topic_id].append((handler_id, handler_url))
+                alert_handlers[handler_id] = handler_url
 
-    return sfc, sfc_instance, alert_ids, topic_handlers
+            alerts.append({"policy": policy_id, "trigger": trigger_id, "task": alert_id, "topic": topic_id, "type": alert_type, "handlers": alert_handlers})
 
-
-def extract_tosca_spec_data(alert_spec):
-    """
-    A utility function to extract the expected triggers and handlers (TOSCA resources) from a given alert specification.
-
-    :param alert_spec: the alert specification file (file object)
-
-    :return: a tuple containing a list of alert definitions (policy and trigger IDs) and a list of handler definitions (policy, trigger, handler)
-    """
-
-    yaml_alert_spec = load(alert_spec)
-    adjust_tosca_definitions_import(yaml_alert_spec)
-    tosca_tpl = ToscaTemplate(yaml_dict_tpl=yaml_alert_spec)
-
-    alerts = []  # saves all alert definitions in a list
-    handlers = []  # saves all handler definitions in a list
-
-    for policy in tosca_tpl.policies:
-        policy_id = policy.name
-
-        for trigger in policy.triggers:
-            trigger_id = trigger.name
-
-            alerts.append({"policy": policy_id, "trigger": trigger_id})
-
-            for handler_url in trigger.trigger_tpl["action"]["implementation"]:
-                handlers.append({"policy": policy_id, "trigger": trigger_id, "handler": handler_url})
-
-    return sorted(alerts, key=lambda x: x["trigger"]), sorted(handlers, key=lambda x: x["handler"])
+    return sfc, sfc_instance, alerts
 
 
 def get_alert_type(event_type, alert_period):
@@ -454,24 +536,28 @@ def get_alert_type(event_type, alert_period):
         return events[event_type]
 
 
-def clear_kapacitor_alerts(alert_ids, topic_handlers, kapacitor_host, kapacitor_port):
+def clean_kapacitor_alerts(alerts, kapacitor_host, kapacitor_port):
     """
     A utility function to clean up Kapacitor from the configured alerts, topics and handlers.
 
-    :param alert_ids: the list of alert IDs to delete
-    :param topic_handlers: the dictionary of topic and handlers to delete
+    :param alerts: the list of alert objects along with handlers and generated identifiers
     :param kapacitor_host: Kapacitor hostname
     :param kapacitor_port: Kapacitor port number
     """
 
-    for alert_id, _ in alert_ids:
+    for alert in alerts:
+
+        # delete the alert task
+        alert_id = alert["task"]
         kapacitor_response = delete("http://{0}:{1}/kapacitor/v1/tasks/{2}".format(kapacitor_host, kapacitor_port, alert_id))  # delete alert
         assert kapacitor_response.status_code == 204
 
-    for topic_id in topic_handlers:
-        for handler_id, handler_url in topic_handlers[topic_id]:
+        # delete the handlers
+        topic_id = alert["topic"]
+        for handler_id in alert["handlers"]:
             kapacitor_response = delete("http://{0}:{1}/kapacitor/v1/alerts/topics/{2}/handlers/{3}".format(kapacitor_host, kapacitor_port, topic_id, handler_id))  # delete handler
             assert kapacitor_response.status_code == 204
 
+        # delete the alert topic
         kapacitor_response = delete("http://{0}:{1}/kapacitor/v1/alerts/topics/{2}".format(kapacitor_host, kapacitor_port, topic_id))  # delete topic
         assert kapacitor_response.status_code == 204
diff --git a/src/service/clmcservice/alertsapi/views.py b/src/service/clmcservice/alertsapi/views.py
index 60333012bb3a483102c0efb5a66f57b157e13287..edd5495f957a692e401d97adfb9301babed4d098 100644
--- a/src/service/clmcservice/alertsapi/views.py
+++ b/src/service/clmcservice/alertsapi/views.py
@@ -88,6 +88,49 @@ class AlertsConfigurationAPI(object):
             "topic_handlers_api_endpoint": "/kapacitor/v1/alerts/topics/{0}/handlers".format(topic_id)
         }
 
+    @view_config(route_name='alerts_configuration_instance', request_method='GET')
+    def get_alerts(self):
+        """
+        The view for retrieving all alerts and Kapacitor resources registered for a specific service function chain instance.
+
+        :return: a list of objects, each object representing an alert and containing the policy ID, trigger ID, task ID, topic ID and Kapacitor API endpoints for these
+        """
+
+        kapacitor_host, kapacitor_port = self.request.registry.settings['kapacitor_host'], self.request.registry.settings['kapacitor_port']
+
+        # fetch the URL parameters
+        sfc_id, sfc_instance_id = self.request.matchdict["sfc_id"], self.request.matchdict["sfc_instance_id"]
+
+        # get all tasks from kapacitor
+        kapacitor_tasks_url = "http://{0}:{1}/kapacitor/v1/tasks".format(kapacitor_host, kapacitor_port)
+        all_tasks = get(kapacitor_tasks_url).json()["tasks"]
+
+        # fetch all alerts that are configured for this SFC instance
+        sfc_instance_tasks = []
+
+        # traverse every registered task and check if it is configured for this SFC instance
+        for task in all_tasks:
+
+            # get the configured variables of this alert
+            task_config = task["vars"]
+
+            # if configured for this SFC instance
+            if task_config["sfc"]["value"] == sfc_id and task_config["sfci"]["value"] == sfc_instance_id:
+
+                task_id = task["id"]
+                topic_id = task_id
+                policy_id = task_config["policy"]["value"]
+                trigger_id = task_config["eventID"]["value"]
+
+                # add it to the list of alerts for this SFC instance
+                sfc_instance_tasks.append({"policy": policy_id, "trigger": trigger_id,
+                                           "task_identifier": task_id, "topic_identifier": topic_id,
+                                           "task_api_endpoint": "/kapacitor/v1/tasks/{0}".format(task_id),
+                                           "topic_api_endpoint": "/kapacitor/v1/alerts/topics/{0}".format(topic_id),
+                                           "topic_handlers_api_endpoint": "/kapacitor/v1/alerts/topics/{0}/handlers".format(topic_id)})
+
+        return sfc_instance_tasks
+
     @view_config(route_name='alerts_configuration', request_method='DELETE')
     def delete_alerts_specification(self):
         """
diff --git a/src/test/clmctest/alerts/test_alerts.py b/src/test/clmctest/alerts/test_alerts.py
index 64a30a36c3a3c5de48c5a31e2db3d35fcfc2032e..9faf8b79e08e6f09980aa0d21b9cca2a08813b2e 100644
--- a/src/test/clmctest/alerts/test_alerts.py
+++ b/src/test/clmctest/alerts/test_alerts.py
@@ -85,6 +85,7 @@ class TestAlerts(object):
         """
         Test is implemented using the following steps:
             * Send to clmc service a POST request with TOSCA alert spec. and resource spec. files
+            * Check that the registered alerts can be fetched with a GET request
             * Wait 10 seconds for Kapacitor to configure and start executing the defined tasks
             * Send some test requests to nginx to increase the load
             * Wait 15 seconds for alerts to be triggered
@@ -107,6 +108,7 @@ class TestAlerts(object):
             if clmc_service_host is not None and nginx_host is not None:
                 break
 
+        # create the alerts with a POST request
         print("Sending alerts specification to clmc service...")
         alerts_spec = join(dirname(__file__), "alerts_test_config.yaml")
         resources_spec = join(dirname(__file__), "resources_test_config.yaml")
@@ -121,8 +123,40 @@ class TestAlerts(object):
         assert "triggers_specification_errors" not in clmc_service_response, "Unexpected error was returned for triggers specification"
         assert "triggers_action_errors" not in clmc_service_response, "Unexpected error was returned for handlers specification"
 
+        sfc, sfc_instance = "MS_Template_1", "MS_Template_1_1"
+        assert (sfc, sfc_instance) == (clmc_service_response["service_function_chain_id"], clmc_service_response["service_function_chain_instance_id"])
         print("Alert spec sent successfully")
 
+        # check that the alerts can be fetched with a GET request
+        print("Validate that the alerts were registered and can be fetched with a GET request.")
+        response = get("http://{0}/clmc-service/alerts/{1}/{2}".format(clmc_service_host, sfc, sfc_instance))
+        assert response.status_code == 200
+        clmc_service_response = response.json()
+        clmc_service_response = sorted(clmc_service_response, key=lambda x: x["trigger"])  # sort by trigger so that the response can be compared to what's expected
+        assert clmc_service_response == [
+            {"policy": "scale_nginx_policy", "trigger": "high_requests", "task_identifier": "46fb8800c8a5eeeb04b090d838d475df574a2e6d854b5d678fc981c096eb6c1b",
+             "topic_identifier": "46fb8800c8a5eeeb04b090d838d475df574a2e6d854b5d678fc981c096eb6c1b",
+             "task_api_endpoint": "/kapacitor/v1/tasks/46fb8800c8a5eeeb04b090d838d475df574a2e6d854b5d678fc981c096eb6c1b",
+             "topic_api_endpoint": "/kapacitor/v1/alerts/topics/46fb8800c8a5eeeb04b090d838d475df574a2e6d854b5d678fc981c096eb6c1b",
+             "topic_handlers_api_endpoint": "/kapacitor/v1/alerts/topics/46fb8800c8a5eeeb04b090d838d475df574a2e6d854b5d678fc981c096eb6c1b/handlers"},
+            {"policy": "scale_nginx_policy", "trigger": "increase_in_active_requests", "task_identifier": "7a9867f9270dba6635ac3760a3b70bc929f5bd0f3bf582e45d27fbd437f528ca",
+             "topic_identifier": "7a9867f9270dba6635ac3760a3b70bc929f5bd0f3bf582e45d27fbd437f528ca",
+             "task_api_endpoint": "/kapacitor/v1/tasks/7a9867f9270dba6635ac3760a3b70bc929f5bd0f3bf582e45d27fbd437f528ca",
+             "topic_api_endpoint": "/kapacitor/v1/alerts/topics/7a9867f9270dba6635ac3760a3b70bc929f5bd0f3bf582e45d27fbd437f528ca",
+             "topic_handlers_api_endpoint": "/kapacitor/v1/alerts/topics/7a9867f9270dba6635ac3760a3b70bc929f5bd0f3bf582e45d27fbd437f528ca/handlers"},
+            {"policy": "scale_nginx_policy", "trigger": "increase_in_running_processes", "task_identifier": "f5edaeb27fb847116be749c3815d240cbf0d7ba79aee1959daf0b3445a70f2c8",
+             "topic_identifier": "f5edaeb27fb847116be749c3815d240cbf0d7ba79aee1959daf0b3445a70f2c8",
+             "task_api_endpoint": "/kapacitor/v1/tasks/f5edaeb27fb847116be749c3815d240cbf0d7ba79aee1959daf0b3445a70f2c8",
+             "topic_api_endpoint": "/kapacitor/v1/alerts/topics/f5edaeb27fb847116be749c3815d240cbf0d7ba79aee1959daf0b3445a70f2c8",
+             "topic_handlers_api_endpoint": "/kapacitor/v1/alerts/topics/f5edaeb27fb847116be749c3815d240cbf0d7ba79aee1959daf0b3445a70f2c8/handlers"},
+            {"policy": "deadman_policy", "trigger": "no_measurements", "task_identifier": "f7dab6fd53001c812d44533d3bbb6ef45f0d1d39b9441bc3c60402ebda85d320",
+             "topic_identifier": "f7dab6fd53001c812d44533d3bbb6ef45f0d1d39b9441bc3c60402ebda85d320",
+             "task_api_endpoint": "/kapacitor/v1/tasks/f7dab6fd53001c812d44533d3bbb6ef45f0d1d39b9441bc3c60402ebda85d320",
+             "topic_api_endpoint": "/kapacitor/v1/alerts/topics/f7dab6fd53001c812d44533d3bbb6ef45f0d1d39b9441bc3c60402ebda85d320",
+             "topic_handlers_api_endpoint": "/kapacitor/v1/alerts/topics/f7dab6fd53001c812d44533d3bbb6ef45f0d1d39b9441bc3c60402ebda85d320/handlers"}
+        ], "Incorrect response for GET alerts request"
+        print("Alert spec validated successfully")
+
         print("Wait 10 seconds for Kapacitor stream/batch tasks to start working...")
         sleep(10)
 
@@ -138,6 +172,7 @@ class TestAlerts(object):
         alert_logs = listdir(LOG_TEST_FOLDER_PATH)
         assert len(alert_logs) == 4, "4 log files must have been created - one for each alert defined in the specification."
 
+            # check the content of each log file
         for alert_log in alert_logs:
             alert_log_path = join(LOG_TEST_FOLDER_PATH, alert_log)
 
@@ -152,6 +187,7 @@ class TestAlerts(object):
 
             assert valid, "Alert log content is invalid - {0}".format(alert_log_path)
 
+        # delete the alerts with a DELETE request
         with open(alerts_spec, 'rb') as alerts:
             files = {'alert-spec': alerts}
             response = delete("http://{0}/clmc-service/alerts".format(clmc_service_host), files=files)