Skip to content
Snippets Groups Projects
Commit c40efc7d authored by Nikolay Stanchev's avatar Nikolay Stanchev
Browse files

Logging implementation for CLMC service

parent b9f01978
No related branches found
No related tags found
No related merge requests found
...@@ -149,6 +149,10 @@ if [ $? -ne 0 ] ; then ...@@ -149,6 +149,10 @@ if [ $? -ne 0 ] ; then
exit 1 exit 1
fi fi
# create directory for CLMC service logs
echo "----> Creating CLMC web service log directory"
sudo mkdir /var/log/clmcservice
# start the service # start the service
echo "----> Starting CLMC web service" echo "----> Starting CLMC web service"
nohup pserve production.ini > /dev/null 2>&1 & nohup pserve production.ini > /dev/null 2>&1 &
......
...@@ -29,6 +29,15 @@ from urllib.parse import urlparse ...@@ -29,6 +29,15 @@ from urllib.parse import urlparse
from clmcservice.utilities import generate_e2e_delay_report from clmcservice.utilities import generate_e2e_delay_report
import getopt import getopt
import sys import sys
import logging
log = logging.getLogger('aggregator')
hdlr = logging.FileHandler('/var/log/clmcservice/aggregator.log', mode='a')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
log.addHandler(hdlr)
log.setLevel(logging.INFO)
class Aggregator(object): class Aggregator(object):
...@@ -49,11 +58,11 @@ class Aggregator(object): ...@@ -49,11 +58,11 @@ class Aggregator(object):
:param report_period: the report period in seconds :param report_period: the report period in seconds
""" """
print("Connecting to Influx database {0} with URL {1}".format(database_name, database_url)) log.info("Connecting to Influx database {0} with URL {1}".format(database_name, database_url))
# initialise a database client using the database url and the database name # initialise a database client using the database url and the database name
url_object = urlparse(database_url) url_object = urlparse(database_url)
self.db_client = InfluxDBClient(host=url_object.hostname, port=url_object.port, database=database_name, timeout=10) self.db_client = InfluxDBClient(host=url_object.hostname, port=url_object.port, database=database_name, timeout=10)
print("Successfully connected to Influx database {0} with URL {1}".format(database_name, database_url)) log.info("Successfully connected to Influx database {0} with URL {1}".format(database_name, database_url))
self.db_url = database_url self.db_url = database_url
self.db_name = database_name self.db_name = database_name
...@@ -71,7 +80,7 @@ class Aggregator(object): ...@@ -71,7 +80,7 @@ class Aggregator(object):
Stop the aggregator from running. Stop the aggregator from running.
""" """
print("Aggregator's stop flag has been set.") log.info("Aggregator's stop flag has been set.")
self._stop_flag.set() self._stop_flag.set()
def run(self): def run(self):
...@@ -79,10 +88,12 @@ class Aggregator(object): ...@@ -79,10 +88,12 @@ class Aggregator(object):
Performs the functionality of the aggregator - query data from both measurements merge that data and post it back in influx every 5 seconds. Performs the functionality of the aggregator - query data from both measurements merge that data and post it back in influx every 5 seconds.
""" """
print("Aggregator started running.") log.info("Aggregator started running.")
current_time = int(time()) current_time = int(time())
while not self._stop_flag.is_set(): while not self._stop_flag.is_set():
log.info("Trying to generate an E2E measurement.")
boundary_time = current_time - self.report_period boundary_time = current_time - self.report_period
boundary_time_nano = boundary_time * 1000000000 boundary_time_nano = boundary_time * 1000000000
...@@ -160,7 +171,9 @@ class Aggregator(object): ...@@ -160,7 +171,9 @@ class Aggregator(object):
e2e_arguments['delay_service'], e2e_arguments['delay_service'],
e2e_arguments["avg_request_size"], e2e_arguments['avg_response_size'], e2e_arguments['avg_bandwidth'], e2e_arguments["avg_request_size"], e2e_arguments['avg_response_size'], e2e_arguments['avg_bandwidth'],
e2e_arguments['time'])) e2e_arguments['time']))
print("Successfully generated an E2E measurement and posted back to Influx.") log.info("Successfully generated an E2E measurement and posted back to Influx.")
else:
log.info("Couldn't generate an E2E measurement although some of the data could be fetched.")
old_timestamp = current_time old_timestamp = current_time
# wait until {report_period) seconds have passed # wait until {report_period) seconds have passed
...@@ -168,7 +181,7 @@ class Aggregator(object): ...@@ -168,7 +181,7 @@ class Aggregator(object):
sleep(1) sleep(1)
current_time = int(time()) current_time = int(time())
print("Aggregator stopped running.") log.info("Aggregator stopped running.")
if __name__ == '__main__': if __name__ == '__main__':
...@@ -192,7 +205,7 @@ if __name__ == '__main__': ...@@ -192,7 +205,7 @@ if __name__ == '__main__':
Aggregator(database_name=arg_database_name, database_url=arg_database_url, report_period=arg_period).run() Aggregator(database_name=arg_database_name, database_url=arg_database_url, report_period=arg_period).run()
# print the error messages in case of a parse error # log.info the error messages in case of a parse error
except getopt.GetoptError as err: except getopt.GetoptError as err:
print(err) log.info(err)
print('Parse error; run the script using the following format: python aggregator.py -p <seconds> -d <database name> -u <database url>') log.info('Parse error; run the script using the following format: python aggregator.py -p <seconds> -d <database name> -u <database url>')
...@@ -31,6 +31,10 @@ from clmcservice.utilities import validate_config_content, validate_action_conte ...@@ -31,6 +31,10 @@ from clmcservice.utilities import validate_config_content, validate_action_conte
import os import os
import os.path import os.path
import sys import sys
import logging
log = logging.getLogger(__name__)
@view_defaults(route_name='aggregator_config', renderer='json') @view_defaults(route_name='aggregator_config', renderer='json')
...@@ -181,7 +185,7 @@ class AggregatorController(object): ...@@ -181,7 +185,7 @@ class AggregatorController(object):
config.get('aggregator_database_name'), '--url', config.get('aggregator_database_url')] config.get('aggregator_database_name'), '--url', config.get('aggregator_database_url')]
process = Popen(command, cwd=dir_path) process = Popen(command, cwd=dir_path)
print("\nStarted aggregator process with PID: {0}\n".format(process.pid)) log.info("\nStarted aggregator process with PID: {0}\n".format(process.pid))
return process return process
...@@ -196,7 +200,7 @@ class AggregatorController(object): ...@@ -196,7 +200,7 @@ class AggregatorController(object):
# check if the process is started # check if the process is started
if AggregatorController.is_process_running(process): if AggregatorController.is_process_running(process):
process.terminate() process.terminate()
print("\nStopped aggregator process with PID: {0}\n".format(process.pid)) log.info("\nStopped aggregator process with PID: {0}\n".format(process.pid))
@staticmethod @staticmethod
def is_process_running(process): def is_process_running(process):
...@@ -254,7 +258,7 @@ class RoundTripTimeQuery(object): ...@@ -254,7 +258,7 @@ class RoundTripTimeQuery(object):
db_client = InfluxDBClient(host=url_object.hostname, port=url_object.port, database=influx_db_name, timeout=10) db_client = InfluxDBClient(host=url_object.hostname, port=url_object.port, database=influx_db_name, timeout=10)
query = 'SELECT mean(*) FROM "{0}"."autogen"."e2e_delays" WHERE time >= {1} and time < {2} and sf_instance = \'{3}\''.format( query = 'SELECT mean(*) FROM "{0}"."autogen"."e2e_delays" WHERE time >= {1} and time < {2} and sf_instance = \'{3}\''.format(
influx_db_name, start_timestamp, end_timestamp, media_service) influx_db_name, start_timestamp, end_timestamp, media_service)
print(query) log.info(query)
result = db_client.query(query) result = db_client.query(query)
actual_result = next(result.get_points(), None) actual_result = next(result.get_points(), None)
......
...@@ -39,14 +39,14 @@ listen = localhost:9080 ...@@ -39,14 +39,14 @@ listen = localhost:9080
keys = root, clmcservice keys = root, clmcservice
[handlers] [handlers]
keys = console keys = console, filelog
[formatters] [formatters]
keys = generic keys = generic
[logger_root] [logger_root]
level = INFO level = INFO
handlers = console handlers = console, filelog
[logger_clmcservice] [logger_clmcservice]
level = DEBUG level = DEBUG
...@@ -59,5 +59,11 @@ args = (sys.stderr,) ...@@ -59,5 +59,11 @@ args = (sys.stderr,)
level = NOTSET level = NOTSET
formatter = generic formatter = generic
[handler_filelog]
class = FileHandler
args = ('/var/log/clmcservice/service.log','a')
level = INFO
formatter = generic
[formatter_generic] [formatter_generic]
format = %(asctime)s %(levelname)-5.5s [%(name)s:%(lineno)s][%(threadName)s] %(message)s format = %(asctime)s %(levelname)-5.5s [%(name)s:%(lineno)s][%(threadName)s] %(message)s
...@@ -34,14 +34,14 @@ listen = *:9080 ...@@ -34,14 +34,14 @@ listen = *:9080
keys = root, clmcservice keys = root, clmcservice
[handlers] [handlers]
keys = console keys = console, filelog
[formatters] [formatters]
keys = generic keys = generic
[logger_root] [logger_root]
level = WARN level = WARN
handlers = console handlers = console, filelog
[logger_clmcservice] [logger_clmcservice]
level = WARN level = WARN
...@@ -54,5 +54,11 @@ args = (sys.stderr,) ...@@ -54,5 +54,11 @@ args = (sys.stderr,)
level = NOTSET level = NOTSET
formatter = generic formatter = generic
[handler_filelog]
class = FileHandler
args = ('/var/log/clmcservice/service.log','a')
level = INFO
formatter = generic
[formatter_generic] [formatter_generic]
format = %(asctime)s %(levelname)-5.5s [%(name)s:%(lineno)s][%(threadName)s] %(message)s format = %(asctime)s %(levelname)-5.5s [%(name)s:%(lineno)s][%(threadName)s] %(message)s
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment