logview: migrate optparse to argparse, add additional date filtering capabilities, and some pep8 fixes

Francois Andrieu 2020-10-28 16:45:29 +01:00
parent 88b07454b7
commit 5e91005e93
2 changed files with 110 additions and 71 deletions
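For context, the new date filters compose with the existing flags in the same style as the usage examples already embedded in the script; hypothetical invocations:

    logview --since yesterday -p mirrorlist   # events since midnight yesterday
    logview --all -l                          # summarize every recorded run
    logview -d 2020-10-27 -s FAILED           # failures from a specific date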

callback_plugins/logdetail.py

@@ -15,6 +15,10 @@
 # You should have received a copy of the GNU General Public License
 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
 
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
 DOCUMENTATION = r'''
     callback: logdetail
     callback_type: notification
@@ -48,9 +52,10 @@ except ImportError:
     # Ansible v1 compat
     CallbackBase = object
 
-TIME_FORMAT="%b %d %Y %H:%M:%S"
-MSG_FORMAT="%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
+TIME_FORMAT = "%b %d %Y %H:%M:%S"
+MSG_FORMAT = "%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
 
 
 def getlogin():
     try:
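For reference, MSG_FORMAT (unchanged here apart from pep8 spacing) is the contract between this plugin and the logview script, which splits each log line on tabs; a minimal sketch with assumed values:

    line = MSG_FORMAT % {'now': 'Oct 28 2020 16:45:29', 'count': 3,
                         'category': 'CHANGED', 'name': 'command',
                         'data': '{"cmd": "uptime"}'}
    # 'Oct 28 2020 16:45:29\t3\tCHANGED\tcommand\t{"cmd": "uptime"}\n'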
@@ -59,6 +64,7 @@ def getlogin():
         user = pwd.getpwuid(os.geteuid())[0]
     return user
 
+
 class LogMech(object):
     def __init__(self, logpath):
         self.started = time.time()
@@ -91,13 +97,13 @@ class LogMech(object):
     def logpath_play(self):
         # this is all to get our path to look nice ish
         tstamp = time.strftime('%Y/%m/%d/%H.%M.%S', time.localtime(self.started))
         path = os.path.normpath(self.logpath + '/' + self.playbook_id + '/' + tstamp + '/')
         if not os.path.exists(path):
             try:
                 os.makedirs(path)
             except OSError as e:
-                if e.errno != 17: # if it is not dir exists then raise it up
+                if e.errno != 17:  # if it is not dir exists then raise it up
                     raise
         return path
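For orientation, the logpath_play property pins each run to a YYYY/MM/DD/HH.MM.SS directory, the layout the new date filtering in logview relies on; an assumed example:

    # /var/log/ansible/<playbook_id>/2020/10/28/16.45.29/
    #   holds the per-host logs (*.log, possibly gzipped) and the run's
    #   .info summary that logview's -l mode reads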
@@ -132,18 +138,17 @@ class LogMech(object):
             host = 'HOSTMISSING'
 
         if type(data) == dict:
-            name = data.get('module_name',None)
+            name = data.get('module_name', None)
         else:
             name = "unknown"
 
         # we're in setup - move the invocation info up one level
         if 'invocation' in data:
             invoc = data['invocation']
             if not name and 'module_name' in invoc:
                 name = invoc['module_name']
 
-            #don't add this since it can often contain complete passwords :(
+            # don't add this since it can often contain complete passwords :(
             del(data['invocation'])
 
         if task:
@@ -160,7 +165,7 @@ class LogMech(object):
         if self.play_info.get('check', False) and self.play_info.get('diff', False):
             category = 'CHECK_DIFF:' + category
         elif self.play_info.get('check', False):
             category = 'CHECK:' + category
 
         # Sometimes this is None.. othertimes it's fine. Othertimes it has
@@ -175,7 +180,6 @@ class LogMech(object):
             fd.close()
 
 
-
 class CallbackModule(CallbackBase):
     """
     logs playbook results, per host, in /var/log/ansible/hosts
@@ -227,7 +231,8 @@ class CallbackModule(CallbackBase):
     def v2_playbook_on_task_start(self, task, is_conditional):
         self.task = task
-        self.task._name = task.name
+        if self.task:
+            self.task._name = task.get_name().strip()
         self.logmech._last_task_start = time.time()
         self._task_count += 1
@@ -264,8 +269,9 @@ class CallbackModule(CallbackBase):
             pb_info['extra_vars'] = play._variable_manager.extra_vars
             pb_info['inventory'] = play._variable_manager._inventory._sources
             pb_info['playbook_checksum'] = secure_hash(path)
-            pb_info['check'] = self.play_context.check_mode
-            pb_info['diff'] = self.play_context.diff
+            if hasattr(self, "play_context"):
+                pb_info['check'] = self.play_context.check_mode
+                pb_info['diff'] = self.play_context.diff
             self.logmech.play_log(json.dumps(pb_info, indent=4))
 
         self._play_count += 1
@@ -273,17 +279,17 @@ class CallbackModule(CallbackBase):
         info = {}
         info['play'] = play.name
         info['hosts'] = play.hosts
-        info['transport'] = str(self.play_context.connection)
         info['number'] = self._play_count
-        info['check'] = self.play_context.check_mode
-        info['diff'] = self.play_context.diff
+        if hasattr(self, "play_context"):
+            info['transport'] = str(self.play_context.connection)
+            info['check'] = self.play_context.check_mode
+            info['diff'] = self.play_context.diff
         self.logmech.play_info = info
         try:
             self.logmech.play_log(json.dumps(info, indent=4))
         except TypeError:
             print(("Failed to conver to JSON:", info))
 
     def v2_playbook_on_stats(self, stats):
         results = {}
         for host in list(stats.processed.keys()):
@@ -292,5 +298,3 @@ class CallbackModule(CallbackBase):
         self.logmech.play_log(json.dumps({'stats': results}, indent=4))
         self.logmech.play_log(json.dumps({'playbook_end': time.time()}, indent=4))
         print(('logs written to: %s' % self.logmech.logpath_play))
-
-

logview

@@ -1,16 +1,23 @@
-#!/usr/bin/python3
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
 # vim: et ts=4 ai sw=4 sts=0
 
 import sys
 import json
-from optparse import OptionParser
+from argparse import ArgumentParser
 import os
+import re
 import glob
 import gzip
-from datetime import datetime, date, timedelta
+from datetime import datetime, timedelta
 import dateutil.parser as dateparser
-import configparser
+try:
+    # Python3
+    import configparser
+except ImportError:
+    # Python2
+    import ConfigParser as configparser
 from ansible.config.manager import find_ini_config_file
-from ansible.utils.color import hostcolor, stringc
+from ansible.utils.color import stringc
 from ansible import constants as C
 from collections import Counter
@@ -19,11 +26,13 @@ if not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
 else:
     HAS_COLOR = True
 
-logpath = '/var/log/ansible'
+DEFAULT_LOGPATH = '/var/log/ansible'
 default_search_terms = ['CHANGED', 'FAILED']
 date_terms = {
-    "today": lambda: datetime.today().replace(hour=0, minute=0, second=0, microsecond=0),
-    "yesterday": lambda: datetime.today().replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(1),
+    "today": lambda: datetime.today().replace(
+        hour=0, minute=0, second=0, microsecond=0),
+    "yesterday": lambda: datetime.today().replace(
+        hour=0, minute=0, second=0, microsecond=0) - timedelta(1),
 }
@@ -44,7 +53,7 @@ def colorByCat(category, txt=None):
         # This hack make sure the text width is the same as any other colored text
         color_out = u'\x1b[0;00m%s\x1b[0m' % (txt,)
     if not HAS_COLOR:
         color_out = txt
     return color_out
@@ -58,12 +67,14 @@ def colorByStats(txt, stats):
     else:
         return stringc(txt, C.COLOR_OK)
 
+
 def colorByCount(txt, count, color):
     s = "%s%s" % (txt, count)
     if count > 0 and HAS_COLOR:
         s = stringc(s, color)
     return s
 
+
 def parse_info(infofile):
     data = {}
     with open(infofile) as f:
@@ -87,19 +98,26 @@ def format_stats(stats):
             colorByCount("unr:", stats['unreachable'], C.COLOR_UNREACHABLE),
             colorByCount("fail:", stats['failures'], C.COLOR_ERROR))
 
+
 def col_width(rows):
     widths = []
     for col in zip(*(rows)):
-        col_width = max(map(len,col))
+        col_width = max(map(len, col))
         widths.append(col_width)
     widths[-1] = 0  # don't pad last column
     return widths
 
+
 def date_cheat(datestr):
     dc = date_terms.get(datestr, lambda: dateparser.parse(datestr))
     return dc()
 
+
+def date_from_path(path):
+    date_comp = re.search(r'/(\d{4})/(\d{2})/(\d{2})', path)
+    return datetime(*map(int, date_comp.groups()))
+
+
 def parse_args(args):
     usage = """
     logview [options] [-d datestr] [-p playbook]
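A quick sanity check of the new date_from_path() helper against that on-disk layout (hypothetical path):

    date_from_path('/var/log/ansible/mirrorlist/2020/10/28/16.45.29')
    # -> datetime.datetime(2020, 10, 28, 0, 0); the time of day is dropped,
    #    so --since comparisons effectively work at day granularity

date_cheat() complements it on the input side: 'today' and 'yesterday' hit the date_terms table, and anything else falls through to dateutil.parser.parse().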
@@ -114,53 +132,56 @@ def parse_args(args):
     logview -s ANY -d yesterday -p mirrorlist # list all events from the mirrorlist playbook
     """
-    parser = OptionParser(usage=usage)
-    parser.add_option("-d", default='today', dest='datestr', help="time string of when you want logs")
-    parser.add_option("-p", default='*', dest='playbook', help="the playbook you want to look for")
-    parser.add_option("-H", default=[], dest='hostname', action='append', help="Limit to the specified hostname")
-    parser.add_option("-m", default=False, dest='message', action='store_true', help='Show tasks output')
-    parser.add_option("-v", default=False, dest='verbose', action='store_true', help='Verbose')
-    parser.add_option("-s", default=[], dest='search_terms', action='append', help="status to search for")
-    parser.add_option("-l", default=False, dest="list_pb", action='store_true', help="list playbooks for a specific date")
-    parser.add_option("-L", default=False, dest="list_all_pb", action='store_true', help="list all playbooks ever ran")
-    parser.add_option("--profile", default=False, dest="profile", action='store_true', help="output timing input per task")
-    (opts, args) = parser.parse_args(args)
+    parser = ArgumentParser(usage=usage)
+    date_group = parser.add_mutually_exclusive_group()
+    date_group.add_argument("-d", default='today', dest='datestr', help="display logs from specified date")
+    date_group.add_argument("--since", dest="since", help="display logs since specified date")
+    date_group.add_argument("--all", default=False, dest="list_all", action='store_true', help="display all logs")
+    parser.add_argument("-p", default='*', dest='playbook', help="the playbook you want to look for")
+    parser.add_argument("-H", default=[], dest='hostname', action='append', help="Limit to the specified hostname")
+    parser.add_argument("-m", default=False, dest='message', action='store_true', help='Show tasks output')
+    parser.add_argument("-v", default=False, dest='verbose', action='store_true', help='Verbose')
+    parser.add_argument("-s", default=[], dest='search_terms', action='append', help="status to search for")
+    parser.add_argument("-l", default=False, dest="list_pb", action='store_true', help="list playbook runs")
+    parser.add_argument("--profile", default=False, dest="profile", action='store_true', help="output timing input per task")
+    opts = parser.parse_args(args)
 
     opts.datestr = date_cheat(opts.datestr)
     if not opts.search_terms:
         opts.search_terms = default_search_terms
+    if opts.since:
+        opts.since = date_cheat(opts.since)
     opts.search_terms = list(map(str.upper, opts.search_terms))
-    return opts, args
+    return opts
 
 
 def search_logs(opts, logfiles):
     rows = []
-    headers = []
     msg = ''
     for fn in sorted(logfiles):
         hostname = os.path.basename(fn).replace('.log', '').replace('.gz', '')
         timestamp = os.path.basename(os.path.dirname(fn))
         if opts.hostname and hostname not in opts.hostname:
             continue
         try:
             with gzip.open(fn) as f:
                 f.read()
             open_f = gzip.open(fn, "rt")
-        except:
+        except IOError:
             open_f = open(fn)
         for line in open_f:
             things = line.split('\t')
             if len(things) < 5:
-                print("(logview error - unhandled line): %r\n" % line)
+                msg += "(logview error - unhandled line): %r\n" % line
                 continue
             # See callback_plugins/logdetail.py for how these lines get created.
             # MSG_FORMAT="%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
             task_ts, count, category, name, data = things
             if category in opts.search_terms or 'ANY' in opts.search_terms:
                 dur = None
                 last_col = ""
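A standalone sketch (assumed, not part of the commit) of what the mutually exclusive group buys over the optparse version:

    from argparse import ArgumentParser

    parser = ArgumentParser()
    date_group = parser.add_mutually_exclusive_group()
    date_group.add_argument("-d", default='today', dest='datestr')
    date_group.add_argument("--since", dest="since")
    date_group.add_argument("--all", default=False, dest="list_all", action='store_true')

    parser.parse_args(["--since", "yesterday"])
    # Namespace(datestr='today', list_all=False, since='yesterday')
    parser.parse_args(["-d", "today", "--all"])
    # error: argument --all: not allowed with argument -d (argparse exits)

Note that ArgumentParser.parse_args() returns a single Namespace rather than optparse's (opts, args) tuple, hence the new "return opts" and the updated caller in main().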
@@ -170,7 +191,7 @@ def search_logs(opts, logfiles):
                     end = slurp.get('task_end', 0)
                     if st and end:
                         dur = '%.2fs' % (float(end) - float(st))
 
                 state = colorByCat(category)
                 c_hostname = colorByCat(category, hostname)
@@ -180,11 +201,14 @@ def search_logs(opts, logfiles):
                     c_hostname = colorByStats(hostname, slurp)
                     state = colorByStats(category, slurp)
 
                 result = [timestamp, c_hostname, task_ts, count, state]
                 if not name:
                     name = slurp.get("task_module")
+                try:
+                    name = name.decode('utf8')
+                except AttributeError:
+                    pass
                 result.append(name)
                 if dur:
@@ -192,18 +216,18 @@ def search_logs(opts, logfiles):
                 if not opts.verbose:
                     if type(slurp) == dict:
-                        for term in ['cmd',]:
+                        for term in ['cmd', ]:
                             if term in slurp:
                                 last_col += '\t%s:%s' % (term, slurp.get(term, None))
                         if opts.message:
                             for term in ['msg', 'stdout']:
                                 if term in slurp:
                                     value = slurp.get(term, None)
                                     if type(value) is list:
                                         value = "\n".join(value)
                                     if value:
                                         last_col += '\n%s: %s\n' % (term, colorByCat(category, value.strip()))
                 else:
                     last_col += '\n'
                     last_col += json.dumps(slurp, indent=4)
@@ -211,7 +235,7 @@ def search_logs(opts, logfiles):
                 result.append(last_col)
                 rows.append(result)
 
     return rows
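The narrowed "except IOError:" still catches the gzip probe's failure on plain-text logs, since IOError is an alias of OSError on Python 3 and gzip raises OSError subclasses for non-gzip input; a minimal sketch with an assumed filename:

    import gzip

    fn = 'host1.log'                  # hypothetical per-host log
    try:
        with gzip.open(fn) as f:
            f.read()                  # raises on non-gzip content
        open_f = gzip.open(fn, 'rt')
    except IOError:
        open_f = open(fn)             # fall back to plain text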
@@ -220,21 +244,26 @@ def main(args):
     if cfg:
         cp = configparser.ConfigParser()
         cp.read(cfg)
-        logpath = cp.get('callback_logdetail', "log_path", fallback="/var/log/ansible")
-    opts, args = parse_args(args)
+        try:
+            logpath = cp.get('callback_logdetail', "log_path")
+        except configparser.NoSectionError:
+            logpath = DEFAULT_LOGPATH
+    opts = parse_args(args)
 
     rows = []
     # List play summary
-    if opts.list_pb or opts.list_all_pb:
-        rows.append([ "Date", colorByCat("", "Playbook"), "Ran By", "Hosts", "Stats"])
-        for r,d,f in os.walk(logpath):
+    if opts.list_pb:
+        rows.append(["Date", colorByCat("", "Playbook"), "Ran By", "Hosts", "Stats"])
+        for r, d, f in os.walk(logpath):
+            if opts.since and f and date_from_path(r) < opts.since:
+                continue
             for file in f:
                 if file.endswith('.info'):
-                    pb = parse_info(os.path.join(r,file))
+                    pb = parse_info(os.path.join(r, file))
                     pb_name = os.path.splitext(os.path.basename(pb['playbook']))[0]
                     pb_date = datetime.fromtimestamp(pb['playbook_start'])
                     if (
-                        opts.list_all_pb
+                        opts.list_all or opts.since
                         or (
                             opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0)
                             and opts.datestr == pb_date)
@@ -253,8 +282,7 @@ def main(args):
                     summary = format_stats(stats)
                     # summary = "ok:%s chd:%s unr:%s faild:%s" % (stats['ok'], stats['changed'], stats['unreachable'], stats['failures'])
 
-                    rows.append([ str(pb_date), pb_name, pb['userid'], str(host_count), summary ])
+                    rows.append([str(pb_date), pb_name, pb['userid'], str(host_count), summary])
         m_widths = col_width(rows)
         if len(rows) <= 1:
@@ -263,12 +291,19 @@ def main(args):
         for row in rows:
             print(" ".join((val.ljust(width) for val, width in zip(row, m_widths))).strip())
 
     # Play detail
     else:
         for pb in glob.glob(os.path.join(logpath, opts.playbook)):
             pb_name = os.path.basename(pb)
-            for pb_logdir in glob.glob(os.path.join(pb, opts.datestr.strftime("%Y/%m/%d"))):
+            if opts.list_all or opts.since:
+                date_glob = glob.glob(os.path.join(pb, "*/*/*"))
+            else:
+                date_glob = glob.glob(os.path.join(pb, opts.datestr.strftime("%Y/%m/%d")))
+            for pb_logdir in date_glob:
+                if opts.since:
+                    run_date = date_from_path(pb_logdir)
+                    if run_date < opts.since:
+                        continue
                 if opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0):
                     logfiles = glob.glob(pb_logdir + '/' + opts.datestr.strftime("%H.%M.%S") + '/*.log*')
                 else:
@@ -276,11 +311,11 @@ def main(args):
                 rows = search_logs(opts, logfiles)
                 if rows:
                     m_widths = col_width(rows)
-                    print(pb_name)
+                    print("%s\n-------" % (pb_name,))
                     for row in rows:
                         print(" ".join((val.ljust(width) for val, width in zip(row, m_widths))))
-                    #print(pb_name)
-                    #print(msg)
+                    print("")
 
 
 if __name__ == "__main__":
     sys.exit(main(sys.argv[1:]))