logview: playbook stats, tabular display, improved date filtering, and more

Signed-off-by: Francois Andrieu <darknao@fedoraproject.org>
This commit is contained in:
Francois Andrieu 2020-08-28 18:04:39 +02:00
parent 6d1d72ee20
commit 80f3f3b574

View file

@ -1,4 +1,4 @@
#!/usr/bin/python #!/usr/bin/python3
# vim: et ts=4 ai sw=4 sts=0 # vim: et ts=4 ai sw=4 sts=0
import sys import sys
import json import json
@ -10,43 +10,60 @@ from datetime import datetime, date, timedelta
import dateutil.parser as dateparser import dateutil.parser as dateparser
import configparser import configparser
from ansible.config.manager import find_ini_config_file from ansible.config.manager import find_ini_config_file
from ansible.utils.color import hostcolor, stringc
from ansible import constants as C
from collections import Counter from collections import Counter
# ANSI coloring is only worthwhile when stdout is an interactive terminal.
HAS_COLOR = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()

# Default location of the logdetail callback output; may be superseded by
# the ansible.cfg [callback_logdetail] log_path setting at runtime.
logpath = '/var/log/ansible'
default_search_terms = ['CHANGED', 'FAILED']


def _midnight():
    """Return today's date at 00:00:00.000000 as a datetime."""
    return datetime.today().replace(hour=0, minute=0, second=0, microsecond=0)


# Shortcut names accepted wherever a date argument can be supplied.
# Values are callables so the date is resolved at lookup time, not import time.
date_terms = {
    "today": _midnight,
    "yesterday": lambda: _midnight() - timedelta(1),
}
class bcolors:
BLUE = '\033[94m'
GREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
ERROR = '\033[41m'
WHITE = '\033[00m'
def format_state(category, txt=None):
    """Colorize *txt* according to the Ansible result *category*.

    When *txt* is omitted (or falsy) the category string itself is colorized.
    Unknown categories are wrapped in a no-op ANSI sequence so their printed
    width matches colored variants; with HAS_COLOR off, plain text is returned.
    """
    if not txt:
        txt = category
    # Substring match, checked in this order (first hit wins).
    palette = (
        ('OK', C.COLOR_OK),
        ('FAILED', C.COLOR_ERROR),
        ('CHANGED', C.COLOR_CHANGED),
        ('SKIPPED', C.COLOR_SKIP),
        ('UNREACHABLE', C.COLOR_UNREACHABLE),
    )
    for marker, color in palette:
        if marker in category:
            color_out = stringc(txt, color)
            break
    else:
        # This hack makes sure the text width is the same as any other colored text
        color_out = u'\x1b[0;00m%s\x1b[0m' % (txt,)
    if not HAS_COLOR:
        color_out = txt
    return color_out
def colorByStats(txt, stats):
    """Color *txt* by the most severe outcome recorded in *stats*.

    Severity order: failures > unreachable > changed; all-zero means OK.
    *stats* is a mapping (e.g. a Counter) with those keys.
    """
    severity = (
        ('failures', C.COLOR_ERROR),
        ('unreachable', C.COLOR_UNREACHABLE),
        ('changed', C.COLOR_CHANGED),
    )
    for key, color in severity:
        if stats[key] != 0:
            return stringc(txt, color)
    return stringc(txt, C.COLOR_OK)
def colorByCount(txt, count, color):
    """Render "<txt><count>", colorized only when the count is positive.

    A zero (or negative) count, or a non-tty stdout, yields plain text so
    uninteresting columns don't draw the eye.
    """
    label = "%s%s" % (txt, count)
    # Keep the short-circuit: HAS_COLOR is only consulted for positive counts.
    return stringc(label, color) if count > 0 and HAS_COLOR else label
def parse_info(infofile): def parse_info(infofile):
data = {} data = {}
with open(infofile) as f: with open(infofile) as f:
@ -63,6 +80,21 @@ def parse_info(infofile):
return data return data
def format_stats(stats):
    """Return a one-line colored summary ("ok:N chg:N unr:N fail:N") of a play recap.

    *stats* is a mapping with 'ok', 'changed', 'unreachable' and 'failures' keys.
    """
    fields = (
        ("ok:", 'ok', C.COLOR_OK),
        ("chg:", 'changed', C.COLOR_CHANGED),
        ("unr:", 'unreachable', C.COLOR_UNREACHABLE),
        ("fail:", 'failures', C.COLOR_ERROR),
    )
    return " ".join(
        colorByCount(label, stats[key], color) for label, key, color in fields)
def col_width(rows):
    """Return the display width to pad each column of *rows* to.

    Each width is the length of the longest cell in that column.  The last
    entry is forced to 0 so the final column is never right-padded.  Note the
    cells may contain ANSI escape sequences, which count toward the length —
    presumably every cell in a column is colored the same way; verify callers.

    rows: list of equal-length lists of strings (table rows).
    Returns: list of ints, one per column; [] when *rows* is empty
             (the previous version raised IndexError on empty input).
    """
    if not rows:
        return []
    # zip(*rows) transposes rows into columns.
    widths = [max(map(len, column)) for column in zip(*rows)]
    widths[-1] = 0  # don't pad last column
    return widths
def date_cheat(datestr):
    """Resolve a date shortcut ("today", "yesterday") or free-form date string.

    Known shortcuts come from the module-level date_terms table; anything else
    is handed to dateutil's parser.  Returns a datetime.
    """
    resolver = date_terms.get(datestr)
    if resolver is None:
        return dateparser.parse(datestr)
    return resolver()
@ -96,11 +128,14 @@ def parse_args(args):
opts.datestr = date_cheat(opts.datestr) opts.datestr = date_cheat(opts.datestr)
if not opts.search_terms: if not opts.search_terms:
opts.search_terms = search_terms opts.search_terms = default_search_terms
opts.search_terms = list(map(str.upper, opts.search_terms))
return opts, args return opts, args
def search_logs(opts, logfiles): def search_logs(opts, logfiles):
rows = []
headers = []
msg = '' msg = ''
for fn in sorted(logfiles): for fn in sorted(logfiles):
hostname = os.path.basename(fn).replace('.log', '').replace('.gz', '') hostname = os.path.basename(fn).replace('.log', '').replace('.gz', '')
@ -119,7 +154,7 @@ def search_logs(opts, logfiles):
for line in open_f: for line in open_f:
things = line.split('\t') things = line.split('\t')
if len(things) < 5: if len(things) < 5:
msg += "(logview error - unhandled line): %r\n" % line print("(logview error - unhandled line): %r\n" % line)
continue continue
# See callback_plugins/logdetail.py for how these lines get created. # See callback_plugins/logdetail.py for how these lines get created.
@ -128,6 +163,7 @@ def search_logs(opts, logfiles):
if category in opts.search_terms or 'ANY' in opts.search_terms: if category in opts.search_terms or 'ANY' in opts.search_terms:
dur = None dur = None
last_col = ""
slurp = json.loads(data) slurp = json.loads(data)
if opts.profile: if opts.profile:
st = slurp.get('task_start', 0) st = slurp.get('task_start', 0)
@ -135,52 +171,48 @@ def search_logs(opts, logfiles):
if st and end: if st and end:
dur = '%.2fs' % (float(end) - float(st)) dur = '%.2fs' % (float(end) - float(st))
state = format_state(category) state = colorByCat(category)
c_hostname = format_state(category, hostname) c_hostname = colorByCat(category, hostname)
if category == "STATS": if "STATS" in category:
if type(slurp) == dict: if type(slurp) == dict:
# ok=2 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0 name = format_stats(slurp)
name = 'o:%s c:%s u:%s f:%s s:%s r:%s i:%s' % ( c_hostname = colorByStats(hostname, slurp)
slurp.get("ok", 0), state = colorByStats(category, slurp)
slurp.get("changed", 0),
slurp.get("unreachable", 0),
slurp.get("failures", 0), result = [timestamp, c_hostname, task_ts, count, state]
slurp.get("skipped", 0),
slurp.get("rescued", 0),
slurp.get("ignored", 0))
if not name: if not name:
name = slurp.get("task_module") name = slurp.get("task_module")
result.append(name)
msg += '{0: <10} {1: <30} {2: <22} {3: <4} {4: <25}'.format(
timestamp, c_hostname, task_ts, count, state)
if dur: if dur:
msg += ' {0: <8}'.format(dur,) last_col += "%s " % (dur,)
if name:
msg += ' %s' % (name,)
if not opts.verbose: if not opts.verbose:
if type(slurp) == dict: if type(slurp) == dict:
for term in ['cmd',]: for term in ['cmd',]:
if term in slurp: if term in slurp:
msg += '\t%s:%s' % (term, slurp.get(term, None)) last_col += '\t%s:%s' % (term, slurp.get(term, None))
if opts.message: if opts.message:
for term in ['msg',]: for term in ['msg', 'stdout']:
if term in slurp: if term in slurp:
value = slurp.get(term, None).strip() value = slurp.get(term, None)
if type(value) is list:
value = "\n".join(value)
if value: if value:
msg += '\n%s: %s\n' % (term, format_state(category, value)) last_col += '\n%s: %s\n' % (term, colorByCat(category, value.strip()))
msg += '\n'
else: else:
msg += '\n' last_col += '\n'
msg += json.dumps(slurp, indent=4) last_col += json.dumps(slurp, indent=4)
msg += '\n' last_col += '\n'
result.append(last_col)
rows.append(result)
return msg return rows
def main(args): def main(args):
@ -190,36 +222,65 @@ def main(args):
cp.read(cfg) cp.read(cfg)
logpath = cp.get('callback_logdetail', "log_path", fallback="/var/log/ansible") logpath = cp.get('callback_logdetail', "log_path", fallback="/var/log/ansible")
opts, args = parse_args(args) opts, args = parse_args(args)
rows = []
# List play summary
if opts.list_pb or opts.list_all_pb: if opts.list_pb or opts.list_all_pb:
rows.append([ "Date", colorByCat("", "Playbook"), "Ran By", "Hosts", "Stats"])
for r,d,f in os.walk(logpath): for r,d,f in os.walk(logpath):
for file in f: for file in f:
if file.endswith('.info'): if file.endswith('.info'):
pb = parse_info(os.path.join(r,file)) pb = parse_info(os.path.join(r,file))
pb_name = os.path.splitext(os.path.basename(pb['playbook']))[0] pb_name = os.path.splitext(os.path.basename(pb['playbook']))[0]
pb_date = datetime.fromtimestamp(pb['playbook_start']) pb_date = datetime.fromtimestamp(pb['playbook_start'])
if opts.list_all_pb or opts.datestr == pb_date.date(): if (
opts.list_all_pb
or (
opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0)
and opts.datestr == pb_date)
or (
opts.datestr == opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0)
and opts.datestr.date() == pb_date.date())):
stats = Counter() stats = Counter()
for stat in pb['stats'].values(): hosts = []
del stat['task_userid'] if "stats" in pb:
stats += Counter(stat) for host, stat in pb['stats'].items():
del stat['task_userid']
print("%s\tplaybook:%s\tran by:%s\tsummary: ok:%s chd:%s unr:%s faild:%s" stats += Counter(stat)
% (pb_date, pb_name, pb['userid'], hosts.append(host)
stats['ok'], stats['changed'], stats['unreachable'], host_count = len(set(hosts))
stats['failures'])) pb_name = colorByStats(pb_name, stats)
summary = format_stats(stats)
# summary = "ok:%s chd:%s unr:%s faild:%s" % (stats['ok'], stats['changed'], stats['unreachable'], stats['failures'])
rows.append([ str(pb_date), pb_name, pb['userid'], str(host_count), summary ])
m_widths = col_width(rows)
if len(rows) <= 1:
print("no log")
else:
for row in rows:
print(" ".join((val.ljust(width) for val, width in zip(row, m_widths))).strip())
# Play detail
else: else:
for pb in glob.glob(os.path.join(logpath, opts.playbook)): for pb in glob.glob(os.path.join(logpath, opts.playbook)):
pb_name = os.path.basename(pb) pb_name = os.path.basename(pb)
for pb_logdir in glob.glob(os.path.join(pb, opts.datestr.strftime("%Y/%m/%d"))): for pb_logdir in glob.glob(os.path.join(pb, opts.datestr.strftime("%Y/%m/%d"))):
logfiles = glob.glob(pb_logdir + '/*/*.log*') if opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0):
msg = search_logs(opts, logfiles) logfiles = glob.glob(pb_logdir + '/' + opts.datestr.strftime("%H.%M.%S") + '/*.log*')
if msg: else:
logfiles = glob.glob(pb_logdir + '/*/*.log*')
rows = search_logs(opts, logfiles)
if rows:
m_widths = col_width(rows)
print(pb_name) print(pb_name)
print(msg) for row in rows:
print(" ".join((val.ljust(width) for val, width in zip(row, m_widths))))
#print(pb_name)
#print(msg)
if __name__ == "__main__": if __name__ == "__main__":
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))