| |
@@ -1,23 +1,126 @@
|
| |
- #!/usr/bin/python3
|
| |
+ #!/usr/bin/python
|
| |
+ # -*- coding: utf-8 -*-
|
| |
+ # vim: et ts=4 ai sw=4 sts=0
|
| |
import sys
|
| |
import json
|
| |
- from optparse import OptionParser
|
| |
+ from argparse import ArgumentParser
|
| |
import os
|
| |
+ import re
|
| |
import glob
|
| |
- from datetime import date, timedelta
|
| |
+ import gzip
|
| |
+ from datetime import datetime, timedelta
|
| |
import dateutil.parser as dateparser
|
| |
+ try:
|
| |
+ # Python3
|
| |
+ import configparser
|
| |
+ except ImportError:
|
| |
+ # Python2
|
| |
+ import ConfigParser as configparser
|
| |
+ from ansible.config.manager import find_ini_config_file
|
| |
+ from ansible.utils.color import stringc
|
| |
+ from ansible import constants as C
|
| |
+ from collections import Counter
|
| |
|
| |
- logpath = '/var/log/ansible'
|
| |
- search_terms = ['CHANGED', 'FAILED']
|
| |
+ if not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
|
| |
+ HAS_COLOR = False
|
| |
+ else:
|
| |
+ HAS_COLOR = True
|
| |
+
|
| |
+ DEFAULT_LOGPATH = '/var/log/ansible'
|
| |
+ default_search_terms = ['CHANGED', 'FAILED']
|
| |
date_terms = {
|
| |
- "today": date.today,
|
| |
- "yesterday": lambda: date.today() - timedelta(1),
|
| |
+ "today": lambda: datetime.today().replace(
|
| |
+ hour=0, minute=0, second=0, microsecond=0),
|
| |
+ "yesterday": lambda: datetime.today().replace(
|
| |
+ hour=0, minute=0, second=0, microsecond=0) - timedelta(1),
|
| |
}
|
| |
|
| |
|
| |
+ def colorByCat(category, txt=None):
|
| |
+ if not txt:
|
| |
+ txt = category
|
| |
+ if 'OK' in category:
|
| |
+ color_out = stringc(txt, C.COLOR_OK)
|
| |
+ elif "FAILED" in category:
|
| |
+ color_out = stringc(txt, C.COLOR_ERROR)
|
| |
+ elif "CHANGED" in category:
|
| |
+ color_out = stringc(txt, C.COLOR_CHANGED)
|
| |
+ elif "SKIPPED" in category:
|
| |
+ color_out = stringc(txt, C.COLOR_SKIP)
|
| |
+ elif "UNREACHABLE" in category:
|
| |
+ color_out = stringc(txt, C.COLOR_UNREACHABLE)
|
| |
+ else:
|
| |
+ # This hack makes sure the text width is the same as any other colored text
|
| |
+ color_out = u'\x1b[0;00m%s\x1b[0m' % (txt,)
|
| |
+ if not HAS_COLOR:
|
| |
+ color_out = txt
|
| |
+ return color_out
|
| |
+
|
| |
+
|
| |
+ def colorByStats(txt, stats):
|
| |
+ if stats['failures'] != 0:
|
| |
+ return stringc(txt, C.COLOR_ERROR)
|
| |
+ elif stats['unreachable'] != 0:
|
| |
+ return stringc(txt, C.COLOR_UNREACHABLE)
|
| |
+ elif stats['changed'] != 0:
|
| |
+ return stringc(txt, C.COLOR_CHANGED)
|
| |
+ else:
|
| |
+ return stringc(txt, C.COLOR_OK)
|
| |
+
|
| |
+
|
| |
+ def colorByCount(txt, count, color):
|
| |
+ s = "%s%s" % (txt, count)
|
| |
+ if count > 0 and HAS_COLOR:
|
| |
+ s = stringc(s, color)
|
| |
+ return s
|
| |
+
|
| |
+
|
| |
+ def parse_info(infofile):
|
| |
+ data = {}
|
| |
+ with open(infofile) as f:
|
| |
+ content = f.read()
|
| |
+ obj_list = [x+'}' for x in content.split('\n}')]
|
| |
+ plays = []
|
| |
+ for obj in obj_list[:-1]:
|
| |
+ js = json.loads(obj)
|
| |
+ if 'play' in js:
|
| |
+ plays.append(js)
|
| |
+ else:
|
| |
+ data.update(json.loads(obj))
|
| |
+ data['plays'] = plays
|
| |
+ return data
|
| |
+
|
| |
+
|
| |
+ def format_stats(stats):
|
| |
+ return "%s %s %s %s" % (
|
| |
+ colorByCount("ok:", stats['ok'], C.COLOR_OK),
|
| |
+ colorByCount("chg:", stats['changed'], C.COLOR_CHANGED),
|
| |
+ colorByCount("unr:", stats['unreachable'], C.COLOR_UNREACHABLE),
|
| |
+ colorByCount("fail:", stats['failures'], C.COLOR_ERROR))
|
| |
+
|
| |
+
|
| |
+ def col_width(rows):
|
| |
+ widths = []
|
| |
+ for col in zip(*(rows)):
|
| |
+ col_width = max(map(len, col))
|
| |
+ widths.append(col_width)
|
| |
+ widths[-1] = 0 # don't pad last column
|
| |
+ return widths
|
| |
+
|
| |
+
|
| |
def date_cheat(datestr):
|
| |
dc = date_terms.get(datestr, lambda: dateparser.parse(datestr))
|
| |
- return dc().strftime("%Y/%m/%d")
|
| |
+ return dc()
|
| |
+
|
| |
+
|
| |
+ def date_from_path(path):
|
| |
+ date_comp = re.search(r'/(\d{4})/(\d{2})/(\d{2})', path)
|
| |
+ return datetime(*map(int, date_comp.groups()))
|
| |
+
|
| |
+
|
| |
+ def datetime_from_path(path):
|
| |
+ date_comp = re.search(r'/(\d{4})/(\d{2})/(\d{2})/(\d{2})\.(\d{2})\.(\d{2})', path)
|
| |
+ return datetime(*map(int, date_comp.groups()))
|
| |
|
| |
|
| |
def parse_args(args):
|
| |
@@ -33,29 +136,49 @@
|
| |
|
| |
logview -s ANY -d yesterday -p mirrorlist # list all events from the mirrorlist playbook
|
| |
|
| |
-
|
| |
"""
|
| |
- parser = OptionParser(usage=usage)
|
| |
- parser.add_option("-d", default='today', dest='datestr', help="time string of when you want logs")
|
| |
- parser.add_option("-p", default='*', dest='playbook', help="the playbook you want to look for")
|
| |
- parser.add_option("-v", default=False, dest='verbose', action='store_true', help='Verbose')
|
| |
- parser.add_option("-s", default=[], dest='search_terms', action='append', help="status to search for")
|
| |
- parser.add_option("-l", default=False, dest="list_pb", action='store_true', help="list playbooks for a specific date")
|
| |
- parser.add_option("--profile", default=False, dest="profile", action='store_true', help="output timing input per task")
|
| |
- (opts, args) = parser.parse_args(args)
|
| |
+ parser = ArgumentParser(usage=usage)
|
| |
+ date_group = parser.add_mutually_exclusive_group()
|
| |
+ date_group.add_argument("-d", default='today', dest='datestr', help="display logs from specified date")
|
| |
+ date_group.add_argument("--since", dest="since", help="display logs since specified date")
|
| |
+ date_group.add_argument("--all", default=False, dest="list_all", action='store_true', help="display all logs")
|
| |
+ parser.add_argument("-p", default='*', dest='playbook', help="the playbook you want to look for")
|
| |
+ parser.add_argument("-H", default=[], dest='hostname', action='append', help="Limit to the specified hostname")
|
| |
+ parser.add_argument("-m", default=False, dest='message', action='store_true', help='Show tasks output')
|
| |
+ parser.add_argument("-v", default=False, dest='verbose', action='store_true', help='Verbose')
|
| |
+ parser.add_argument("-s", default=[], dest='search_terms', action='append', help="status to search for")
|
| |
+ parser.add_argument("-l", default=False, dest="list_pb", action='store_true', help="list playbook runs")
|
| |
+ parser.add_argument("--profile", default=False, dest="profile", action='store_true', help="output timing input per task")
|
| |
+ opts = parser.parse_args(args)
|
| |
|
| |
opts.datestr = date_cheat(opts.datestr)
|
| |
if not opts.search_terms:
|
| |
- opts.search_terms = search_terms
|
| |
- return opts, args
|
| |
+ opts.search_terms = default_search_terms
|
| |
+ if opts.since:
|
| |
+ opts.since = date_cheat(opts.since)
|
| |
+ opts.search_terms = list(map(str.upper, opts.search_terms))
|
| |
+ return opts
|
| |
|
| |
|
| |
def search_logs(opts, logfiles):
|
| |
+ rows = [("Play Date", colorByCat("Hostname"), "Task Time", "Id", colorByCat("State"), "Task Name", "")]
|
| |
+ # rows = []
|
| |
msg = ''
|
| |
for fn in sorted(logfiles):
|
| |
- hostname = os.path.basename(fn).replace('.log', '')
|
| |
- timestamp = os.path.basename(os.path.dirname(fn))
|
| |
- for line in open(fn):
|
| |
+ hostname = os.path.basename(fn).replace('.log', '').replace('.gz', '')
|
| |
+ timestamp = datetime_from_path(fn).strftime("%a %b %d %Y %H:%M:%S")
|
| |
+
|
| |
+ if opts.hostname and hostname not in opts.hostname:
|
| |
+ continue
|
| |
+
|
| |
+ try:
|
| |
+ with gzip.open(fn) as f:
|
| |
+ f.read()
|
| |
+ open_f = gzip.open(fn, "rt")
|
| |
+ except IOError:
|
| |
+ open_f = open(fn)
|
| |
+
|
| |
+ for line in open_f:
|
| |
things = line.split('\t')
|
| |
if len(things) < 5:
|
| |
msg += "(logview error - unhandled line): %r\n" % line
|
| |
@@ -66,51 +189,137 @@
|
| |
task_ts, count, category, name, data = things
|
| |
|
| |
if category in opts.search_terms or 'ANY' in opts.search_terms:
|
| |
+ dur = None
|
| |
+ last_col = ""
|
| |
slurp = json.loads(data)
|
| |
if opts.profile:
|
| |
st = slurp.get('task_start', 0)
|
| |
end = slurp.get('task_end', 0)
|
| |
if st and end:
|
| |
- dur = '%.2f' % (float(end) - float(st))
|
| |
- else:
|
| |
- dur = None
|
| |
+ dur = '%.2fs' % (float(end) - float(st))
|
| |
+
|
| |
+ state = colorByCat(category)
|
| |
+ c_hostname = colorByCat(category, hostname)
|
| |
+
|
| |
+ if "STATS" in category:
|
| |
+ if type(slurp) == dict:
|
| |
+ name = format_stats(slurp)
|
| |
+ c_hostname = colorByStats(hostname, slurp)
|
| |
+ state = colorByStats(category, slurp)
|
| |
|
| |
- msg += '%s\t%s\t%s\t%s\t%s\t%s' % (
|
| |
- timestamp, hostname, task_ts, count, category, name)
|
| |
+ result = [timestamp, c_hostname, task_ts, count, state]
|
| |
+
|
| |
+ if not name:
|
| |
+ name = slurp.get("task_module")
|
| |
+ try:
|
| |
+ name = name.decode('utf8')
|
| |
+ except AttributeError:
|
| |
+ pass
|
| |
+ result.append(name)
|
| |
+
|
| |
+ if dur:
|
| |
+ last_col += "%s " % (dur,)
|
| |
|
| |
if not opts.verbose:
|
| |
if type(slurp) == dict:
|
| |
- for term in ['task_userid', 'cmd']:
|
| |
+ for term in ['cmd', ]:
|
| |
if term in slurp:
|
| |
- msg += '\t%s:%s' % (term, slurp.get(term, None))
|
| |
- if opts.profile and dur:
|
| |
- msg += '\t%s:%s' % ('dur', dur)
|
| |
+ last_col += '\t%s:%s' % (term, slurp.get(term, None))
|
| |
|
| |
- msg += '\n'
|
| |
+ if opts.message:
|
| |
+ for term in ['msg', 'stdout']:
|
| |
+ if term in slurp:
|
| |
+ value = slurp.get(term, None)
|
| |
+ if type(value) is list:
|
| |
+ value = "\n".join(value)
|
| |
+ if value:
|
| |
+ last_col += '\n%s: %s\n' % (term, colorByCat(category, value.strip()))
|
| |
else:
|
| |
- if opts.profile and dur:
|
| |
- msg += '\t%s:%s' % ('dur', dur)
|
| |
- msg += '\n'
|
| |
- msg += json.dumps(slurp, indent=4)
|
| |
- msg += '\n'
|
| |
+ last_col += '\n'
|
| |
+ last_col += json.dumps(slurp, indent=4)
|
| |
+ last_col += '\n'
|
| |
+
|
| |
+ result.append(last_col)
|
| |
+ rows.append(result)
|
| |
|
| |
- return msg
|
| |
+ return rows
|
| |
|
| |
|
| |
def main(args):
|
| |
- opts, args = parse_args(args)
|
| |
- for pb in glob.glob(os.path.join(logpath, opts.playbook)):
|
| |
- pb_name = os.path.basename(pb)
|
| |
- for pb_logdir in glob.glob(os.path.join(pb, opts.datestr)):
|
| |
- if opts.list_pb:
|
| |
- print(pb_name)
|
| |
+ cfg = find_ini_config_file()
|
| |
+ if cfg:
|
| |
+ cp = configparser.ConfigParser()
|
| |
+ cp.read(cfg)
|
| |
+ try:
|
| |
+ logpath = cp.get('callback_logdetail', "log_path")
|
| |
+ except configparser.NoSectionError:
|
| |
+ logpath = DEFAULT_LOGPATH
|
| |
+ opts = parse_args(args)
|
| |
+ rows = []
|
| |
+
|
| |
+ # List play summary
|
| |
+ if opts.list_pb:
|
| |
+ rows.append(["Date", colorByCat("", "Playbook"), "Ran By", "Hosts", "Stats"])
|
| |
+ for r, d, f in os.walk(logpath):
|
| |
+ if opts.since and f and date_from_path(r) < opts.since:
|
| |
continue
|
| |
+ for file in f:
|
| |
+ if file.endswith('.info'):
|
| |
+ pb = parse_info(os.path.join(r, file))
|
| |
+ pb_name = os.path.splitext(os.path.basename(pb['playbook']))[0]
|
| |
+ pb_date = datetime_from_path(r)
|
| |
+ if (
|
| |
+ opts.list_all or opts.since
|
| |
+ or (
|
| |
+ opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0)
|
| |
+ and opts.datestr == pb_date)
|
| |
+ or (
|
| |
+ opts.datestr == opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0)
|
| |
+ and opts.datestr.date() == pb_date.date())):
|
| |
+ stats = Counter()
|
| |
+ hosts = []
|
| |
+ if "stats" in pb:
|
| |
+ for host, stat in pb['stats'].items():
|
| |
+ del stat['task_userid']
|
| |
+ stats += Counter(stat)
|
| |
+ hosts.append(host)
|
| |
+ host_count = len(set(hosts))
|
| |
+ pb_name = colorByStats(pb_name, stats)
|
| |
+ summary = format_stats(stats)
|
| |
+ # summary = "ok:%s chd:%s unr:%s faild:%s" % (stats['ok'], stats['changed'], stats['unreachable'], stats['failures'])
|
| |
+
|
| |
+ rows.append([pb_date.isoformat(), pb_name, pb['userid'], str(host_count), summary])
|
| |
|
| |
- logfiles = glob.glob(pb_logdir + '/*/*.log')
|
| |
- msg = search_logs(opts, logfiles)
|
| |
- if msg:
|
| |
- print(pb_name)
|
| |
- print(msg)
|
| |
+ m_widths = col_width(rows)
|
| |
+ if len(rows) <= 1:
|
| |
+ print("no log")
|
| |
+ else:
|
| |
+ for row in rows:
|
| |
+ print(" ".join((val.ljust(width) for val, width in zip(row, m_widths))).strip())
|
| |
+
|
| |
+ # Play detail
|
| |
+ else:
|
| |
+ for pb in glob.glob(os.path.join(logpath, opts.playbook)):
|
| |
+ pb_name = os.path.basename(pb)
|
| |
+ if opts.list_all or opts.since:
|
| |
+ date_glob = glob.glob(os.path.join(pb, "*/*/*"))
|
| |
+ else:
|
| |
+ date_glob = glob.glob(os.path.join(pb, opts.datestr.strftime("%Y/%m/%d")))
|
| |
+ for pb_logdir in date_glob:
|
| |
+ run_date = date_from_path(pb_logdir)
|
| |
+ if opts.since and run_date < opts.since:
|
| |
+ continue
|
| |
+ if opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0):
|
| |
+ logfiles = glob.glob(pb_logdir + '/' + opts.datestr.strftime("%H.%M.%S") + '/*.log*')
|
| |
+ else:
|
| |
+ logfiles = glob.glob(pb_logdir + '/*/*.log*')
|
| |
+ rows = search_logs(opts, logfiles)
|
| |
+ if len(rows) > 1:
|
| |
+ m_widths = col_width(rows)
|
| |
+ print("%s\n-------" % (pb_name,))
|
| |
+ for row in rows:
|
| |
+ print(" ".join((val.ljust(width) for val, width in zip(row, m_widths))))
|
| |
+ print("")
|
| |
|
| |
|
| |
if __name__ == "__main__":
|
| |
I end up using logview quite a lot myself and have added a few features I needed that you may be interested in.
All of them are open to discussion, and feel free to close this PR if you think it's not relevant for your usage.
Here they are:
Log files:
My ansible control node is heavily space-constrained at $dayjob, so I needed to keep all logs to a minimum, in my own home directory. I've kept logview backward-compatible with the old plain-text log files, so you can still query previous logs if needed.
Cosmetics changes:
By the way, the status option (-s) is no longer case-sensitive.
New options: