Coverage for src/lcdoc/mkdocs/stats/__init__.py: 11.11%
1"""
2## Stats
4Prints collected stats on stdout after build.
6Intended for piping into / consolidation with [jq](https://stedolan.github.io/jq/download/).
8### Config
10 config_scheme = (('round_digits', config_options.Type(int, default=4)),)
12"""
from lcdoc import log
import json
import os
import sys

from mkdocs.config import config_options

from lcdoc.const import LogStats, PageStats, Stats
from lcdoc.mkdocs.tools import MDPlugin, app
from lcdoc.tools import dirname, exists, project, read_file, write_file

last_stats = {}


def get_fn_and_set_last(self, config):
    """On serve and '-' we work with the cached last stats. Else we read the file if present"""
    fn = self.config['dump_stats']
    if not fn:
        app.info('no stats file configured')
        return None

    if fn == '-':
        return fn
    else:
        fn = project.abs_path(fn, config, mkdirs=True)
        if exists(fn):
            os.rename(fn, fn + '.prev.json')
        if last_stats:
            return fn
        l = read_file(fn + '.prev.json', dflt='')
        if l:
            last_stats.update(json.loads(l))
        return fn

    # write_file(fn, json.dumps(s, sort_keys=True, indent=4))
    # app.info('Have written stats', keys=len(s), file=fn)
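    # On repeated builds (serve) last_stats is already populated, so only the
    # rotation to <fn>.prev.json happens; on a fresh run the rotated previous
    # dump is read back to seed last_stats for the diff in on_post_build.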


def get_diff(s, minval):
    isminv = lambda v, m=minval: isinstance(v, float) and v == m
    d, o = {'added': {}, 'changed': {}}, last_stats
    d['removed'] = [k for k in o if k not in s and not isminv(o[k])]
    for k, v in s.items():
        vo = o.get(k)
        if vo is None:
            if isinstance(v, float) and v < 2 * minval:
                continue
            d['added'][k] = v
        elif vo != v:
            if isinstance(vo, float) and isinstance(v, float):
                if int(vo * 10) == int(v * 10):
                    continue
            d['changed'][k] = [vo, v]
    d['changed'].pop('Filtered_0_Values', 0)
    for k in {'added', 'removed', 'changed'}:
        if not d.get(k):
            d.pop(k)
    return d
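
# Illustrative shape of the dict returned by get_diff above (keys are made up,
# not from a real run):
#   {'added': {'Pages.new.md.blocks': 3},
#    'changed': {'Global.build_time': [12.31, 14.02]},
#    'removed': ['Pages.old.md.blocks']}
# Sections that end up empty are dropped, and float changes that do not move the
# first decimal digit are ignored.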


def filter_logs(sever):
    """Beyond info level we keep all logs in a RAM cache (log_majors).
    Here we return those at and beyond sever (= warning, error or fatal).
    """

    l = log.log_majors
    if not sever or sever not in l:
        return
    logs = []
    m = log.log_levels
    [logs.append([k, l.get(k)]) for k in m[m.index(sever) :] if l.get(k)]
    return logs, sum([len(i[1]) for i in logs])
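
# filter_logs returns a (logs, count) tuple: logs is a list of [level_name, entries]
# for every cached level at or above sever, count the total number of entries.
# It returns None when sever is 'none' or not present in the cache.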


def by_ts(store):
    def d(l, L):
        l.insert(1, log.level_by_name[L])
        if not l[-1]:
            l.pop()
        return l

    k = lambda i: i[0]
    return sorted([d(l, L) for L, logs in store.items() for l in logs], key=k)
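
# by_ts flattens the per-level store into one list sorted by the timestamp in
# position 0 of each entry; the level (via log.level_by_name) is inserted at
# position 1 and a trailing falsy element is dropped.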


class StatsPlugin(MDPlugin):
    # :docs:stats_config
    C = config_options.Choice
    log_maj = lambda d, C=C: C(['warning', 'error', 'fatal', 'none'], default=d)
    config_scheme = (
        # if not starting with "/": relative to project root.
        # for stdout: set file="-"
        ('dump_stats', config_options.Type(str, default='build/lcd-stats.json')),
        # round floats to this precision:
        ('round_digits', config_options.Type(int, default=2)),
        # omit zero values:
        ('filter_0', config_options.Type(bool, default=True)),
        # helpful to see changes at serve:
        ('print_diff', config_options.Type(bool, default=True)),
        # write the logs as json (same dir as fn):
        ('dump_logs', config_options.Type(str, default='build/lcd-logs.json')),
        # print all logs from this level again at end of build:
        ('repeat_major_log_events', log_maj('warning')),
        # fail mkdocs build on errors - you don't want broken docs published:
        ('fail_build_on_log_events', log_maj('error')),
    )
    # :docs:stats_config
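
    # A hedged configuration sketch (YAML shown as a comment; the key under
    # `plugins:` is whatever name this plugin is registered under - 'lcd-stats'
    # below is only an assumption):
    #
    #   plugins:
    #     - lcd-stats:
    #         round_digits: 3
    #         repeat_major_log_events: error
    #         fail_build_on_log_events: fatal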

    def on_post_build(self, config):
        from lcdoc.tools import flatten

        fn = get_fn_and_set_last(self, config)
        rd = self.config['round_digits']
        minval = 5 * 10 ** -rd
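        # e.g. with the default round_digits=2 this is 0.05: values at or below
        # it are treated as near-zero and filtered out when filter_0 is set.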
        filter_0 = self.config['filter_0']

        s = {'Global': Stats, 'Pages': PageStats, 'Log': LogStats}
        s = flatten(s, sep='.', tpljoin='')
        if rd:
            r = lambda v: round(v, rd) if type(v) == float else v
            s = dict([(k, r(v)) for k, v in s.items()])
        l = len(s)
        if filter_0:
            s = dict(filter(lambda x: x[1] > minval, s.items()))
            f = l - len(s)
            if f:
                s['Filtered_0_Values'] = f

        if last_stats and self.config['print_diff']:
            diff = get_diff(s, minval=minval)
            msg = 'Stats changes since last run'
            msg = ('No s' + msg[1:]) if not diff else msg
            app.info(msg, json=diff)

        last_stats.clear()
        last_stats.update(s)
        kw = dict(filtered_near_zero_vals=filter_0)
        if filter_0:
            kw['minval'] = minval
        if fn == '-':
            app.info('Collected Stats', hint='pipe into jq to consolidate', **kw)
            print(json.dumps(s, sort_keys=True))
        elif fn:
            write_file(fn, json.dumps(s, sort_keys=True, indent=4))
            app.info('Have written stats', keys=len(s), file=fn, **kw)

        sever = self.config['repeat_major_log_events']
        # filter_logs returns None when sever is 'none' - guard the unpacking:
        logs, cnt = filter_logs(sever=sever) or (None, 0)
        if logs:
            app.info('Logs of severity %s and higher' % sever, json=logs, count=cnt)

        l = log.log_majors
        fn = self.config['dump_logs']
        if fn:
            fn = project.abs_path(fn, config, mkdirs=True)
            if exists(fn):
                os.rename(fn, fn + '.prev.json')
            ol, j = by_ts(l), json.dumps
            write_file(fn, '\n'.join(j(i, default=str) for i in ol))
            app.info('Dumped logs', fn=fn, count=len(ol))
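            # The dump is one JSON document per line (JSON Lines), so it can be
            # read back with e.g. `jq -s .` to get a single array again.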

        bsever = self.config['fail_build_on_log_events']
        if bsever != sever:
            logs, cnt = filter_logs(sever=bsever) or (None, 0)

        if logs:
            # won't interrupt server mode for this
            m = app.error if 'serve' in sys.argv else app.die
            m('Build is broken, have %s critical logs' % cnt)

        [i.clear() for k, i in l.items()]