Coverage for mflog/__init__.py: 62%

285 statements  

coverage.py v7.2.7, created at 2024-10-08 09:20 +0000

1# -*- coding: utf-8 -*- 

2 

3from __future__ import print_function 

4import sys 

5import json 

6import os 

7import logging 

8import logging.config 

9import structlog 

10import functools 

11import traceback 

12try: 

13 from rich.console import Console 

14 from rich.table import Table 

15 from rich.text import Text 

16except ImportError: 

17 pass 

18 

19from mflog.utils import level_name_to_level_no, Config, \ 

20 get_level_no_from_logger_name, write_with_lock, flush_with_lock, \ 

21 __reset_level_from_logger_name_cache, \ 

22 get_resolved_fancy_output_config_value 

23from mflog.utils import dump_locals as _dump_locals 

24from mflog.processors import fltr, add_level, add_pid, add_exception_info, \ 

25 kv_renderer, add_extra_context 

26from mflog.unittests import UNIT_TESTS_STDOUT, UNIT_TESTS_STDERR, \ 

27 UNIT_TESTS_JSON, UNIT_TESTS_MODE 

28from mflog.syslog import SyslogLogger 

29 

30CONFIGURATION_SET = False 

31 

32 

33class StructlogHandler(logging.Handler): 

34 """Feed all events back into `structlog`. 

35 

36 See https://github.com/hynek/structlog/issues/9 

37 """ 

38 

39 level = logging.DEBUG 

40 filters = [] 

41 lock = None 

42 __loggers = None 

43 

44 def __init__(self, *args, **kwargs): 

45 logging.Handler.__init__(self, *args, **kwargs) 

46 self.__loggers = {} 

47 

48 def __get_logger(self, name): 

49 if name not in self.__loggers:  [coverage 49 ↛ 51: the condition on line 49 was never false]

50 self.__loggers[name] = get_logger(name) 

51 return self.__loggers[name] 

52 

53 def emit(self, record): 

54 kwargs = {'name': record.name} 

55 if record.exc_info:  [coverage 55 ↛ 56: the condition on line 55 was never true]

56 kwargs['exc_info'] = record.exc_info 

57 logger = self.__get_logger(record.name) 

58 if record.levelno >= logging.CRITICAL:  [coverage 58 ↛ 59: the condition on line 58 was never true]

59 f = logger.critical 

60 elif record.levelno >= logging.ERROR:  [coverage 60 ↛ 61: the condition on line 60 was never true]

61 f = logger.error 

62 elif record.levelno >= logging.WARNING: 

63 f = logger.warning 

64 elif record.levelno >= logging.INFO:  [coverage 64 ↛ 66: the condition on line 64 was never false]

65 f = logger.info 

66 elif record.levelno >= logging.DEBUG: 

67 f = logger.debug 

68 else: 

69 # let's ignore this 

70 return 

71 # Mimic the formatting behaviour of the stdlib's logging 

72 # module, which accepts both positional arguments and a single 

73 # dict argument. 

74 if record.args and isinstance(record.args, dict): 

75 f(record.msg, record.args, **kwargs) 

76 else: 

77 f(record.msg, *(record.args), **kwargs) 

78 
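The StructlogHandler above is what powers the standard_logging_redirect option: every record emitted through the stdlib logging module is re-dispatched to an mflog logger of the same name, with the stdlib level mapped to the matching structlog method. A minimal, hedged usage sketch (the logger name and values are illustrative, not taken from the source):

    import logging
    import mflog

    mflog.set_config()                             # installs StructlogHandler on the root logger by default
    legacy = logging.getLogger("third_party.lib")  # an ordinary stdlib logger
    legacy.warning("disk usage at %d%%", 93)       # re-emitted through mflog, rendered on stderr
    legacy.debug("only shown if the configured minimal level allows it")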

79 

80class MFLogLogger(object): 

81 

82 _unittests_stdout = None 

83 _unittests_stderr = None 

84 _unittests_json = None 

85 

86 def __init__(self, *args): 

87 self._json_file = None 

88 self._json_logger = None 

89 self._syslog_logger = None 

90 if len(args) > 0:  [coverage 90 ↛ 93: the condition on line 90 was never false]

91 self.name = args[0] 

92 else: 

93 self.name = 'root' 

94 self._stdout_print_logger = structlog.PrintLogger(sys.stdout) 

95 self._stderr_print_logger = structlog.PrintLogger(sys.stderr) 

96 if Config.syslog_address:  [coverage 96 ↛ 97: the condition on line 96 was never true]

97 self._syslog_logger = SyslogLogger(Config.syslog_address, 

98 Config.syslog_format) 

99 if Config.json_file or UNIT_TESTS_MODE:  [coverage 99 ↛ 110: the condition on line 99 was never false]

100 if UNIT_TESTS_MODE or Config.json_file is None:  [coverage 100 ↛ 104: the condition on line 100 was never false]

101 self._json_file = open('/dev/null', 'a') 

102 self._json_logger = structlog.PrintLogger(self._json_file) 

103 else: 

104 self._json_file = open(Config.json_file, 'a') 

105 self._json_logger = structlog.PrintLogger(self._json_file) 

106 self._json_logger._write = functools.partial(write_with_lock, 

107 self._json_file) 

108 self._json_logger._flush = functools.partial(flush_with_lock, 

109 self._json_file) 

110 if UNIT_TESTS_MODE:  [coverage 110 ↛ 117: the condition on line 110 was never false]

111 self._stdout_print_logger._flush = lambda *args, **kwargs: None  [coverage 111 ↛ exit: the lambda on line 111 never ran]

112 self._stdout_print_logger._write = UNIT_TESTS_STDOUT.append 

113 self._stderr_print_logger._flush = lambda *args, **kwargs: None  [coverage 113 ↛ exit: the lambda on line 113 never ran]

114 self._stderr_print_logger._write = UNIT_TESTS_STDERR.append 

115 self._json_logger._flush = lambda *args, **kwargs: None  [coverage 115 ↛ exit: the lambda on line 115 never ran]

116 self._json_logger._write = UNIT_TESTS_JSON.append 

117 self._json_only_keys = Config.json_only_keys 

118 

119 def close(self): 

120 if self._json_file:  [coverage 120 ↛ 125: the condition on line 120 was never false]

121 try: 

122 self._json_file.close() 

123 except Exception: 

124 pass 

125 if self._syslog_logger is not None:  [coverage 125 ↛ 126: the condition on line 125 was never true]

126 self._syslog_logger.close() 

127 

128 def __del__(self): 

129 self.close() 

130 

131 def _msg(self, std_logger, **event_dict): 

132 try: 

133 self._json(**event_dict) 

134 except Exception as e: 

135 print("MFLOG ERROR: can't write log message to json output " 

136 "with exception: %s" % e, file=sys.stderr) 

137 traceback.print_exc(file=sys.stderr) 

138 try: 

139 self._syslog(**event_dict) 

140 except Exception as e: 

141 print("MFLOG ERROR: can't write log message to syslog output " 

142 "with exception: %s" % e, file=sys.stderr) 

143 traceback.print_exc(file=sys.stderr) 

144 fancy = get_resolved_fancy_output_config_value(f=std_logger._file) 

145 if fancy:  [coverage 145 ↛ 146: the condition on line 145 was never true]

146 try: 

147 self._fancy_msg(std_logger._file, **event_dict) 

148 return 

149 except Exception: 

150 # can't write to fancy output, let's fallback silently to 

151 # standard logging 

152 pass 

153 try: 

154 std_logger.msg(self._format(event_dict)) 

155 except Exception as e: 

156 print("MFLOG ERROR: can't write log message to stdout/err " 

157 "with exception: %s" % e, file=sys.stderr) 

158 traceback.print_exc(file=sys.stderr) 

159 

160 def _fancy_msg(self, f, **event_dict): 

161 c = Console(file=f, highlight=False, emoji=False, markup=False) 

162 lll = event_dict.pop('level').lower() 

163 llu = lll.upper() 

164 exc = event_dict.pop('exception', None) 

165 event_dict.pop('exception_type', None) 

166 event_dict.pop('exception_file', None) 

167 name = event_dict.pop('name', 'root') 

168 pid = event_dict.pop('pid') 

169 ts = event_dict.pop('timestamp')[0:-3] + "Z" 

170 try: 

171 msg = event_dict.pop('event') 

172 except KeyError: 

173 msg = "None" 

174 for key in self._json_only_keys: # pylint: disable=E1133 

175 try: 

176 event_dict.pop(key) 

177 except KeyError: 

178 pass 

179 extra = "" 

180 if len(event_dict) > 0: 

181 extra = kv_renderer(None, None, event_dict) 

182 if lll in ['notset', 'debug', 'info', 'warning', 

183 'error', 'critical']: 

184 ls = "logging.level.%s" % lll 

185 else: 

186 ls = "none" 

187 output = Table(show_header=False, expand=True, box=None, 

188 padding=(0, 1, 0, 0)) 

189 output.add_column(style="log.time") 

190 output.add_column(width=10, justify="center") 

191 output.add_column(justify="center") 

192 output.add_column(ratio=1) 

193 row = [] 

194 row.append(Text(ts)) 

195 row.append(Text("[%s]" % llu, style=ls)) 

196 row.append(Text(name, style="bold") + Text("#") + Text("%i" % pid, 

197 style="yellow")) 

198 row.append(Text(msg)) 

199 output.add_row(*row) 

200 if extra != "": 

201 output.add_row( 

202 "", "", "", 

203 Text("{ ", style="repr.attrib_name") + 

204 Text(extra, style="repr.attrib_name") + 

205 Text(" }", style="repr.attrib_name")) 

206 c.print(output) 

207 if exc is not None: 

208 c.print_exception() 

209 if Config.auto_dump_locals: 

210 _dump_locals(f) 

211 

212 def _msg_stdout(self, **event_dict): 

213 self._msg(self._stdout_print_logger, **event_dict) 

214 

215 def _msg_stderr(self, **event_dict): 

216 self._msg(self._stderr_print_logger, **event_dict) 

217 

218 def _json(self, **event_dict): 

219 if Config.json_file is None and not UNIT_TESTS_MODE:  [coverage 219 ↛ 220: the condition on line 219 was never true]

220 return 

221 method_level_no = level_name_to_level_no(event_dict['level']) 

222 if method_level_no < level_name_to_level_no(Config.json_minimal_level): 

223 return 

224 self._json_logger.msg(json.dumps(event_dict)) 

225 

226 def _syslog(self, **event_dict): 

227 if Config.syslog_address is None:  [coverage 227 ↛ 229: the condition on line 227 was never false]

228 return 

229 method_level_no = level_name_to_level_no(event_dict['level']) 

230 syslog_minimal_level = Config.syslog_minimal_level 

231 if method_level_no < level_name_to_level_no(syslog_minimal_level): 

232 return 

233 self._syslog_logger.msg(event_dict) 

234 

235 def _format(self, event_dict): 

236 level = "[%s]" % event_dict.pop('level').upper() 

237 ts = event_dict.pop('timestamp') 

238 name = event_dict.pop('name', 'root') 

239 pid = event_dict.pop('pid') 

240 try: 

241 msg = event_dict.pop('event') 

242 except KeyError: 

243 msg = "None" 

244 exc = event_dict.pop('exception', None) 

245 event_dict.pop('exception_type', None) 

246 event_dict.pop('exception_file', None) 

247 for key in self._json_only_keys: # pylint: disable=E1133 

248 try: 

249 event_dict.pop(key) 

250 except KeyError: 

251 pass 

252 extra = "" 

253 if len(event_dict) > 0: 

254 extra = " {%s}" % kv_renderer(None, None, event_dict) 

255 tmp = "%s %10s (%s#%i) %s%s" % (ts, level, name, pid, msg, extra) 

256 if exc is not None: 

257 tmp = tmp + "\n" + exc 

258 return tmp 

259 

260 def _json_format(self, event_dict): 

261 return json.dumps(event_dict) 

262 

263 def isEnabledFor(self, level): 

264 logger_level_no = \ 

265 get_level_no_from_logger_name(self.name) 

266 return level >= logger_level_no 

267 

268 def getEffectiveLevel(self): 

269 return get_level_no_from_logger_name(self.name) 

270 

271 def setLevel(self, level): 

272 pass 

273 

274 debug = info = msg = _msg_stdout 

275 error = critical = warning = exception = _msg_stderr 

276 
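MFLogLogger fans each event out to up to three sinks: an optional JSON file (one json.dumps() document per line, filtered by json_minimal_level), an optional syslog target (filtered by syslog_minimal_level), and stdout/stderr, where debug/info go to stdout and warning/error/critical/exception go to stderr, using rich "fancy" rendering when available and the plain _format() line otherwise. A hedged illustration with made-up values:

    import mflog

    log = mflog.get_logger("myapp.worker").bind(request_id="abc123")  # structlog-style bound context
    log.info("queue drained", size=0)       # stdout; extra kwargs rendered as {key=value} pairs
    log.error("upload failed", retries=3)   # stderr, mirrored to JSON/syslog if those are configured

    # With fancy output disabled, _format() produces roughly (values invented,
    # key ordering depends on kv_renderer):
    # 2024-10-08T09:20:00.123456Z    [ERROR] (myapp.worker#12345) upload failed {retries=3 request_id=abc123}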

277 

278class MFLogLoggerFactory(object): 

279 

280 def __call__(self, *args): 

281 return MFLogLogger(*args) 

282 

283 

284class MFBoundLogger(structlog.stdlib.BoundLogger): 

285 

286 def die(self, *args, **kwargs): 

287 if len(args) == 0: 

288 self.exception("die() called", **kwargs) 

289 else: 

290 self.exception(*args, **kwargs) 

291 if Config.auto_dump_locals: 

292 _dump_locals() 

293 sys.exit(1) 

294 

295 def dump_locals(self, f=sys.stderr): 

296 res = _dump_locals(f) 

297 if not res: 

298 self.warning("can't dump locals") 

299 
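MFBoundLogger adds two helpers on top of the standard structlog wrapper: die(), which logs at exception level (so a traceback is attached when called from an except block), optionally dumps local variables, then exits the process with status 1, and dump_locals(), which writes the caller's locals for post-mortem debugging. A hedged sketch (charge() and the order id are hypothetical):

    import mflog

    log = mflog.get_logger("payments")
    try:
        charge(order)                          # hypothetical business call
    except Exception:
        log.die("charge failed", order_id=42)  # logs the traceback, dumps locals, sys.exit(1)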

300 

301def set_config(minimal_level=None, json_minimal_level=None, 

302 json_file=None, override_files=None, 

303 thread_local_context=False, extra_context_func=None, 

304 json_only_keys=None, standard_logging_redirect=None, 

305 override_dict={}, syslog_address=None, syslog_format=None, 

306 fancy_output=None, auto_dump_locals=True): 

307 """Set the logging configuration. 

308 

309 The configuration is cached, so you can call this several times. 

310 

311 """ 

312 global CONFIGURATION_SET 

313 Config.set_instance(minimal_level=minimal_level, 

314 json_minimal_level=json_minimal_level, 

315 json_file=json_file, 

316 override_files=override_files, 

317 thread_local_context=thread_local_context, 

318 extra_context_func=extra_context_func, 

319 json_only_keys=json_only_keys, 

320 override_dict=override_dict, 

321 syslog_address=syslog_address, 

322 syslog_format=syslog_format, 

323 fancy_output=fancy_output, 

324 auto_dump_locals=auto_dump_locals) 

325 if standard_logging_redirect is not None: 

326 slr = standard_logging_redirect 

327 else: 

328 if 'MFLOG_STANDARD_LOGGING_REDIRECT' in os.environ:  [coverage 328 ↛ 329: the condition on line 328 was never true]

329 slr = (os.environ['MFLOG_STANDARD_LOGGING_REDIRECT'] == '1') 

330 else: 

331 slr = True # default value 

332 if slr: 

333 # Configure standard logging redirect to structlog 

334 d = { 

335 "version": 1, 

336 "disable_existing_loggers": False, 

337 "formatters": {}, 

338 "handlers": {}, 

339 "filters": {}, 

340 "loggers": { 

341 "": { 

342 "level": "NOTSET" 

343 } 

344 } 

345 } 

346 logging.config.dictConfig(d) 

347 root_logger = logging.getLogger() 

348 root_logger.addHandler(StructlogHandler()) 

349 root_logger.setLevel(logging.NOTSET) 

350 else: 

351 root_logger = logging.getLogger() 

352 root_logger.handlers = [x for x in root_logger.handlers 

353 if not isinstance(x, StructlogHandler)] 

354 # Configure structlog 

355 context_class = None 

356 if thread_local_context:  [coverage 356 ↛ 357: the condition on line 356 was never true]

357 context_class = structlog.threadlocal.wrap_dict(dict) 

358 structlog.reset_defaults() 

359 structlog.configure( 

360 processors=[ 

361 fltr, 

362 add_level, 

363 add_pid, 

364 add_extra_context, 

365 structlog.processors.TimeStamper(fmt="iso", utc=True), 

366 add_exception_info, 

367 structlog.stdlib.PositionalArgumentsFormatter(), 

368 structlog.processors.UnicodeDecoder(), 

369 # See https://stackoverflow.com/a/51629142 

370 # we do the formatting in the Logger 

371 lambda _, __, ed: ed 

372 ], 

373 cache_logger_on_first_use=True, 

374 wrapper_class=MFBoundLogger, 

375 context_class=context_class, 

376 logger_factory=MFLogLoggerFactory() 

377 ) 

378 CONFIGURATION_SET = True 

379 
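A hedged configuration sketch for set_config(): the parameter names come from the signature above, while every value below is purely illustrative.

    import mflog

    mflog.set_config(
        minimal_level="DEBUG",                          # console threshold
        json_minimal_level="WARNING",                   # only WARNING and above reach the JSON file
        json_file="/var/log/myapp.json",                # one JSON document per line
        override_dict={"noisy.dependency.*": "ERROR"},  # fnmatch pattern -> forced minimal level
        fancy_output=False,                             # force the plain formatter even on a TTY
    )
    mflog.get_logger("myapp").debug("configured")

Because the configuration is cached, later get_logger() calls reuse these settings without reconfiguring.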

380 

381def add_override(logger_name_pattern, minimal_level_name): 

382 """Add an override to the configuration. 

383 

384 You provide an fnmatch pattern for the logger name as the first argument, 

385 and the minimal_level_name (WARNING, DEBUG...) to force for this pattern. 

386 

387 Note: if you use None as minimal_level_name, it will delete the override. 

388 

389 """ 

390 if not CONFIGURATION_SET: 

391 set_config() 

392 if minimal_level_name is None: 

393 try: 

394 # pylint: disable=unsupported-delete-operation 

395 del(Config.override_dict[logger_name_pattern]) 

396 except KeyError: 

397 pass 

398 else: 

399 # just to raise an exception here 

400 # if the minimal_level_name is incorrect 

401 level_name_to_level_no(minimal_level_name) 

402 d = Config.override_dict 

403 # pylint: disable=unsupported-assignment-operation 

404 d[logger_name_pattern] = minimal_level_name 

405 __reset_level_from_logger_name_cache() 

406 
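add_override() can be used at runtime to tighten or relax individual loggers without touching the global configuration; a hedged sketch with invented logger names:

    import mflog

    mflog.add_override("boto*", "WARNING")    # silence DEBUG/INFO from any logger matching boto*
    mflog.add_override("myapp.sql", "DEBUG")  # but keep SQL tracing verbose
    mflog.add_override("boto*", None)         # passing None removes the override again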

407 

408def getLogger(logger_name='root'): 

409 """Return a python logging logger. 

410 

411 This function is just a wrapper. 

412 

413 But by importing and using this one (and not directly logging.getLogger 

414 or structlog.get_logger), 

415 you are sure that the logging config is set. 

416 """ 

417 if not CONFIGURATION_SET: 

418 set_config() 

419 return structlog.get_logger(logger_name, name=logger_name) 

420 

421 

422def get_logger(logger_name='root'): 

423 """Return a python logging logger. 

424 

425 This function is just a wrapper. 

426 

427 But by importing and using this one (and not directly logging.getLogger 

428 or structlog.get_logger), 

429 you are sure that the logging config is set. 

430 """ 

431 return getLogger(logger_name) 

432 
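getLogger() and get_logger() are the intended entry points: they guarantee that set_config() has run before handing back a structlog logger. A hedged sketch (the logger name and fields are illustrative):

    import mflog

    log = mflog.getLogger("myapp.api")  # works even if set_config() was never called explicitly
    log.info("listening", port=8080)    # get_logger("myapp.api") returns the same kind of logger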

433 

434def debug(message, *args, **kwargs): 

435 return get_logger().debug(message, *args, **kwargs) 

436 

437 

438def info(message, *args, **kwargs): 

439 return get_logger().info(message, *args, **kwargs) 

440 

441 

442def warning(message, *args, **kwargs): 

443 return get_logger().warning(message, *args, **kwargs) 

444 

445 

446def error(message, *args, **kwargs): 

447 return get_logger().error(message, *args, **kwargs) 

448 

449 

450def critical(message, *args, **kwargs): 

451 return get_logger().critical(message, *args, **kwargs) 

452 

453 

454def exception(message, *args, **kwargs): 

455 return get_logger().exception(message, *args, **kwargs) 

456 

457 

458def die(*args, **kwargs): 

459 get_logger().die(*args, **kwargs) 

460 

461 

462def dump_locals(f=sys.stderr): 

463 get_logger().dump_locals(f=f) 

464 
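The module-level helpers above (debug, info, warning, error, critical, exception, die, dump_locals) all proxy to the root mflog logger, so one-off scripts can log without creating a logger object first. A hedged sketch:

    import mflog

    mflog.info("backup started", target="/srv/backups")
    mflog.warning("low disk space", free_gb=3)
    try:
        1 / 0
    except ZeroDivisionError:
        mflog.exception("unexpected failure")  # includes the traceback of the active exception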

465 

466def __unset_configuration(): 

467 global CONFIGURATION_SET 

468 CONFIGURATION_SET = False