Coverage for bzfs_main / loggers.py: 100%

124 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-02-24 10:16 +0000

1# Copyright 2024 Wolfgang Hoschek AT mac DOT com 

2# 

3# Licensed under the Apache License, Version 2.0 (the "License"); 

4# you may not use this file except in compliance with the License. 

5# You may obtain a copy of the License at 

6# 

7# http://www.apache.org/licenses/LICENSE-2.0 

8# 

9# Unless required by applicable law or agreed to in writing, software 

10# distributed under the License is distributed on an "AS IS" BASIS, 

11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

12# See the License for the specific language governing permissions and 

13# limitations under the License. 

14# 

15"""Logging helpers that build default and syslog-enabled loggers; Centralizes logger setup so that all bzfs tools share 

16uniform formatting and configuration. 

17 

18Each bzfs.Job has its own separate Logger object such that multiple Jobs can run concurrently in the same Python process 

19without interfering with each other's logging semantics. This is achieved by passing a unique ``logger_name_suffix`` per job 

20to ``get_logger()`` so each job receives an isolated Logger instance. Callers are responsible for closing any loggers they 

21own via ``reset_logger()``. 

22""" 

23 

24from __future__ import ( 

25 annotations, 

26) 

27import argparse 

28import contextlib 

29import logging 

30import sys 

31from datetime import ( 

32 datetime, 

33) 

34from logging import ( 

35 Logger, 

36) 

37from typing import ( 

38 TYPE_CHECKING, 

39 Any, 

40 Final, 

41 final, 

42) 

43 

44from bzfs_main.util.utils import ( 

45 LOG_STDERR, 

46 LOG_STDOUT, 

47 LOG_TRACE, 

48) 

49 

50if TYPE_CHECKING: # pragma: no cover - for type hints only 

51 from bzfs_main.configuration import ( 

52 LogParams, 

53 ) 

54 

55 

56def _get_logger_name() -> str: 

57 """Returns the canonical logger name used throughout bzfs.""" 

58 return "bzfs_main.bzfs" 

59 

60 

def _resolve_logger_name(logger_name_suffix: str) -> str:
    """Returns the logger name for the given optional logger suffix.

    Each bzfs.Job instance gets a distinct Logger name and hence uses a separate private (thread-safe) logging.Logger object.
    """
    base_name: str = _get_logger_name()
    if not logger_name_suffix:
        return base_name
    return f"{base_name}.{logger_name_suffix}"

68 

69 

def reset_logger(log: Logger) -> None:
    """Removes and closes logging handlers (and closes their files) and resets logger to default state.

    Params:
        log: the logger whose handlers, filters, level and propagation flag are reset.
    """
    for handler in log.handlers.copy():
        log.removeHandler(handler)
        with contextlib.suppress(BrokenPipeError):
            handler.flush()
        # close in its own suppress block so the handler (and its file) is still closed
        # even if flush() above hit a broken pipe; FileHandler.close() may flush again.
        with contextlib.suppress(BrokenPipeError):
            handler.close()
    for _filter in log.filters.copy():
        log.removeFilter(_filter)
    log.setLevel(logging.NOTSET)
    log.propagate = True

81 

82 

def get_logger(
    log_params: LogParams, args: argparse.Namespace, log: Logger | None = None, logger_name_suffix: str = ""
) -> Logger:
    """Returns a logger configured from CLI arguments or an optional base logger."""
    if log is None:
        return _get_default_logger(log_params, args, logger_name_suffix=logger_name_suffix)
    assert isinstance(log, Logger)
    return log  # caller supplied a third party logger object; use it as-is

91 

92 

def _get_default_logger(log_params: LogParams, args: argparse.Namespace, logger_name_suffix: str = "") -> Logger:
    """Creates the default logger with stream, file and optional syslog handlers."""
    log = Logger(_resolve_logger_name(logger_name_suffix))  # noqa: LOG001 do not register logger with Logger.manager to avoid potential memory leak
    log.setLevel(log_params.log_level)
    log.propagate = False  # don't propagate log messages up to the root logger to avoid emitting duplicate messages

    stream_handler: logging.StreamHandler = logging.StreamHandler(stream=sys.stdout)
    stream_handler.terminator = ""
    stream_handler.setFormatter(get_default_log_formatter(default_terminator="\n", log_params=log_params))
    stream_handler.setLevel(log_params.log_level)
    log.addHandler(stream_handler)

    file_handler = logging.FileHandler(log_params.log_file, encoding="utf-8")
    file_handler.terminator = ""
    file_handler.setFormatter(get_default_log_formatter(default_terminator="\n"))
    file_handler.setLevel(log_params.log_level)
    log.addHandler(file_handler)

    address = args.log_syslog_address
    if address:  # optionally, also log to local or remote syslog
        from logging import handlers  # lazy import for startup perf

        syslog_address, socktype = _get_syslog_address(address, args.log_syslog_socktype)
        syslog_prefix = str(args.log_syslog_prefix).strip().replace("%", "")  # sanitize
        syslog_handler = handlers.SysLogHandler(address=syslog_address, facility=args.log_syslog_facility, socktype=socktype)
        syslog_handler.setFormatter(get_default_log_formatter(prefix=syslog_prefix + " "))
        syslog_handler.setLevel(args.log_syslog_level)
        log.addHandler(syslog_handler)
        if syslog_handler.level < log.getEffectiveLevel():
            log_level_name: str = logging.getLevelName(log.getEffectiveLevel())
            log.warning(
                "%s",
                f"No messages with priority lower than {log_level_name} will be sent to syslog because syslog "
                f"log level {args.log_syslog_level} is lower than overall log level {log_level_name}.",
            )
    return log

130 

131 

# Maps each logging level number to the short prefix that the formatters below prepend to a message;
# ERROR and CRITICAL additionally spell out the level name so severe messages are easy to spot/grep.
LOG_LEVEL_PREFIXES: Final[dict[int, str]] = {
    logging.CRITICAL: "[C] CRITICAL:",
    logging.ERROR: "[E] ERROR:",
    logging.WARNING: "[W]",
    logging.INFO: "[I]",
    logging.DEBUG: "[D]",
    LOG_TRACE: "[T]",
}

140 

141 

def get_default_log_formatter(
    prefix: str = "", default_terminator: str = "", log_params: LogParams | None = None
) -> logging.Formatter:
    """Returns a formatter for bzfs logs with optional prefix and column padding.

    Params:
        prefix: static text prepended to every formatted message (e.g. a sanitized syslog prefix).
        default_terminator: appended to each message unless the LogRecord carries its own ``terminator`` attribute.
        log_params: if given, each message is right-padded to ``log_params.terminal_columns`` so that a shorter
            progress line fully overwrites the trailing characters of the previous one.
    """
    # Bind module-level values to locals once so format() below avoids repeated global lookups per record.
    level_prefixes_: dict[int, str] = LOG_LEVEL_PREFIXES.copy()
    log_stderr_: int = LOG_STDERR
    log_stdout_: int = LOG_STDOUT
    cols: int = 0 if log_params is None else log_params.terminal_columns

    @final
    class DefaultLogFormatter(logging.Formatter):
        """Formatter adding timestamps and padding for progress output."""

        def format(self, record: logging.LogRecord) -> str:
            """Formats the given record, adding timestamp and level prefix and padding."""
            levelno: int = record.levelno
            if levelno != log_stderr_ and levelno != log_stdout_:  # emit stdout and stderr "as-is" (no formatting)
                timestamp: str = datetime.now().isoformat(sep=" ", timespec="seconds")  # 2024-09-03 12:26:15
                ts_level: str = f"{timestamp} {level_prefixes_.get(levelno, '')} "
                msg: str = record.msg
                i: int = msg.find("%s")
                msg = ts_level + msg
                if i >= 1:
                    # shift the pad point by the prepended timestamp/level text before column-aligning
                    i += len(ts_level)
                    msg = msg[0:i].ljust(54) + msg[i:]  # right-pad msg if record.msg contains "%s" unless at start
                if record.exc_info or record.exc_text or record.stack_info:
                    # delegate to the base formatter so traceback/stack text is appended; temporarily swap in
                    # the decorated message because Formatter.format() reads record.msg
                    original_msg = record.msg
                    try:
                        record.msg = msg
                        msg = super().format(record)
                    finally:
                        record.msg = original_msg  # restore original msg so other logging handlers see unmodified state
                elif record.args:
                    msg = msg % record.args  # no exception info; apply %-substitution directly
            else:
                msg = super().format(record)

            msg = prefix + msg
            msg = msg.ljust(cols)  # w/ progress line, "overwrite" trailing chars of previous msg with spaces
            terminator: str = getattr(record, "terminator", default_terminator)
            return msg + terminator

    return DefaultLogFormatter()

185 

186 

def get_simple_logger(program: str, logger_name_suffix: str = "") -> Logger:
    """Returns a minimal logger for simple tools."""

    prefixes: dict[int, str] = LOG_LEVEL_PREFIXES.copy()  # bind a private copy for the nested formatter
    name_parts: list[str] = [program]
    if logger_name_suffix:
        name_parts.append(logger_name_suffix)
    logger_name = ".".join(name_parts)

    @final
    class LevelFormatter(logging.Formatter):
        """Injects level prefix and program name into log records."""

        def format(self, record: logging.LogRecord) -> str:
            """Attaches extra fields before delegating to base formatter."""
            record.level_prefix = prefixes.get(record.levelno, "")
            record.program = program
            return super().format(record)

    log = Logger(logger_name)  # noqa: LOG001 do not register logger with Logger.manager to avoid potential memory leak
    log.setLevel(logging.INFO)
    log.propagate = False
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(
        LevelFormatter(fmt="%(asctime)s %(level_prefix)s [%(program)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
    )
    log.addHandler(stream_handler)
    return log

212 

213 

def set_logging_runtime_defaults() -> None:
    """Applies process-wide logging runtime defaults for production use."""
    for custom_level, custom_name in ((LOG_TRACE, "TRACE"), (LOG_STDERR, "STDERR"), (LOG_STDOUT, "STDOUT")):
        logging.addLevelName(custom_level, custom_name)
    # perf: tell logging framework not to gather unnecessary expensive info for each log record
    logging.logProcesses = False
    logging.logThreads = False
    logging.logMultiprocessing = False
    logging.raiseExceptions = False  # avoid noisy tracebacks from logging handler errors like BrokenPipeError in production

224 

225 

226def _get_syslog_address(address: str, log_syslog_socktype: str) -> tuple[str | tuple[str, int], Any]: 

227 """Normalizes syslog address to tuple form and returns socket type.""" 

228 import socket # lazy import for startup perf 

229 

230 address = address.strip() 

231 socktype: socket.SocketKind | None = None 

232 if ":" in address: 

233 host, port_str = address.rsplit(":", 1) 

234 addr = (host.strip(), int(port_str.strip())) 

235 scktype: socket.SocketKind = socket.SOCK_DGRAM if log_syslog_socktype == "UDP" else socket.SOCK_STREAM # for TCP 

236 return addr, scktype 

237 return address, socktype