muutils.logger
(deprecated) experimenting with logging utilities
"""(deprecated) experimenting with logging utilities"""

import warnings

from muutils.logger.logger import Logger
from muutils.logger.loggingstream import LoggingStream
from muutils.logger.simplelogger import SimpleLogger
from muutils.logger.timing import TimerContext

# this package is unmaintained; emit a DeprecationWarning once, on import
warnings.warn(
    "muutils.logger is no longer maintained. Consider using [trnbl](https://github.com/mivanit/trnbl) instead.",
    DeprecationWarning,
)

# public API: submodule names plus the classes re-exported above
__all__ = [
    # submodules
    "exception_context",
    "headerfuncs",
    "log_util",
    "logger",
    "loggingstream",
    "simplelogger",
    "timing",
    # imports
    "Logger",
    "LoggingStream",
    "SimpleLogger",
    "TimerContext",
]
class Logger(SimpleLogger):
    """logger with more features, including log levels and streams

    # Parameters:
    - `log_path : str | None`
        default log file path
        (defaults to `None`)
    - `log_file : AnyIO | None`
        default log io, should have a `.write()` method (pass only this or `log_path`, not both)
        (defaults to `None`)
    - `timestamp : bool`
        whether to add timestamps to every log message (under the `_timestamp` key)
        (defaults to `True`)
    - `default_level : int`
        default log level for streams/messages that don't specify a level
        (defaults to `0`)
    - `console_print_threshold : int`
        log level at which to print to the console, anything greater will not be printed unless overridden by `console_print`
        (defaults to `50`)
    - `level_header : HeaderFunction`
        function for formatting log messages when printing to console
        (defaults to `HEADER_FUNCTIONS["md"]`)
    - `streams : dict[str | None, LoggingStream] | Sequence[LoggingStream]`
        streams to log to, as a mapping by name or a sequence
        (defaults to `()`)
    - `keep_last_msg_time : bool`
        whether to track the time of the last printed message (required by `log_elapsed_last`)
        (defaults to `True`)

    # Raises:
    - `ValueError` : on unrecognized `kwargs`, on `timestamp=False`, or on duplicate stream aliases
    """

    def __init__(
        self,
        log_path: str | None = None,
        log_file: AnyIO | None = None,
        default_level: int = 0,
        console_print_threshold: int = 50,
        level_header: HeaderFunction = HEADER_FUNCTIONS["md"],
        streams: dict[str | None, LoggingStream] | Sequence[LoggingStream] = (),
        keep_last_msg_time: bool = True,
        # junk args
        timestamp: bool = True,
        **kwargs,
    ):
        # junk arg checking
        # ==================================================
        if len(kwargs) > 0:
            raise ValueError(f"unrecognized kwargs: {kwargs}")

        if not timestamp:
            raise ValueError(
                "timestamp must be True -- why would you not want timestamps?"
            )

        # timing
        # ==================================================
        # FIX: `keep_last_msg_time` was previously stored but ignored; it now
        # actually controls whether the last-message time is tracked
        # TODO: handle per stream?
        self._keep_last_msg_time: bool = keep_last_msg_time
        self._last_msg_time: float | None = (
            time.time() if keep_last_msg_time else None
        )

        # basic setup
        # ==================================================
        # init SimpleLogger (sets up the main log file handle)
        super().__init__(log_file=log_file, log_path=log_path, timestamp=timestamp)

        # level-related
        self._console_print_threshold: int = console_print_threshold
        self._default_level: int = default_level

        # set up streams
        # FIX: copy a caller-supplied mapping so we never alias/mutate it
        self._streams: dict[str | None, LoggingStream] = (
            dict(streams)
            if isinstance(streams, typing.Mapping)
            else {s.name: s for s in streams}
        )
        # default error stream
        if "error" not in self._streams:
            self._streams["error"] = LoggingStream(
                "error",
                aliases={
                    "err",
                    "except",
                    "Exception",
                    "exception",
                    "exceptions",
                    "errors",
                },
            )

        # check alias duplicates
        alias_set: set[str | None] = set()
        for stream in self._streams.values():
            for alias in stream.aliases:
                if alias in alias_set:
                    raise ValueError(f"alias {alias} is already in use")
                alias_set.add(alias)

        # add aliases (snapshot the values, since we insert while iterating)
        for stream in tuple(self._streams.values()):
            for alias in stream.aliases:
                if alias not in self._streams:
                    self._streams[alias] = stream

        # print formatting
        self._level_header: HeaderFunction = level_header
        # NOTE: removed a leftover debug `print(...)` of the stream table here

    def _exception_context(
        self,
        stream: str = "error",
    ) -> ExceptionContext:
        """context manager that logs exceptions to `stream` (default: the error stream)"""
        s: LoggingStream = self._streams[stream]
        return ExceptionContext(stream=s)

    def log(  # type: ignore # yes, the signatures are different here.
        self,
        msg: JSONitem = None,
        lvl: int | None = None,
        stream: str | None = None,
        console_print: bool = False,
        extra_indent: str = "",
        **kwargs,
    ):
        """logging function

        ### Parameters:
        - `msg : JSONitem`
            message (usually string or dict) to be logged
        - `lvl : int | None`
            level of message (lower levels are more important)
            (defaults to `None`)
        - `console_print : bool`
            override `console_print_threshold` setting
            (defaults to `False`)
        - `stream : str | None`
            whether to log to a stream (defaults to `None`), which logs to the default `None` stream
            (defaults to `None`)
        """

        # add to known stream names if not present
        if stream not in self._streams:
            self._streams[stream] = LoggingStream(stream)

        # set default level to either global or stream-specific default level
        # ========================================
        if lvl is None:
            if stream is None:
                lvl = self._default_level
            else:
                stream_default: int | None = self._streams[stream].default_level
                lvl = (
                    stream_default
                    if stream_default is not None
                    else self._default_level
                )

        assert lvl is not None, "lvl should not be None at this point"

        # print to console with formatting
        # ========================================
        _printed: bool = False
        if console_print or (lvl <= self._console_print_threshold):
            # add some formatting
            print(
                self._level_header(
                    msg=msg,
                    lvl=lvl,
                    stream=stream,
                    extra_indent=extra_indent,
                )
            )

            # store the last message time (only when tracking is enabled)
            if self._last_msg_time is not None:
                self._last_msg_time = time.time()

            _printed = True

        # convert and add data
        # ========================================
        # FIX: always build a fresh dict, so a mapping passed by the caller
        # is never mutated by the metadata keys added below
        msg_dict: dict
        if not isinstance(msg, typing.Mapping):
            msg_dict = {"_msg": msg}
        else:
            msg_dict = dict(msg)

        # level metadata (lvl is guaranteed non-None by the assert above)
        msg_dict["_lvl"] = lvl

        # msg_dict["_stream"] = stream # moved to LoggingStream

        # extra data in kwargs
        if len(kwargs) > 0:
            msg_dict["_kwargs"] = kwargs

        # add default contents (timing, etc); explicit message keys win
        msg_dict = {
            **{k: v() for k, v in self._streams[stream].default_contents.items()},
            **msg_dict,
        }

        # write
        # ========================================
        logfile_msg: str = json.dumps(json_serialize(msg_dict)) + "\n"
        if (
            (stream is None)
            or (stream not in self._streams)
            or (self._streams[stream].handler is None)
        ):
            # write to the main log file if no stream is specified
            self._log_file_handle.write(logfile_msg)
        else:
            # otherwise, write to the stream-specific file
            s_handler: AnyIO | None = self._streams[stream].handler
            if s_handler is not None:
                s_handler.write(logfile_msg)
            else:
                raise ValueError(
                    f"stream handler is None! something in the logging stream setup is wrong:\n{self}"
                )

        # if it was important enough to print, flush all streams
        if _printed:
            self.flush_all()

    def log_elapsed_last(
        self,
        lvl: int | None = None,
        stream: str | None = None,
        console_print: bool = True,
        **kwargs,
    ) -> float:
        """logs and returns the time elapsed since the last message was printed to the console (in any stream)

        # Raises:
        - `ValueError` : if last-message tracking is disabled (`keep_last_msg_time=False`)
        """
        if self._last_msg_time is None:
            raise ValueError("no last message time!")
        # FIX: previously returned `self.log(...)` which is always None,
        # despite the `-> float` annotation; now return the elapsed time
        elapsed: float = round(time.time() - self._last_msg_time, 6)
        self.log(
            {"elapsed_time": elapsed},
            lvl=(lvl if lvl is not None else self._console_print_threshold),
            stream=stream,
            console_print=console_print,
            **kwargs,
        )
        return elapsed

    def flush_all(self):
        """flush the main log file and all stream handlers"""

        self._log_file_handle.flush()

        for stream in self._streams.values():
            if stream.handler is not None:
                stream.handler.flush()

    def __getattr__(self, stream: str) -> Callable:
        # `logger.my_stream(msg)` is sugar for `logger.log(msg, stream="my_stream")`
        if stream.startswith("_"):
            raise AttributeError(f"invalid stream name {stream} (no underscores)")
        return partial(self.log, stream=stream)

    def __getitem__(self, stream: str):
        # `logger["my_stream"](msg)` -- same sugar as `__getattr__`
        return partial(self.log, stream=stream)

    def __call__(self, *args, **kwargs):
        # calling the logger logs directly
        return self.log(*args, **kwargs)
logger with more features, including log levels and streams
Parameters:
- `log_path : str | None`
default log file path
(defaults to `None`)
- `log_file : AnyIO | None`
default log io, should have a `.write()` method (pass only this or `log_path`, not both)
(defaults to `None`)
- `timestamp : bool`
whether to add timestamps to every log message (under the `_timestamp` key)
(defaults to `True`)
- `default_level : int`
default log level for streams/messages that don't specify a level
(defaults to `0`)
- `console_print_threshold : int`
log level at which to print to the console, anything greater will not be printed unless overridden by `console_print`
(defaults to `50`)
- `level_header : HeaderFunction`
function for formatting log messages when printing to console
(defaults to `HEADER_FUNCTIONS["md"]`)
- `keep_last_msg_time : bool`
    whether to keep the last message time
    (defaults to `True`)
Raises:
- `ValueError` : _description_
Logger( log_path: str | None = None, log_file: Union[TextIO, muutils.logger.simplelogger.NullIO, NoneType] = None, default_level: int = 0, console_print_threshold: int = 50, level_header: muutils.logger.headerfuncs.HeaderFunction = <function md_header_function>, streams: Union[dict[str | None, LoggingStream], Sequence[LoggingStream]] = (), keep_last_msg_time: bool = True, timestamp: bool = True, **kwargs)
72 def __init__( 73 self, 74 log_path: str | None = None, 75 log_file: AnyIO | None = None, 76 default_level: int = 0, 77 console_print_threshold: int = 50, 78 level_header: HeaderFunction = HEADER_FUNCTIONS["md"], 79 streams: dict[str | None, LoggingStream] | Sequence[LoggingStream] = (), 80 keep_last_msg_time: bool = True, 81 # junk args 82 timestamp: bool = True, 83 **kwargs, 84 ): 85 # junk arg checking 86 # ================================================== 87 if len(kwargs) > 0: 88 raise ValueError(f"unrecognized kwargs: {kwargs}") 89 90 if not timestamp: 91 raise ValueError( 92 "timestamp must be True -- why would you not want timestamps?" 93 ) 94 95 # timing 96 # ================================================== 97 # timing compares 98 self._keep_last_msg_time: bool = keep_last_msg_time 99 # TODO: handle per stream? 100 self._last_msg_time: float | None = time.time() 101 102 # basic setup 103 # ================================================== 104 # init BaseLogger 105 super().__init__(log_file=log_file, log_path=log_path, timestamp=timestamp) 106 107 # level-related 108 self._console_print_threshold: int = console_print_threshold 109 self._default_level: int = default_level 110 111 # set up streams 112 self._streams: dict[str | None, LoggingStream] = ( 113 streams 114 if isinstance(streams, typing.Mapping) 115 else {s.name: s for s in streams} 116 ) 117 # default error stream 118 if "error" not in self._streams: 119 self._streams["error"] = LoggingStream( 120 "error", 121 aliases={ 122 "err", 123 "except", 124 "Exception", 125 "exception", 126 "exceptions", 127 "errors", 128 }, 129 ) 130 131 # check alias duplicates 132 alias_set: set[str | None] = set() 133 for stream in self._streams.values(): 134 for alias in stream.aliases: 135 if alias in alias_set: 136 raise ValueError(f"alias {alias} is already in use") 137 alias_set.add(alias) 138 139 # add aliases 140 for stream in tuple(self._streams.values()): 141 for alias in stream.aliases: 142 if alias not 
in self._streams: 143 self._streams[alias] = stream 144 145 # print formatting 146 self._level_header: HeaderFunction = level_header 147 148 print({k: str(v) for k, v in self._streams.items()})
def
log( self, msg: Union[bool, int, float, str, NoneType, List[Union[bool, int, float, str, NoneType, List[Any], Dict[str, Any]]], Dict[str, Union[bool, int, float, str, NoneType, List[Any], Dict[str, Any]]]] = None, lvl: int | None = None, stream: str | None = None, console_print: bool = False, extra_indent: str = '', **kwargs):
159 def log( # type: ignore # yes, the signatures are different here. 160 self, 161 msg: JSONitem = None, 162 lvl: int | None = None, 163 stream: str | None = None, 164 console_print: bool = False, 165 extra_indent: str = "", 166 **kwargs, 167 ): 168 """logging function 169 170 ### Parameters: 171 - `msg : JSONitem` 172 message (usually string or dict) to be logged 173 - `lvl : int | None` 174 level of message (lower levels are more important) 175 (defaults to `None`) 176 - `console_print : bool` 177 override `console_print_threshold` setting 178 (defaults to `False`) 179 - `stream : str | None` 180 whether to log to a stream (defaults to `None`), which logs to the default `None` stream 181 (defaults to `None`) 182 """ 183 184 # add to known stream names if not present 185 if stream not in self._streams: 186 self._streams[stream] = LoggingStream(stream) 187 188 # set default level to either global or stream-specific default level 189 # ======================================== 190 if lvl is None: 191 if stream is None: 192 lvl = self._default_level 193 else: 194 if self._streams[stream].default_level is not None: 195 lvl = self._streams[stream].default_level 196 else: 197 lvl = self._default_level 198 199 assert lvl is not None, "lvl should not be None at this point" 200 201 # print to console with formatting 202 # ======================================== 203 _printed: bool = False 204 if console_print or (lvl <= self._console_print_threshold): 205 # add some formatting 206 print( 207 self._level_header( 208 msg=msg, 209 lvl=lvl, 210 stream=stream, 211 extra_indent=extra_indent, 212 ) 213 ) 214 215 # store the last message time 216 if self._last_msg_time is not None: 217 self._last_msg_time = time.time() 218 219 _printed = True 220 221 # convert and add data 222 # ======================================== 223 # converting to dict 224 msg_dict: typing.Mapping 225 if not isinstance(msg, typing.Mapping): 226 msg_dict = {"_msg": msg} 227 else: 228 msg_dict = msg 229 230 
# level+stream metadata 231 if lvl is not None: 232 msg_dict["_lvl"] = lvl 233 234 # msg_dict["_stream"] = stream # moved to LoggingStream 235 236 # extra data in kwargs 237 if len(kwargs) > 0: 238 msg_dict["_kwargs"] = kwargs 239 240 # add default contents (timing, etc) 241 msg_dict = { 242 **{k: v() for k, v in self._streams[stream].default_contents.items()}, 243 **msg_dict, 244 } 245 246 # write 247 # ======================================== 248 logfile_msg: str = json.dumps(json_serialize(msg_dict)) + "\n" 249 if ( 250 (stream is None) 251 or (stream not in self._streams) 252 or (self._streams[stream].handler is None) 253 ): 254 # write to the main log file if no stream is specified 255 self._log_file_handle.write(logfile_msg) 256 else: 257 # otherwise, write to the stream-specific file 258 s_handler: AnyIO | None = self._streams[stream].handler 259 if s_handler is not None: 260 s_handler.write(logfile_msg) 261 else: 262 raise ValueError( 263 f"stream handler is None! something in the logging stream setup is wrong:\n{self}" 264 ) 265 266 # if it was important enough to print, flush all streams 267 if _printed: 268 self.flush_all()
logging function
Parameters:
- `msg : JSONitem`
    message (usually string or dict) to be logged
- `lvl : int | None`
    level of message (lower levels are more important)
    (defaults to `None`)
- `console_print : bool`
    override `console_print_threshold` setting
    (defaults to `False`)
- `stream : str | None`
    whether to log to a stream (defaults to `None`), which logs to the default `None` stream
    (defaults to `None`)
def
log_elapsed_last( self, lvl: int | None = None, stream: str | None = None, console_print: bool = True, **kwargs) -> float:
270 def log_elapsed_last( 271 self, 272 lvl: int | None = None, 273 stream: str | None = None, 274 console_print: bool = True, 275 **kwargs, 276 ) -> float: 277 """logs the time elapsed since the last message was printed to the console (in any stream)""" 278 if self._last_msg_time is None: 279 raise ValueError("no last message time!") 280 else: 281 return self.log( 282 {"elapsed_time": round(time.time() - self._last_msg_time, 6)}, 283 lvl=(lvl if lvl is not None else self._console_print_threshold), 284 stream=stream, 285 console_print=console_print, 286 **kwargs, 287 )
logs the time elapsed since the last message was printed to the console (in any stream)
@dataclass
class
LoggingStream:
@dataclass
class LoggingStream:
    """properties of a logging stream

    - `name: str` name of the stream
    - `aliases: set[str]` aliases for the stream
        (calls to these names will be redirected to this stream. duplicate aliases will result in errors)
        TODO: perhaps duplicate aliases should result in duplicate writes?
    - `file: str|bool|AnyIO|None` file to write to
        - if `None`, will write to standard log
        - if `True`, will write to `name + ".log.jsonl"`
        - if `False` will "write" to `NullIO` (throw it away)
        - if a string, will write to that file
        - if a fileIO type object, will write to that object
    - `default_level: int|None` default level for this stream
    - `default_contents: dict[str, Callable[[], Any]]` default contents for this stream
    - `last_msg: tuple[float, Any]|None` last message written to this stream (timestamp, message)
    """

    name: str | None
    aliases: set[str | None] = field(default_factory=set)
    file: str | bool | AnyIO | None = None
    default_level: int | None = None
    default_contents: dict[str, Callable[[], Any]] = field(default_factory=dict)
    handler: AnyIO | None = None

    # TODO: implement last-message caching
    # last_msg: tuple[float, Any]|None = None

    def make_handler(self) -> AnyIO | None:
        """create the IO handler implied by `self.file` (see class docstring for the mapping)"""
        if self.file is None:
            return None
        elif isinstance(self.file, str):
            # if its a string, open a file
            return open(
                self.file,
                "w",
                encoding="utf-8",
            )
        elif isinstance(self.file, bool):
            # if its a bool and true, open a file with the same name as the stream (in the current dir)
            # TODO: make this happen in the same dir as the main logfile?
            if self.file:
                return open(  # type: ignore[return-value]
                    f"{sanitize_fname(self.name)}.log.jsonl",
                    "w",
                    encoding="utf-8",
                )
            else:
                return NullIO()
        else:
            # if its neither, check it has `.write()`, `.flush()` and `.close()` methods
            if (
                (
                    not hasattr(self.file, "write")
                    or (not callable(self.file.write))
                    or (not hasattr(self.file, "flush"))
                    or (not callable(self.file.flush))
                )
                or (not hasattr(self.file, "close"))
                or (not callable(self.file.close))
            ):
                raise ValueError(f"stream {self.name} has invalid handler {self.file}")
            # ignore type check because we know it has a .write() method,
            # assume the user knows what they're doing
            return self.file  # type: ignore

    def __post_init__(self):
        self.aliases = set(self.aliases)
        if any(x.startswith("_") for x in self.aliases if x is not None):
            raise ValueError(
                "stream names or aliases cannot start with an underscore, sorry"
            )
        self.aliases.add(self.name)
        # FIX: copy so a dict passed by the caller is never mutated in place
        self.default_contents = dict(self.default_contents)
        self.default_contents["_timestamp"] = time.time
        self.default_contents["_stream"] = lambda: self.name
        self.handler = self.make_handler()

    def __del__(self):
        # FIX: never raise from __del__ -- during interpreter shutdown the
        # handler may already be closed (flush/close on a closed file raises)
        if self.handler is not None:
            try:
                self.handler.flush()
                self.handler.close()
            except Exception:
                pass

    def __str__(self):
        return f"LoggingStream(name={self.name}, aliases={self.aliases}, file={self.file}, default_level={self.default_level}, default_contents={self.default_contents})"
properties of a logging stream
- `name: str` name of the stream
- `aliases: set[str]` aliases for the stream
    (calls to these names will be redirected to this stream. duplicate aliases will result in errors)
    TODO: perhaps duplicate aliases should result in duplicate writes?
- `file: str|bool|AnyIO|None` file to write to
    - if `None`, will write to standard log
    - if `True`, will write to `name + ".log.jsonl"`
    - if `False`, will "write" to `NullIO` (throw it away)
    - if a string, will write to that file
    - if a fileIO type object, will write to that object
- `default_level: int|None` default level for this stream
- `default_contents: dict[str, Callable[[], Any]]` default contents for this stream
- `last_msg: tuple[float, Any]|None` last message written to this stream (timestamp, message)
LoggingStream( name: str | None, aliases: set[str | None] = <factory>, file: Union[str, bool, TextIO, muutils.logger.simplelogger.NullIO, NoneType] = None, default_level: int | None = None, default_contents: dict[str, typing.Callable[[], typing.Any]] = <factory>, handler: Union[TextIO, muutils.logger.simplelogger.NullIO, NoneType] = None)
41 def make_handler(self) -> AnyIO | None: 42 if self.file is None: 43 return None 44 elif isinstance(self.file, str): 45 # if its a string, open a file 46 return open( 47 self.file, 48 "w", 49 encoding="utf-8", 50 ) 51 elif isinstance(self.file, bool): 52 # if its a bool and true, open a file with the same name as the stream (in the current dir) 53 # TODO: make this happen in the same dir as the main logfile? 54 if self.file: 55 return open( # type: ignore[return-value] 56 f"{sanitize_fname(self.name)}.log.jsonl", 57 "w", 58 encoding="utf-8", 59 ) 60 else: 61 return NullIO() 62 else: 63 # if its neither, check it has `.write()` and `.flush()` methods 64 if ( 65 ( 66 not hasattr(self.file, "write") 67 or (not callable(self.file.write)) 68 or (not hasattr(self.file, "flush")) 69 or (not callable(self.file.flush)) 70 ) 71 or (not hasattr(self.file, "close")) 72 or (not callable(self.file.close)) 73 ): 74 raise ValueError(f"stream {self.name} has invalid handler {self.file}") 75 # ignore type check because we know it has a .write() method, 76 # assume the user knows what they're doing 77 return self.file # type: ignore
class
SimpleLogger:
class SimpleLogger:
    """logs training data to a jsonl file"""

    def __init__(
        self,
        log_path: str | None = None,
        log_file: AnyIO | None = None,
        timestamp: bool = True,
    ):
        # whether to stamp every message with a `_timestamp` key
        self._timestamp: bool = timestamp
        self._log_path: str | None = log_path

        self._log_file_handle: AnyIO

        if (log_path is None) and (log_file is None):
            # FIX: `sys.stderr` was passed as a second *positional* argument,
            # which printed the stderr object itself to stdout; use `file=`
            print(
                "[logger_internal] # no log file specified, will only write to console",
                file=sys.stderr,
            )
            self._log_file_handle = sys.stdout

        elif (log_path is not None) and (log_file is not None):
            raise ValueError(
                "cannot specify both log_path and log_file, use streams in `SimpleLogger`"
            )
        else:
            # now exactly one of the two is None
            if log_file is not None:
                self._log_file_handle = log_file
            else:
                assert log_path is not None
                self._log_file_handle = open(log_path, "w", encoding="utf-8")

    def log(self, msg: JSONitem, console_print: bool = False, **kwargs):
        """log a message to the log file, and optionally to the console

        # Parameters:
        - `msg : JSONitem` message (string or mapping) to log as one jsonl line
        - `console_print : bool` also `print()` the raw message (defaults to `False`)
        - `**kwargs` extra data, stored under the `_kwargs` key
        """
        if console_print:
            print(msg)

        # FIX: build a fresh dict so a mapping passed by the caller is
        # never mutated by the metadata keys added below
        if not isinstance(msg, typing.Mapping):
            msg = {"_msg": msg}
        else:
            msg = dict(msg)

        if self._timestamp:
            msg["_timestamp"] = time.time()

        if len(kwargs) > 0:
            msg["_kwargs"] = kwargs

        self._log_file_handle.write(json.dumps(json_serialize(msg)) + "\n")
logs training data to a jsonl file
SimpleLogger( log_path: str | None = None, log_file: Union[TextIO, muutils.logger.simplelogger.NullIO, NoneType] = None, timestamp: bool = True)
38 def __init__( 39 self, 40 log_path: str | None = None, 41 log_file: AnyIO | None = None, 42 timestamp: bool = True, 43 ): 44 self._timestamp: bool = timestamp 45 self._log_path: str | None = log_path 46 47 self._log_file_handle: AnyIO 48 49 if (log_path is None) and (log_file is None): 50 print( 51 "[logger_internal] # no log file specified, will only write to console", 52 sys.stderr, 53 ) 54 self._log_file_handle = sys.stdout 55 56 elif (log_path is not None) and (log_file is not None): 57 raise ValueError( 58 "cannot specify both log_path and log_file, use streams in `SimpleLogger`" 59 ) 60 else: 61 # now exactly one of the two is None 62 if log_file is not None: 63 self._log_file_handle = log_file 64 else: 65 assert log_path is not None 66 self._log_file_handle = open(log_path, "w", encoding="utf-8")
def
log( self, msg: Union[bool, int, float, str, NoneType, List[Union[bool, int, float, str, NoneType, List[Any], Dict[str, Any]]], Dict[str, Union[bool, int, float, str, NoneType, List[Any], Dict[str, Any]]]], console_print: bool = False, **kwargs):
68 def log(self, msg: JSONitem, console_print: bool = False, **kwargs): 69 """log a message to the log file, and optionally to the console""" 70 if console_print: 71 print(msg) 72 73 if not isinstance(msg, typing.Mapping): 74 msg = {"_msg": msg} 75 76 if self._timestamp: 77 msg["_timestamp"] = time.time() 78 79 if len(kwargs) > 0: 80 msg["_kwargs"] = kwargs 81 82 self._log_file_handle.write(json.dumps(json_serialize(msg)) + "\n")
log a message to the log file, and optionally to the console
class
TimerContext:
class TimerContext:
    """context manager for timing code

    after the `with` block exits, `start_time`, `end_time`, and
    `elapsed_time` hold wall-clock seconds (as measured by `time.time()`)
    """

    def __init__(self) -> None:
        # declared here for type-checkers; assigned in __enter__/__exit__
        self.start_time: float
        self.end_time: float
        self.elapsed_time: float

    def __enter__(self) -> "TimerContext":
        # mark the wall-clock time at block entry
        self.start_time = time.time()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> Literal[False]:
        # mark exit time once, then derive the delta from it
        now: float = time.time()
        self.end_time = now
        self.elapsed_time = now - self.start_time
        # returning False means exceptions from the block are never suppressed
        return False
context manager for timing code