Coverage for src/setlogging/logger.py: 81%
113 statements
« prev ^ index » next coverage.py v7.6.10, created at 2025-01-24 05:05 +0000
1# Standard library imports
2from datetime import datetime
3import json
4import logging
5from logging.handlers import RotatingFileHandler
6import os
7from typing import Optional
def get_tz_abbreviation(dt_obj: datetime) -> str:
    """Return a short timezone abbreviation for a timezone-aware datetime.

    Windows commonly reports full names such as "Eastern Standard Time";
    those are collapsed to their initials ("EST"). Names without spaces
    (e.g. "UTC", "PST") pass through unchanged.

    Args:
        dt_obj: Timezone-aware datetime object

    Returns:
        str: Timezone abbreviation (e.g., EST, PST); "UTC" when the
        datetime carries no usable timezone name.
    """
    name = dt_obj.tzname()
    if not name:
        return "UTC"  # tzname() returned None or "" — fall back
    if " " not in name:
        return name  # Already an abbreviation
    # Windows-style full name: keep the first letter of each word
    return "".join(word[0] for word in name.split())
# Global timezone constants
# Local tzinfo captured once at import time and reused for all timestamp math
LOCAL_TZINFO = datetime.now().astimezone().tzinfo
TIMEZONE_ABBREV = get_tz_abbreviation(
    # Precomputed abbreviation so formatters don't re-derive it per record
    datetime.now(LOCAL_TZINFO)
)
class CustomFormatter(logging.Formatter):
    """Log formatter that adds millisecond precision and a timezone tag."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Cache the module-level abbreviation once per formatter instance
        self._tz_abbrev = TIMEZONE_ABBREV

    def formatTime(self, record, datefmt=None) -> str:
        """Format ``record.created`` as local time with ``.mmm`` and tz suffix.

        Args:
            record: LogRecord whose ``created`` timestamp is formatted
            datefmt: Optional strftime format (defaults to ISO-like form)

        Returns:
            str: e.g. "2024-01-01 12:00:00.123 EST"; on any failure the
            stock Formatter behavior is used instead.
        """
        try:
            fmt = datefmt or "%Y-%m-%d %H:%M:%S"
            stamp = datetime.fromtimestamp(record.created, LOCAL_TZINFO)
            with_micros = stamp.strftime(fmt + ".%f")
            # Drop the last three digits: microseconds -> milliseconds
            return f"{with_micros[:-3]} {self._tz_abbrev}"
        except Exception:
            return super().formatTime(record, datefmt)
def setup_logging(
    log_level: int = logging.DEBUG,
    log_file: Optional[str] = None,
    max_size_mb: int = 25,  # 25MB
    backup_count: int = 7,
    console_output: bool = True,
    log_format: Optional[str] = None,
    date_format: Optional[str] = None,
    json_format: bool = False,
    indent: Optional[int] = None,
) -> logging.Logger:
    """
    Configure logging system with rotating file handler and optional console output.

    Args:
        log_level: Logging level (default: DEBUG); must be one of the
            standard levels (DEBUG/INFO/WARNING/ERROR/CRITICAL)
        log_file: Log file path (default: app.log or app_json.log if json_format is True)
        max_size_mb: Max log file size in MB before rotation (default: 25MB)
        backup_count: Number of backup files to keep (default: 7)
        console_output: Enable console logging (default: True)
        log_format: Custom log format string (optional)
        date_format: Custom date format string (optional)
        json_format: Flag to determine if log format should be JSON (default: False)
        indent: Indentation level for JSON output; only valid with json_format=True

    Returns:
        logging.Logger: The configured logger.

    Raises:
        ValueError: On invalid parameter values or combinations.
        PermissionError: When the log directory or file is not writable.
        RuntimeError: If handler/formatter setup fails.
    """
    # ---- Parameter validation (errors propagate to the caller unwrapped) ----
    if max_size_mb <= 0:
        raise ValueError("max_size_mb must be positive")
    if backup_count < 0:
        raise ValueError("backup_count must be non-negative")
    if indent is not None:
        if indent < 0:
            raise ValueError("indent must be non-negative")
        if not json_format:
            raise ValueError(
                "indent parameter is only valid when json_format is True"
            )

    # Only the standard levels are accepted; NOTSET (0) is rejected here,
    # which is why no special-casing of level 0 is needed further down.
    valid_levels = {
        logging.DEBUG,
        logging.INFO,
        logging.WARNING,
        logging.ERROR,
        logging.CRITICAL,
    }
    if log_level not in valid_levels:
        raise ValueError(
            f"Invalid log level: {log_level}. Valid levels are: {valid_levels}"
        )

    # A date_format with no strftime codes would render a constant string;
    # require at least one recognized code.
    if date_format:
        valid_codes = {"%Y", "%m", "%d", "%H", "%M", "%S", "%z", "%Z"}
        if not any(code in date_format for code in valid_codes):
            raise ValueError(
                f"Invalid date_format: {date_format} must contain at least one format code (e.g., %Y, %m, %H)"
            )

    # Likewise a log_format without any record placeholders is useless.
    if log_format:
        valid_codes = {"%(asctime)s", "%(levelname)s", "%(name)s", "%(message)s"}
        if not any(code in log_format for code in valid_codes):
            raise ValueError(
                f"Invalid log_format: {log_format} must contain at least one format code (e.g., %(asctime)s, %(levelname)s)"
            )

    # Calculate max file size in bytes
    max_bytes = max_size_mb * 1024 * 1024

    # Default file name depends on the output format
    log_file = log_file or ("app_json.log" if json_format else "app.log")

    # Create log directory if it does not exist
    log_dir = os.path.dirname(log_file)
    if log_dir:  # If log_dir is not empty
        os.makedirs(log_dir, exist_ok=True)

        # Probe writability up front so failures surface as a clear
        # PermissionError instead of a cryptic handler error later.
        test_file = os.path.join(log_dir, ".permission_test")
        try:
            with open(test_file, "w") as f:
                f.write("test")
            os.remove(test_file)
        except OSError as e:  # IOError has been an alias of OSError since 3.3
            raise PermissionError(f"Directory not writable: {log_dir}") from e

    # Check if an existing log file is writable
    if os.path.exists(log_file) and not os.access(log_file, os.W_OK):
        raise PermissionError(f"File not writable: {log_file}")

    try:
        # Create logger and drop handlers from any earlier setup call
        logger = logging.getLogger(__name__)
        logger.setLevel(log_level)
        logger.handlers = []

        # Set up formatter
        if json_format:
            # A JSON template string; logging fills the %(...)s placeholders
            formatter = logging.Formatter(
                json.dumps(
                    {
                        "time": "%(asctime)s",
                        "name": "%(name)s",
                        "level": "%(levelname)s",
                        "message": "%(message)s",
                    },
                    indent=indent,
                )
            )
        else:
            formatter = CustomFormatter(
                log_format or "%(asctime)s [%(levelname)s] [%(name)s] %(message)s",
                date_format or "%Y-%m-%d %H:%M:%S",
            )

        # Set up file handler
        file_handler = RotatingFileHandler(
            log_file, maxBytes=max_bytes, backupCount=backup_count
        )
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

        # Set up console handler if enabled
        if console_output:
            console_handler = logging.StreamHandler()
            console_handler.setFormatter(formatter)
            logger.addHandler(console_handler)

        # Generate configuration details using get_config_message
        config_message = get_config_message(
            log_level=log_level,
            file_handler=file_handler,
            max_size_mb=max_size_mb,
            backup_count=backup_count,
            console_output=console_output,
            json_format=json_format,  # Adapt the format based on user preference
            indent=indent,
        )

        # Log configuration details at the configured level (log_level was
        # validated above, so it is never NOTSET/0 here).
        if json_format:
            config_dict = json.loads(config_message)
            logger.log(log_level, {"Logging Configuration": config_dict})
        else:
            logger.log(log_level, f"Logging Configuration:\n{config_message}")

        return logger

    except Exception as e:
        raise RuntimeError(f"Failed to set up logging: {str(e)}") from e
def get_config_message(
    log_level,
    file_handler,
    max_size_mb,
    backup_count,
    console_output,
    json_format=False,
    indent=None,
):
    """Build a human- or machine-readable summary of the logging setup.

    Args:
        log_level: Numeric logging level being configured.
        file_handler: Handler whose ``baseFilename`` is reported.
        max_size_mb: Rotation threshold in MB.
        backup_count: Number of rotated backups kept.
        console_output: Whether console logging is enabled.
        json_format: Return a JSON string instead of an ASCII table.
        indent: JSON indentation level (only used when json_format is True).

    Returns:
        str: JSON document or boxed ASCII table describing the configuration.
    """
    processID = os.getpid()

    if json_format:
        config_dict = {
            "Level": logging.getLevelName(log_level),
            "LogFile": file_handler.baseFilename,
            "MaxFileSizeMB": max_size_mb,
            "BackupCount": backup_count,
            "ConsoleOutput": console_output,
            "Timezone": str(LOCAL_TZINFO),
            "ProcessID": processID,
        }
        # Bug fix: honor the caller's indent — it was previously accepted
        # but never passed through (indent=None matches the old output).
        return json.dumps(config_dict, indent=indent)
    else:
        # Max Size message
        max_size_message = f"{max_size_mb:.2f} MB ({max_size_mb * 1024:.0f} KB)"
        return f"""
+{'-' * 60}+
|{'Logging Configuration'.center(60)}|
+{'-' * 60}+
| Level : {logging.getLevelName(log_level):<44}|
| Log File : {file_handler.baseFilename:<44.44}|
| Max Size : {max_size_message:<44.44}|
| Backups : {backup_count:<44}|
| Console : {str(console_output):<44}|
| Timezone : {str(LOCAL_TZINFO):<44}|
| Process ID : {processID:<44}|
+{'-' * 60}+
"""
def get_logger(
    name: str = __name__,
    log_level: int = logging.DEBUG,
    log_file: Optional[str] = None,
    max_size_mb: int = 25,  # 25MB
    backup_count: int = 7,
    console_output: bool = True,
    log_format: Optional[str] = None,
    date_format: Optional[str] = None,
    json_format: bool = False,
    indent: Optional[int] = None,
) -> logging.Logger:
    """
    Simplified function to set up logging and return a logger instance.

    NOTE(review): ``name`` is accepted but never forwarded — the call
    below passes everything except it, so setup_logging always names the
    logger after its own module. Confirm whether per-name loggers were
    intended before relying on this parameter.

    Args:
        name: Name of the logger (currently unused; see NOTE above).
        log_level: Logging level.
        log_file: Log file name.
        max_size_mb: Max size of log file in MB before rotation.
        backup_count: Number of rotated backups to keep.
        console_output: Enable console logging (default: True)
        log_format: Custom log format string (optional)
        date_format: Custom date format string (optional)
        json_format: Flag to determine if log format should be JSON.
        indent: Indentation level for JSON output.

    Returns:
        logging.Logger: Configured logger instance.
    """
    config = dict(
        log_level=log_level,
        log_file=log_file,
        max_size_mb=max_size_mb,
        backup_count=backup_count,
        console_output=console_output,
        log_format=log_format,
        date_format=date_format,
        json_format=json_format,
        indent=indent,
    )
    return setup_logging(**config)
# Example Usage
if __name__ == "__main__":
    try:
        demo = get_logger(console_output=True)
        # Emit one record at each standard level
        for emit, message in (
            (demo.debug, "Basic debug example"),
            (demo.info, "Basic usage example"),
            (demo.warning, "Basic warning example"),
            (demo.error, "Basic error example"),
            (demo.critical, "Basic critical example"),
        ):
            emit(message)
        demo.info(datetime.now().astimezone().tzinfo)

        # JSON format example
        json_demo = get_logger(json_format=True, indent=2)
        json_demo.info("JSON format example")
    except Exception as exc:
        print(f"Error: {str(exc)}")
        raise