Update logging documentation and refactor logger implementation
- Revised the logging documentation to clarify the unified logging system's features and usage patterns.
- Simplified the logger implementation by removing the custom `DateRotatingFileHandler` and using the standard library's `TimedRotatingFileHandler` for date-based log rotation.
- Enhanced the `get_logger` function to ensure thread-safe logger configuration and prevent duplicate handlers.
- Introduced a new `cleanup_old_logs` function for age-based log cleanup, while retaining the existing count-based cleanup mechanism.
- Improved error handling and logging setup to ensure robust logging behavior across components.

These changes improve the clarity and maintainability of the logging system, making it easier for developers to implement and use logging in their components.
This commit is contained in:
402
utils/logger.py
402
utils/logger.py
@@ -3,339 +3,151 @@ Unified logging system for the TCP Dashboard project.
|
||||
|
||||
Provides centralized logging with:
|
||||
- Component-specific log directories
|
||||
- Date-based file rotation
|
||||
- Date-based file rotation using standard library handlers
|
||||
- Unified log format: [YYYY-MM-DD HH:MM:SS - LEVEL - message]
|
||||
- Thread-safe operations
|
||||
- Automatic directory creation
|
||||
- Verbose console logging with proper level handling
|
||||
- Automatic old log cleanup
|
||||
|
||||
Usage:
|
||||
from utils.logger import get_logger
|
||||
from utils.logger import get_logger, cleanup_old_logs
|
||||
|
||||
logger = get_logger('bot_manager')
|
||||
logger.info("This is an info message")
|
||||
logger.error("This is an error message")
|
||||
|
||||
# With verbose console output
|
||||
logger = get_logger('bot_manager', verbose=True)
|
||||
|
||||
# With custom cleanup settings
|
||||
logger = get_logger('bot_manager', clean_old_logs=True, max_log_files=7)
|
||||
# Clean up logs older than 7 days
|
||||
cleanup_old_logs('bot_manager', days_to_keep=7)
|
||||
"""
|
||||
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, Optional
|
||||
from typing import Optional
|
||||
import threading
|
||||
|
||||
# Lock for thread-safe logger configuration
|
||||
_lock = threading.Lock()
|
||||
|
||||
class DateRotatingFileHandler(logging.FileHandler):
    """
    Custom file handler that rotates log files based on date changes.

    Creates new log files when the date changes to ensure daily separation.
    Files are named ``YYYY-MM-DD.txt`` inside the component's log directory.
    """

    def __init__(self, log_dir: Path, component_name: str, cleanup_callback=None, max_files=30):
        # Directory that holds this component's daily log files.
        self.log_dir = log_dir
        self.component_name = component_name
        # Date string ("YYYY-MM-DD") of the currently open file; None forces
        # the first _update_filename() call to initialize everything.
        self.current_date = None
        # Optional callable(component_name, max_files) invoked on each
        # rotation to prune old files.
        self.cleanup_callback = cleanup_callback
        self.max_files = max_files
        # Serializes date checks + stream swaps in emit(); distinct from the
        # base Handler's own self.lock created by super().__init__.
        self._lock = threading.Lock()

        # Initialize with today's file
        self._update_filename()
        super().__init__(self.current_filename, mode='a', encoding='utf-8')

    def _update_filename(self):
        """Update the filename based on current date."""
        today = datetime.now().strftime('%Y-%m-%d')
        if self.current_date != today:
            self.current_date = today
            self.current_filename = self.log_dir / f"{today}.txt"

            # Ensure the directory exists
            self.log_dir.mkdir(parents=True, exist_ok=True)

            # Cleanup old logs if callback is provided
            if self.cleanup_callback:
                self.cleanup_callback(self.component_name, self.max_files)

    def emit(self, record):
        """Emit a log record, rotating file if date has changed."""
        # NOTE(review): rotation and write are serialized under _lock; the
        # base Handler additionally takes its own lock inside emit().
        with self._lock:
            # Check if we need to rotate to a new file
            today = datetime.now().strftime('%Y-%m-%d')
            if self.current_date != today:
                # Close current file
                if hasattr(self, 'stream') and self.stream:
                    self.stream.close()

                # Update filename and reopen (this will trigger cleanup)
                self._update_filename()
                self.baseFilename = str(self.current_filename)
                self.stream = self._open()

            super().emit(record)
|
||||
|
||||
|
||||
class UnifiedLogger:
    """
    Unified logger class that manages component-specific loggers with consistent formatting.
    """

    # Cache of already-configured loggers keyed by the full configuration
    # (component, level, verbosity, cleanup settings), so different configs
    # for the same component yield distinct logger objects.
    _loggers: Dict[str, logging.Logger] = {}
    # Guards _loggers and handler setup across threads.
    _lock = threading.Lock()

    @classmethod
    def get_logger(cls, component_name: str, log_level: str = "INFO",
                   verbose: Optional[bool] = None, clean_old_logs: bool = True,
                   max_log_files: int = 30) -> logging.Logger:
        """
        Get or create a logger for the specified component.

        Args:
            component_name: Name of the component (e.g., 'bot_manager', 'data_collector')
            log_level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
            verbose: Enable console logging. If None, uses VERBOSE_LOGGING from .env
            clean_old_logs: Automatically clean old log files when creating new ones
            max_log_files: Maximum number of log files to keep (default: 30)

        Returns:
            Configured logger instance for the component
        """
        # Create a unique key for logger configuration
        logger_key = f"{component_name}_{log_level}_{verbose}_{clean_old_logs}_{max_log_files}"

        with cls._lock:
            if logger_key in cls._loggers:
                return cls._loggers[logger_key]

            # Create new logger.  The hash suffix keeps logging.getLogger
            # from handing back the same underlying logger for two
            # different configurations of the same component.
            logger = logging.getLogger(f"tcp_dashboard.{component_name}.{hash(logger_key) % 10000}")
            logger.setLevel(getattr(logging, log_level.upper()))

            # Prevent duplicate handlers if logger already exists
            if logger.handlers:
                logger.handlers.clear()

            # Create log directory for component
            log_dir = Path("logs") / component_name

            try:
                # Setup cleanup callback if enabled
                cleanup_callback = cls._cleanup_old_logs if clean_old_logs else None

                # Add date-rotating file handler
                file_handler = DateRotatingFileHandler(
                    log_dir, component_name, cleanup_callback, max_log_files
                )
                # File gets everything; the logger's own level filters first.
                file_handler.setLevel(logging.DEBUG)

                # Create unified formatter
                formatter = logging.Formatter(
                    '[%(asctime)s - %(levelname)s - %(message)s]',
                    datefmt='%Y-%m-%d %H:%M:%S'
                )
                file_handler.setFormatter(formatter)
                logger.addHandler(file_handler)

                # Add console handler based on verbose setting
                should_log_to_console = cls._should_enable_console_logging(verbose)
                if should_log_to_console:
                    console_handler = logging.StreamHandler()

                    # Set console log level based on log_level with proper type handling
                    console_level = cls._get_console_log_level(log_level)
                    console_handler.setLevel(console_level)

                    # Use colored formatter for console if available
                    console_formatter = cls._get_console_formatter()
                    console_handler.setFormatter(console_formatter)
                    logger.addHandler(console_handler)

                # Prevent propagation to root logger
                logger.propagate = False

                cls._loggers[logger_key] = logger

                # Log initialization
                logger.info(f"Logger initialized for component: {component_name} "
                            f"(verbose={should_log_to_console}, cleanup={clean_old_logs}, "
                            f"max_files={max_log_files})")

            except Exception as e:
                # Fallback to console logging if file logging fails
                print(f"Warning: Failed to setup file logging for {component_name}: {e}")
                console_handler = logging.StreamHandler()
                console_handler.setLevel(logging.INFO)
                formatter = logging.Formatter('[%(asctime)s - %(levelname)s - %(message)s]')
                console_handler.setFormatter(formatter)
                logger.addHandler(console_handler)
                logger.propagate = False
                cls._loggers[logger_key] = logger

            return logger

    @classmethod
    def _should_enable_console_logging(cls, verbose: Optional[bool]) -> bool:
        """
        Determine if console logging should be enabled.

        Args:
            verbose: Explicit verbose setting, or None to use environment variable

        Returns:
            True if console logging should be enabled
        """
        if verbose is not None:
            return verbose

        # Check environment variables
        env_verbose = os.getenv('VERBOSE_LOGGING', 'false').lower()
        env_console = os.getenv('LOG_TO_CONSOLE', 'false').lower()

        return env_verbose in ('true', '1', 'yes') or env_console in ('true', '1', 'yes')

    @classmethod
    def _get_console_log_level(cls, log_level: str) -> int:
        """
        Get appropriate console log level based on file log level.

        Args:
            log_level: File logging level

        Returns:
            Console logging level (integer)
        """
        # Map file log levels to console log levels
        # Generally, console should be less verbose than file
        level_mapping = {
            'DEBUG': logging.DEBUG,        # Show all debug info on console too
            'INFO': logging.INFO,          # Show info and above
            'WARNING': logging.WARNING,    # Show warnings and above
            'ERROR': logging.ERROR,        # Show errors and above
            'CRITICAL': logging.CRITICAL   # Show only critical
        }

        return level_mapping.get(log_level.upper(), logging.INFO)

    @classmethod
    def _get_console_formatter(cls) -> logging.Formatter:
        """
        Get formatter for console output with potential color support.

        Returns:
            Configured formatter for console output
        """
        # Basic formatter - could be enhanced with colors in the future
        return logging.Formatter(
            '[%(asctime)s - %(levelname)s - %(message)s]',
            datefmt='%Y-%m-%d %H:%M:%S'
        )

    @classmethod
    def _cleanup_old_logs(cls, component_name: str, max_files: int = 30):
        """
        Clean up old log files for a component, keeping only the most recent files.

        Args:
            component_name: Name of the component
            max_files: Maximum number of log files to keep
        """
        log_dir = Path("logs") / component_name
        if not log_dir.exists():
            return

        # Get all log files sorted by modification time (newest first)
        log_files = sorted(
            log_dir.glob("*.txt"),
            key=lambda f: f.stat().st_mtime,
            reverse=True
        )

        # Keep only the most recent max_files
        files_to_delete = log_files[max_files:]

        for log_file in files_to_delete:
            try:
                log_file.unlink()
                # Only log to console to avoid recursive logging
                if cls._should_enable_console_logging(None):
                    print(f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - INFO - "
                          f"Deleted old log file: {log_file}]")
            except Exception as e:
                print(f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - WARNING - "
                      f"Failed to delete old log file {log_file}: {e}]")

    @classmethod
    def cleanup_old_logs(cls, component_name: str, days_to_keep: int = 30):
        """
        Clean up old log files for a component based on age.

        Args:
            component_name: Name of the component
            days_to_keep: Number of days of logs to retain
        """
        log_dir = Path("logs") / component_name
        if not log_dir.exists():
            return

        # Files last modified before this POSIX timestamp are deleted.
        cutoff_date = datetime.now().timestamp() - (days_to_keep * 24 * 60 * 60)

        for log_file in log_dir.glob("*.txt"):
            if log_file.stat().st_mtime < cutoff_date:
                try:
                    log_file.unlink()
                    print(f"Deleted old log file: {log_file}")
                except Exception as e:
                    print(f"Failed to delete old log file {log_file}: {e}")
|
||||
|
||||
|
||||
# Convenience function for easy import
|
||||
def get_logger(component_name: str, log_level: str = "INFO",
               verbose: Optional[bool] = None, clean_old_logs: bool = True,
               max_log_files: int = 30) -> logging.Logger:
    """
    Get or create a logger for the specified component.

    This function is thread-safe and ensures that handlers are not duplicated:
    a logger that already carries handlers is returned as-is.

    Args:
        component_name: Name of the component (e.g., 'bot_manager', 'data_collector')
        log_level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        verbose: Enable console logging. If None, uses VERBOSE_LOGGING from .env
        clean_old_logs: (Deprecated) Rotation retention is now handled by
            max_log_files via TimedRotatingFileHandler. The parameter is kept
            for backward compatibility and is otherwise ignored.
        max_log_files: Maximum number of rotated log files to keep (default: 30)

    Returns:
        Configured logger instance for the component

    Example:
        from utils.logger import get_logger

        logger = get_logger('bot_manager')                    # basic usage
        logger = get_logger('bot_manager', verbose=True)      # console output
        logger = get_logger('bot_manager', max_log_files=7)   # keep 7 backups

        logger.info("Bot started successfully")
        logger.error("Connection failed", exc_info=True)
    """
    with _lock:
        logger_name = f"tcp_dashboard.{component_name}"
        logger = logging.getLogger(logger_name)

        # Avoid re-configuring if logger already has handlers
        if logger.handlers:
            return logger

        # Set logger level.  getattr can return a non-integer attribute of
        # the logging module for odd inputs (e.g. 'shutdown'), so validate
        # the result instead of only catching AttributeError.
        level = getattr(logging, log_level.upper(), None)
        if not isinstance(level, int):
            print(f"Warning: Invalid log level '{log_level}'. Defaulting to INFO.")
            level = logging.INFO
        logger.setLevel(level)

        # Prevent duplicate output via the root logger
        logger.propagate = False

        # Create log directory for component
        log_dir = Path("logs") / component_name
        log_dir.mkdir(parents=True, exist_ok=True)

        # Unified formatter: [YYYY-MM-DD HH:MM:SS - LEVEL - message]
        formatter = logging.Formatter(
            '[%(asctime)s - %(levelname)s - %(message)s]',
            datefmt='%Y-%m-%d %H:%M:%S'
        )

        # Add date-rotating file handler
        try:
            log_file = log_dir / f"{component_name}.log"
            # Rotates at midnight, keeps 'max_log_files' backups
            file_handler = logging.handlers.TimedRotatingFileHandler(
                log_file, when='midnight', interval=1, backupCount=max_log_files,
                encoding='utf-8'
            )
            file_handler.setFormatter(formatter)
            logger.addHandler(file_handler)
        except Exception as e:
            # Best-effort: fall through so console logging (if enabled)
            # still works when the filesystem is unavailable.
            print(f"Warning: Failed to setup file logging for {component_name}: {e}")

        # Add console handler based on verbose setting / environment
        if _should_enable_console_logging(verbose):
            console_handler = logging.StreamHandler()
            console_handler.setLevel(_get_console_log_level(log_level))
            console_handler.setFormatter(formatter)
            logger.addHandler(console_handler)

        return logger
|
||||
|
||||
def _should_enable_console_logging(verbose: Optional[bool]) -> bool:
    """Decide whether console output should be enabled.

    An explicit *verbose* argument always wins; otherwise either of the
    VERBOSE_LOGGING / LOG_TO_CONSOLE environment variables (value 'true',
    '1' or 'yes', case-insensitive) switches console logging on.
    """
    if verbose is not None:
        return verbose

    truthy = ('true', '1', 'yes')
    # Either environment toggle is sufficient to enable console logging.
    return any(
        os.getenv(name, 'false').lower() in truthy
        for name in ('VERBOSE_LOGGING', 'LOG_TO_CONSOLE')
    )
|
||||
|
||||
def _get_console_log_level(log_level: str) -> int:
    """Translate a textual level name into a ``logging`` level constant.

    Unknown names fall back to ``logging.INFO`` rather than raising.
    """
    known_levels = {
        name: getattr(logging, name)
        for name in ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')
    }
    return known_levels.get(log_level.upper(), logging.INFO)
|
||||
|
||||
def cleanup_old_logs(component_name: str, days_to_keep: int = 30):
    """
    Clean up old log files for a component based on age.

    Note: TimedRotatingFileHandler already manages log file counts. This
    function is for age-based cleanup, which might be redundant but is kept
    for specific use cases.

    Args:
        component_name: Name of the component; files are swept from
            ``logs/<component_name>/``.
        days_to_keep: Number of days of logs to retain (default: 30)
    """
    log_dir = Path("logs") / component_name
    if not log_dir.is_dir():
        return

    # Files last modified before this POSIX timestamp are removed.
    cutoff_date = datetime.now().timestamp() - (days_to_keep * 24 * 60 * 60)

    for log_file in log_dir.glob("*"):
        try:
            if log_file.is_file() and log_file.stat().st_mtime < cutoff_date:
                log_file.unlink()
                print(f"Deleted old log file: {log_file}")
        except OSError as e:
            # Best-effort cleanup: report and continue with remaining files.
            print(f"Failed to delete old log file {log_file}: {e}")
|
||||
|
||||
def shutdown_logging():
    """Flush and close every registered handler via ``logging.shutdown``.

    Call this at process exit (and in test teardown) so buffered records
    reach disk and open file handles are released cleanly.
    """
    logging.shutdown()
|
||||
Reference in New Issue
Block a user