File size: 15,702 Bytes
01b7e60
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
 
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
 
 
 
 
 
 
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
 
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
 
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
 
 
 
 
e67da5f
 
 
 
 
 
 
 
 
 
 
01b7e60
e67da5f
 
 
 
 
 
 
 
 
 
01b7e60
e67da5f
 
 
 
 
 
 
 
 
 
 
01b7e60
e67da5f
 
 
 
 
01b7e60
e67da5f
 
 
 
 
 
 
01b7e60
e67da5f
 
 
01b7e60
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
 
 
 
 
 
 
 
 
 
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
e67da5f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
01b7e60
e67da5f
 
 
 
 
 
01b7e60
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
#!/usr/bin/env python3
"""
Logging Management Module
========================

Professional logging system that integrates with existing LOGS/ folder
and provides structured, comprehensive logging for BackgroundFX Pro.

Features:
- Integration with existing LOGS/ folder
- Structured logging with different levels
- Performance tracking and metrics
- Error tracking and debugging
- Rotating log files
- Console and file output

Author: BackgroundFX Pro Team  
License: MIT
"""

import os
import sys
import logging
import logging.handlers
from typing import Optional, Dict, Any
from datetime import datetime
from pathlib import Path
import json
import traceback
from functools import wraps
import time
from typing import Dict, List


class ColoredFormatter(logging.Formatter):
    """Formatter that wraps the level name in ANSI color codes.

    Intended for console handlers only.  The color is applied transiently:
    the shared LogRecord is restored afterwards so that file handlers
    formatting the same record do not receive ANSI escape sequences
    (the original implementation mutated ``record.levelname`` permanently,
    leaking color codes into every later handler).
    """

    # Color codes
    COLORS = {
        'DEBUG': '\033[36m',     # Cyan
        'INFO': '\033[32m',      # Green
        'WARNING': '\033[33m',   # Yellow
        'ERROR': '\033[31m',     # Red
        'CRITICAL': '\033[35m',  # Magenta
        'RESET': '\033[0m'       # Reset
    }

    def format(self, record):
        """Format *record*, temporarily coloring its level name."""
        original_levelname = record.levelname
        color = self.COLORS.get(original_levelname)
        if color:
            record.levelname = f"{color}{original_levelname}{self.COLORS['RESET']}"
        try:
            return super().format(record)
        finally:
            # Restore: handlers share one LogRecord instance per log call.
            record.levelname = original_levelname


class BackgroundFXLogger:
    """Main logger class for BackgroundFX Pro.

    Wraps a stdlib ``logging.Logger`` with colored console output, rotating
    file handlers under ``logs_dir``, operation-timing helpers, and a JSON
    performance log.  Unknown attributes are delegated to the wrapped
    logger, so instances can stand in for ``logging.Logger`` objects.
    """

    # LogRecord attribute names that the ``extra`` mapping must not
    # overwrite; the stdlib raises KeyError on any clash, so the logging
    # helpers below rename such keys instead of crashing.
    _RESERVED_EXTRA = frozenset(vars(logging.makeLogRecord({}))) | {"message", "asctime"}

    def __init__(self, 
                 name: str = "BackgroundFX",
                 logs_dir: str = "LOGS",
                 level: int = logging.INFO,
                 console_output: bool = True,
                 file_output: bool = True):
        """Build the logger and attach its handlers.

        Args:
            name: Name passed to ``logging.getLogger``.
            logs_dir: Directory receiving all log files (created if missing).
            level: Threshold for the console and main file handlers.
            console_output: Attach a colored stdout handler.
            file_output: Attach rotating main/error file handlers.
        """
        self.name = name
        self.logs_dir = Path(logs_dir)
        self.level = level
        self.console_output = console_output
        self.file_output = file_output

        # Always defined (even with file_output=False) so that
        # get_log_files() and _save_performance_data() never raise
        # AttributeError — previously this was only set in _setup_handlers().
        self.performance_log_file = self.logs_dir / "performance.json"

        # Create logs directory if it doesn't exist
        self.logs_dir.mkdir(exist_ok=True)

        # Initialize the wrapped stdlib logger
        self.logger = logging.getLogger(name)
        self.logger.setLevel(level)

        # Clear existing handlers to avoid duplicates when the same name
        # is configured more than once
        self.logger.handlers.clear()

        # Setup handlers
        self._setup_handlers()

        # Performance tracking state
        self.performance_data: Dict[str, Any] = {}
        self.start_times: Dict[str, float] = {}

    def __getattr__(self, name):
        """
        Delegate unknown attributes/methods to the underlying stdlib logger.
        This makes BackgroundFXLogger behave like logging.Logger where needed.
        """
        if name == "logger":
            # Guard: if 'logger' itself is missing (e.g. during unpickling,
            # before __init__ has run) delegating would recurse forever.
            raise AttributeError(name)
        return getattr(self.logger, name)

    def _safe_extra(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
        """Return ``kwargs`` made safe for use as logging's ``extra`` mapping.

        Keys colliding with built-in LogRecord attributes (e.g. 'name',
        'module', 'message') are prefixed with ``x_`` because the stdlib
        refuses to overwrite them and raises KeyError.
        """
        return {
            (f"x_{key}" if key in self._RESERVED_EXTRA else key): value
            for key, value in kwargs.items()
        }

    def _setup_handlers(self):
        """Setup logging handlers for console and file output"""

        # Console handler
        if self.console_output:
            console_handler = logging.StreamHandler(sys.stdout)
            console_handler.setLevel(self.level)

            # Colored formatter for console
            console_formatter = ColoredFormatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            )
            console_handler.setFormatter(console_formatter)
            self.logger.addHandler(console_handler)

        # File handlers
        if self.file_output:
            # Main log file (rotating)
            main_log_file = self.logs_dir / "backgroundfx.log"
            file_handler = logging.handlers.RotatingFileHandler(
                main_log_file,
                maxBytes=10*1024*1024,  # 10MB
                backupCount=5,
                encoding="utf-8"
            )
            file_handler.setLevel(self.level)

            # Plain formatter for file
            file_formatter = logging.Formatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            )
            file_handler.setFormatter(file_formatter)
            self.logger.addHandler(file_handler)

            # Error-only log file
            error_log_file = self.logs_dir / "errors.log"
            error_handler = logging.handlers.RotatingFileHandler(
                error_log_file,
                maxBytes=5*1024*1024,  # 5MB
                backupCount=3,
                encoding="utf-8"
            )
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(file_formatter)
            self.logger.addHandler(error_handler)

        # NOTE: performance_log_file is assigned in __init__ so it exists
        # regardless of file_output.

    def debug(self, message: str, **kwargs):
        """Log debug message"""
        self.logger.debug(message, extra=self._safe_extra(kwargs))

    def info(self, message: str, **kwargs):
        """Log info message"""
        self.logger.info(message, extra=self._safe_extra(kwargs))

    def warning(self, message: str, **kwargs):
        """Log warning message"""
        self.logger.warning(message, extra=self._safe_extra(kwargs))

    def error(self, message: str, exception: Optional[Exception] = None, **kwargs):
        """Log error message with optional exception details"""
        if exception:
            message = f"{message} | Exception: {str(exception)}"
            # Log full traceback to file (best-effort: outside an except
            # block this yields "NoneType: None")
            self.logger.error(f"{message}\n{traceback.format_exc()}", extra=self._safe_extra(kwargs))
        else:
            self.logger.error(message, extra=self._safe_extra(kwargs))

    def critical(self, message: str, exception: Optional[Exception] = None, **kwargs):
        """Log critical message"""
        if exception:
            message = f"{message} | Exception: {str(exception)}"
            self.logger.critical(f"{message}\n{traceback.format_exc()}", extra=self._safe_extra(kwargs))
        else:
            self.logger.critical(message, extra=self._safe_extra(kwargs))

    def log_processing_step(self, step_name: str, details: Dict[str, Any] = None):
        """Log a processing step with details"""
        details = details or {}
        self.info(f"πŸ”„ Processing: {step_name}", **details)

    def log_performance_metric(self, metric_name: str, value: float, unit: str = "", details: Dict = None):
        """Log performance metric and persist it to the JSON performance log."""
        details = details or {}
        message = f"πŸ“Š {metric_name}: {value:.3f}{unit}"
        self.info(message, **details)

        # Store for performance analysis, keyed by ISO timestamp
        timestamp = datetime.now().isoformat()
        self.performance_data[timestamp] = {
            'metric': metric_name,
            'value': value,
            'unit': unit,
            'details': details
        }

        # Save to performance log
        self._save_performance_data()

    def log_model_status(self, model_name: str, status: str, details: Dict = None):
        """Log model initialization/status"""
        details = details or {}
        if status == "initialized":
            self.info(f"βœ… {model_name} initialized successfully", **details)
        elif status == "failed":
            self.error(f"❌ {model_name} initialization failed", **details)
        elif status == "loading":
            self.info(f"πŸ”„ Loading {model_name}...", **details)
        else:
            self.info(f"πŸ”§ {model_name}: {status}", **details)

    def log_quality_metrics(self, frame_id: int, metrics: Dict[str, float]):
        """Log quality assessment metrics for a single frame."""
        metric_str = " | ".join([f"{k}: {v:.3f}" for k, v in metrics.items()])
        self.info(f"πŸ“Š Frame {frame_id} Quality: {metric_str}")

        # Store detailed metrics (kept in memory; persisted on the next
        # log_performance_metric call)
        timestamp = datetime.now().isoformat()
        self.performance_data[f"{timestamp}_quality_{frame_id}"] = {
            'type': 'quality_metrics',
            'frame_id': frame_id,
            'metrics': metrics
        }

    def log_video_processing(self, input_path: str, output_path: str, 
                           frame_count: int, processing_time: float):
        """Log video processing completion"""
        # Clamp denominator to avoid division by zero for instant runs
        fps = frame_count / max(processing_time, 0.001)
        self.info(
            f"🎬 Video processed: {frame_count} frames in {processing_time:.1f}s ({fps:.1f} FPS)",
            input_path=input_path,
            output_path=output_path,
            frame_count=frame_count,
            processing_time=processing_time,
            fps=fps
        )

    def start_timer(self, operation_name: str):
        """Start timing an operation"""
        self.start_times[operation_name] = time.time()
        self.debug(f"⏱️ Started timing: {operation_name}")

    def end_timer(self, operation_name: str, log_result: bool = True) -> float:
        """End timing an operation and optionally log result.

        Returns the elapsed seconds, or 0.0 if the timer was never started.
        """
        if operation_name not in self.start_times:
            self.warning(f"Timer '{operation_name}' was not started")
            return 0.0

        elapsed = time.time() - self.start_times[operation_name]
        del self.start_times[operation_name]

        if log_result:
            self.log_performance_metric(f"{operation_name}_time", elapsed, "s")

        return elapsed

    def _save_performance_data(self):
        """Save performance data to the JSON performance log (best-effort)."""
        try:
            # Load existing data; corrupt/partial files are treated as empty
            existing_data = {}
            if self.performance_log_file.exists():
                with open(self.performance_log_file, 'r', encoding="utf-8") as f:
                    try:
                        existing_data = json.load(f)
                    except json.JSONDecodeError:
                        existing_data = {}

            # Merge with new data
            existing_data.update(self.performance_data)

            # Keep only last 1000 entries to prevent file from growing too large
            # (keys start with ISO timestamps, so sorting is chronological)
            if len(existing_data) > 1000:
                sorted_keys = sorted(existing_data.keys())
                keep_keys = sorted_keys[-1000:]
                existing_data = {k: existing_data[k] for k in keep_keys}

            # Save updated data
            with open(self.performance_log_file, 'w', encoding="utf-8") as f:
                json.dump(existing_data, f, indent=2)

        except Exception as e:
            # Never let metrics persistence break the caller
            self.warning(f"Failed to save performance data: {e}")

    def get_log_files(self) -> Dict[str, str]:
        """Get paths to all log files"""
        return {
            'main_log': str(self.logs_dir / "backgroundfx.log"),
            'error_log': str(self.logs_dir / "errors.log"), 
            'performance_log': str(self.performance_log_file),
            'logs_directory': str(self.logs_dir)
        }

    def get_recent_logs(self, lines: int = 50) -> Dict[str, List[str]]:
        """Get recent log entries (last *lines* of the main and error logs)."""
        logs = {}

        try:
            # Main log
            main_log_file = self.logs_dir / "backgroundfx.log"
            if main_log_file.exists():
                with open(main_log_file, 'r', encoding="utf-8") as f:
                    logs['main'] = f.readlines()[-lines:]

            # Error log
            error_log_file = self.logs_dir / "errors.log"
            if error_log_file.exists():
                with open(error_log_file, 'r', encoding="utf-8") as f:
                    logs['errors'] = f.readlines()[-lines:]

        except Exception as e:
            self.warning(f"Failed to read recent logs: {e}")

        return logs


# Global logger instance
# Lazily created by setup_logging() on first use (or at module import, see
# the init block at the bottom of this file); treated as a process-wide
# singleton shared by all the convenience functions below.
_global_logger: Optional[BackgroundFXLogger] = None


def setup_logging(logs_dir: str = "LOGS", 
                 level: int = logging.INFO,
                 console_output: bool = True,
                 file_output: bool = True) -> BackgroundFXLogger:
    """Configure and return the process-wide BackgroundFX logger.

    The logger is built exactly once; subsequent calls ignore the
    arguments and hand back the already-configured instance.
    """
    global _global_logger

    # Singleton: keep whatever was configured first.
    if _global_logger is not None:
        return _global_logger

    _global_logger = BackgroundFXLogger(
        name="BackgroundFX",
        logs_dir=logs_dir,
        level=level,
        console_output=console_output,
        file_output=file_output
    )
    return _global_logger


# --- Backward-compat alias for legacy imports ---
def setup_logger(*args, **kwargs):
    """
    Legacy entry point kept so `from utils.logger import setup_logger`
    keeps working; identical to setup_logging and returns the same
    BackgroundFXLogger instance.
    """
    return setup_logging(*args, **kwargs)


def get_logger(name: str = None) -> BackgroundFXLogger:
    """Return the global logger, or a cached module-specific logger.

    Fix over the original: module-specific loggers are now cached per name.
    Previously every call constructed a fresh BackgroundFXLogger, which
    opened new rotating file handlers each time without closing the old
    ones — a file-descriptor leak under repeated lookups.
    """
    if _global_logger is None:
        setup_logging()

    if not name or name == "BackgroundFX":
        return _global_logger

    # Per-name cache stored on the function itself so repeated lookups
    # reuse the same handlers.
    cache = getattr(get_logger, "_cache", None)
    if cache is None:
        cache = {}
        get_logger._cache = cache

    if name not in cache:
        # Module-specific logger that inherits settings from the main logger
        cache[name] = BackgroundFXLogger(
            name=name,
            logs_dir=_global_logger.logs_dir,
            level=_global_logger.level,
            console_output=False,  # Use main logger for console
            file_output=True
        )
    return cache[name]


def log_function_call(logger: BackgroundFXLogger = None):
    """Decorator to log function calls with timing"""
    if logger is None:
        logger = get_logger()
    
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            func_name = f"{func.__module__}.{func.__name__}"
            logger.debug(f"πŸ”§ Calling: {func_name}")
            logger.start_timer(func_name)
            
            try:
                result = func(*args, **kwargs)
                elapsed = logger.end_timer(func_name, log_result=False)
                logger.debug(f"βœ… Completed: {func_name} ({elapsed:.3f}s)")
                return result
                
            except Exception as e:
                elapsed = logger.end_timer(func_name, log_result=False)
                logger.error(f"❌ Failed: {func_name} ({elapsed:.3f}s)", exception=e)
                raise
        
        return wrapper
    return decorator


def log_processing_pipeline():
    """Decorator for logging processing pipeline steps"""
    logger = get_logger()
    
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            step_name = func.__name__.replace('_', ' ').title()
            logger.log_processing_step(step_name)
            
            try:
                result = func(*args, **kwargs)
                logger.info(f"βœ… {step_name} completed successfully")
                return result
                
            except Exception as e:
                logger.error(f"❌ {step_name} failed", exception=e)
                raise
        
        return wrapper
    return decorator


# Convenience functions
def log_info(message: str, **kwargs):
    """Module-level shortcut: forward an INFO message to the global logger."""
    logger = get_logger()
    logger.info(message, **kwargs)

def log_error(message: str, exception: Exception = None, **kwargs):
    """Module-level shortcut: forward an ERROR message to the global logger."""
    logger = get_logger()
    logger.error(message, exception=exception, **kwargs)

def log_warning(message: str, **kwargs):
    """Module-level shortcut: forward a WARNING message to the global logger."""
    logger = get_logger()
    logger.warning(message, **kwargs)

def log_debug(message: str, **kwargs):
    """Module-level shortcut: forward a DEBUG message to the global logger."""
    logger = get_logger()
    logger.debug(message, **kwargs)


# Initialize logging on module import
# Import-time side effect: ensure the global logger exists as soon as this
# module loads, so callers can use log_info()/log_error() immediately.
if _global_logger is None:
    try:
        setup_logging()
        log_info("βœ… Logging system initialized")
    except Exception as e:
        # Logging must never prevent the application from importing; fall
        # back to stdout if handler/file setup fails (e.g. unwritable dir).
        print(f"⚠️ Failed to initialize logging: {e}")


# Public API of this module; anything not listed here is internal.
__all__ = [
    "BackgroundFXLogger",
    "setup_logging",
    "setup_logger",   # alias for legacy code
    "get_logger",
    "log_function_call",
    "log_processing_pipeline",
    "log_info",
    "log_error",
    "log_warning",
    "log_debug",
]