Mirror of https://github.com/unshackle-dl/unshackle.git, synced 2025-10-23 15:11:08 +00:00
feat: Add comprehensive JSON debug logging system
Implements a complete structured logging system for troubleshooting and service development.

Features:
- Binary toggle via --debug flag or debug: true in config
- JSON Lines (.jsonl) format for easy parsing and analysis
- Comprehensive logging of all operations:
  * Session info (version, platform, Python version)
  * CLI parameters and service configuration
  * CDM details (Widevine/PlayReady, security levels)
  * Authentication status
  * Title and track metadata
  * DRM operations (PSSH, KIDs, license requests)
  * Vault queries with key retrieval
  * Full error traces with context
- Configurable key logging via debug_keys option
- Smart redaction (passwords, tokens, cookies always redacted)
- Error logging for all critical operations:
  * Authentication failures
  * Title fetching errors
  * Track retrieval errors
  * License request failures (Widevine & PlayReady)
  * Vault operation errors
- Removed old text logging system
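Because the log is plain JSON Lines, it can be inspected with any JSON tooling. A minimal sketch of filtering one run's log for errors; the file name is hypothetical (it follows the new unshackle_debug_{service}_{time}.jsonl pattern introduced below), and only the timestamp, session_id and level fields appear on every entry:

import json
from pathlib import Path

# Hypothetical file name following the new debug_log pattern
# "unshackle_debug_{service}_{time}.jsonl" (see the config change below).
log_file = Path("logs/unshackle_debug_EXAMPLE_20250101-120000.jsonl")

for line in log_file.read_text(encoding="utf-8").splitlines():
    entry = json.loads(line)  # each line is one complete JSON object
    if entry["level"] == "ERROR":
        # "operation", "message" and "error" are only present when relevant
        print(entry.get("operation"), entry.get("message"))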
@@ -1,6 +1,5 @@
import atexit
import logging
from pathlib import Path

import click
import urllib3

@@ -16,23 +15,16 @@ from unshackle.core.config import config
from unshackle.core.console import ComfyRichHandler, console
from unshackle.core.constants import context_settings
from unshackle.core.update_checker import UpdateChecker
from unshackle.core.utilities import rotate_log_file

LOGGING_PATH = None
from unshackle.core.utilities import close_debug_logger, init_debug_logger


@click.command(cls=Commands, invoke_without_command=True, context_settings=context_settings)
@click.option("-v", "--version", is_flag=True, default=False, help="Print version information.")
@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs.")
@click.option(
    "--log",
    "log_path",
    type=Path,
    default=config.directories.logs / config.filenames.log,
    help="Log path (or filename). Path can contain the following f-string args: {name} {time}.",
)
def main(version: bool, debug: bool, log_path: Path) -> None:
@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs and JSON debug logging.")
def main(version: bool, debug: bool) -> None:
    """unshackle—Modular Movie, TV, and Music Archival Software."""
    debug_logging_enabled = debug or config.debug

    logging.basicConfig(
        level=logging.DEBUG if debug else logging.INFO,
        format="%(message)s",
@@ -48,11 +40,8 @@ def main(version: bool, debug: bool, log_path: Path) -> None:
        ],
    )

    if log_path:
        global LOGGING_PATH
        console.record = True
        new_log_path = rotate_log_file(log_path)
        LOGGING_PATH = new_log_path
    if debug_logging_enabled:
        init_debug_logger(enabled=True)

    urllib3.disable_warnings(InsecureRequestWarning)

@@ -98,10 +87,9 @@ def main(version: bool, debug: bool, log_path: Path) -> None:


@atexit.register
def save_log():
    if console.record and LOGGING_PATH:
        # TODO: Currently semi-bust. Everything that refreshes gets duplicated.
        console.save_text(LOGGING_PATH)
def cleanup():
    """Clean up resources on exit."""
    close_debug_logger()


if __name__ == "__main__":
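The hunk above enables the logger with init_debug_logger(enabled=True) only; DebugLogger itself stays inactive until it is given a log_path (see its __init__ further down), so the concrete file is presumably supplied elsewhere, by the command that knows the service name. A sketch of what that wiring could look like; the helper names and the path construction are assumptions, not the commit's actual call site:

from datetime import datetime

from unshackle.core.config import config
from unshackle.core.utilities import close_debug_logger, init_debug_logger


def start_debug_logging(service: str) -> None:
    # Hypothetical helper: builds a path from the new debug_log pattern
    # "unshackle_debug_{service}_{time}.jsonl" and enables the global logger.
    log_path = config.directories.logs / config.filenames.debug_log.format(
        service=service, time=datetime.now().strftime("%Y%m%d-%H%M%S")
    )
    init_debug_logger(log_path=log_path, enabled=True, log_keys=config.debug_keys)


def stop_debug_logging() -> None:
    # Mirrors the cleanup() hook registered at exit above.
    close_debug_logger()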
@@ -31,6 +31,7 @@ class Config:
    class _Filenames:
        # default filenames, do not modify here, set via config
        log = "unshackle_{name}_{time}.log" # Directories.logs
        debug_log = "unshackle_debug_{service}_{time}.jsonl" # Directories.logs
        config = "config.yaml" # Directories.services / tag
        root_config = "unshackle.yaml" # Directories.user_configs
        chapters = "Chapters_{title}_{random}.txt" # Directories.temp
@@ -98,6 +99,9 @@ class Config:
        self.title_cache_max_retention: int = kwargs.get("title_cache_max_retention", 86400)  # 24 hours default
        self.title_cache_enabled: bool = kwargs.get("title_cache_enabled", True)

        self.debug: bool = kwargs.get("debug", False)
        self.debug_keys: bool = kwargs.get("debug_keys", False)

    @classmethod
    def from_yaml(cls, path: Path) -> Config:
        if not path.exists():
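The two new options are read as plain kwargs, so they map one-to-one to top-level keys in the root config (unshackle.yaml). A small sketch of the equivalent configuration, assuming PyYAML (the root config is YAML) and that from_yaml passes the parsed mapping straight into Config(**kwargs):

import yaml  # PyYAML, assumed available since the root config is YAML

# debug: same effect as passing --debug on every run; debug_keys controls
# whether decryption keys are written to the .jsonl log or redacted.
raw = yaml.safe_load("debug: true\ndebug_keys: false\n")

# Config(**raw) would then expose config.debug / config.debug_keys, and the CLI
# enables JSON debug logging when either the --debug flag or config.debug is set.
print(raw["debug"], raw["debug_keys"])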
@@ -1,19 +1,22 @@
import ast
import contextlib
import importlib.util
import json
import logging
import os
import re
import socket
import sys
import time
import traceback
import unicodedata
from collections import defaultdict
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from types import ModuleType
from typing import Optional, Sequence, Union
from typing import Any, Optional, Sequence, Union
from urllib.parse import ParseResult, urlparse
from uuid import uuid4

import chardet
import requests
@@ -122,7 +125,7 @@ def is_exact_match(language: Union[str, Language], languages: Sequence[Union[str
    return closest_match(language, list(map(str, languages)))[1] <= LANGUAGE_EXACT_DISTANCE


def get_boxes(data: bytes, box_type: bytes, as_bytes: bool = False) -> Box:
def get_boxes(data: bytes, box_type: bytes, as_bytes: bool = False) -> Box: # type: ignore
    """
    Scan a byte array for a wanted MP4/ISOBMFF box, then parse and yield each find.

@@ -457,3 +460,334 @@ class FPS(ast.NodeVisitor):
    @classmethod
    def parse(cls, expr: str) -> float:
        return cls().visit(ast.parse(expr).body[0])


"""
Structured JSON debug logging for unshackle.

Provides comprehensive debugging information for service developers and troubleshooting.
When enabled, logs all operations, requests, responses, DRM operations, and errors in JSON format.
"""


class DebugLogger:
    """
    Structured JSON debug logger for unshackle.

    Outputs JSON Lines format where each line is a complete JSON object.
    This makes it easy to parse, filter, and analyze logs programmatically.
    """

    def __init__(self, log_path: Optional[Path] = None, enabled: bool = False, log_keys: bool = False):
        """
        Initialize the debug logger.

        Args:
            log_path: Path to the log file. If None, logging is disabled.
            enabled: Whether debug logging is enabled.
            log_keys: Whether to log decryption keys (for debugging key issues).
        """
        self.enabled = enabled and log_path is not None
        self.log_path = log_path
        self.session_id = str(uuid4())[:8]
        self.file_handle = None
        self.log_keys = log_keys

        if self.enabled:
            self.log_path.parent.mkdir(parents=True, exist_ok=True)
            self.file_handle = open(self.log_path, "a", encoding="utf-8")
            self._log_session_start()

    def _log_session_start(self):
        """Log the start of a new session with environment information."""
        import platform

        from unshackle.core import __version__

        self.log(
            level="INFO",
            operation="session_start",
            message="Debug logging session started",
            context={
                "unshackle_version": __version__,
                "python_version": sys.version,
                "platform": platform.platform(),
                "platform_system": platform.system(),
                "platform_release": platform.release(),
            },
        )

    def log(
        self,
        level: str = "DEBUG",
        operation: str = "",
        message: str = "",
        context: Optional[dict[str, Any]] = None,
        service: Optional[str] = None,
        error: Optional[Exception] = None,
        request: Optional[dict[str, Any]] = None,
        response: Optional[dict[str, Any]] = None,
        duration_ms: Optional[float] = None,
        success: Optional[bool] = None,
        **kwargs,
    ):
        """
        Log a structured JSON entry.

        Args:
            level: Log level (DEBUG, INFO, WARNING, ERROR)
            operation: Name of the operation being performed
            message: Human-readable message
            context: Additional context information
            service: Service name (e.g., DSNP, NF)
            error: Exception object if an error occurred
            request: Request details (URL, method, headers, body)
            response: Response details (status, headers, body)
            duration_ms: Operation duration in milliseconds
            success: Whether the operation succeeded
            **kwargs: Additional fields to include in the log entry
        """
        if not self.enabled or not self.file_handle:
            return

        entry = {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "session_id": self.session_id,
            "level": level,
        }

        if operation:
            entry["operation"] = operation
        if message:
            entry["message"] = message
        if service:
            entry["service"] = service
        if context:
            entry["context"] = self._sanitize_data(context)
        if request:
            entry["request"] = self._sanitize_data(request)
        if response:
            entry["response"] = self._sanitize_data(response)
        if duration_ms is not None:
            entry["duration_ms"] = duration_ms
        if success is not None:
            entry["success"] = success

        if error:
            entry["error"] = {
                "type": type(error).__name__,
                "message": str(error),
                "traceback": traceback.format_exception(type(error), error, error.__traceback__),
            }

        for key, value in kwargs.items():
            if key not in entry:
                entry[key] = self._sanitize_data(value)

        try:
            self.file_handle.write(json.dumps(entry, default=str) + "\n")
            self.file_handle.flush()
        except Exception as e:
            print(f"Failed to write debug log: {e}", file=sys.stderr)

    def _sanitize_data(self, data: Any) -> Any:
        """
        Sanitize data for JSON serialization.
        Handles complex objects and removes sensitive information.
        """
        if data is None:
            return None

        if isinstance(data, (str, int, float, bool)):
            return data

        if isinstance(data, (list, tuple)):
            return [self._sanitize_data(item) for item in data]

        if isinstance(data, dict):
            sanitized = {}
            for key, value in data.items():
                key_lower = str(key).lower()
                has_prefix = key_lower.startswith("has_")

                is_always_sensitive = not has_prefix and any(
                    sensitive in key_lower for sensitive in ["password", "token", "secret", "auth", "cookie"]
                )

                is_key_field = (
                    "key" in key_lower
                    and not has_prefix
                    and not any(safe in key_lower for safe in ["_count", "_id", "_type", "kid", "keys_", "key_found"])
                )

                should_redact = is_always_sensitive or (is_key_field and not self.log_keys)

                if should_redact:
                    sanitized[key] = "[REDACTED]"
                else:
                    sanitized[key] = self._sanitize_data(value)
            return sanitized

        if isinstance(data, bytes):
            try:
                return data.hex()
            except Exception:
                return "[BINARY_DATA]"

        if isinstance(data, Path):
            return str(data)

        try:
            return str(data)
        except Exception:
            return f"[{type(data).__name__}]"

    def log_operation_start(self, operation: str, **kwargs) -> str:
        """
        Log the start of an operation and return an operation ID.

        Args:
            operation: Name of the operation
            **kwargs: Additional context

        Returns:
            Operation ID that can be used to log the end of the operation
        """
        op_id = str(uuid4())[:8]
        self.log(
            level="DEBUG",
            operation=f"{operation}_start",
            message=f"Starting operation: {operation}",
            operation_id=op_id,
            **kwargs,
        )
        return op_id

    def log_operation_end(
        self, operation: str, operation_id: str, success: bool = True, duration_ms: Optional[float] = None, **kwargs
    ):
        """
        Log the end of an operation.

        Args:
            operation: Name of the operation
            operation_id: Operation ID from log_operation_start
            success: Whether the operation succeeded
            duration_ms: Operation duration in milliseconds
            **kwargs: Additional context
        """
        self.log(
            level="INFO" if success else "ERROR",
            operation=f"{operation}_end",
            message=f"Finished operation: {operation}",
            operation_id=operation_id,
            success=success,
            duration_ms=duration_ms,
            **kwargs,
        )

    def log_service_call(self, method: str, url: str, **kwargs):
        """
        Log a service API call.

        Args:
            method: HTTP method (GET, POST, etc.)
            url: Request URL
            **kwargs: Additional request details (headers, body, etc.)
        """
        self.log(level="DEBUG", operation="service_call", request={"method": method, "url": url, **kwargs})

    def log_drm_operation(self, drm_type: str, operation: str, **kwargs):
        """
        Log a DRM operation (PSSH extraction, license request, key retrieval).

        Args:
            drm_type: DRM type (Widevine, PlayReady, etc.)
            operation: DRM operation name
            **kwargs: Additional context (PSSH, KIDs, keys, etc.)
        """
        self.log(
            level="DEBUG", operation=f"drm_{operation}", message=f"{drm_type} {operation}", drm_type=drm_type, **kwargs
        )

    def log_vault_query(self, vault_name: str, operation: str, **kwargs):
        """
        Log a vault query operation.

        Args:
            vault_name: Name of the vault
            operation: Vault operation (get_key, add_key, etc.)
            **kwargs: Additional context (KID, key, success, etc.)
        """
        self.log(
            level="DEBUG",
            operation=f"vault_{operation}",
            message=f"Vault {vault_name}: {operation}",
            vault=vault_name,
            **kwargs,
        )

    def log_error(self, operation: str, error: Exception, **kwargs):
        """
        Log an error with full context.

        Args:
            operation: Operation that failed
            error: Exception that occurred
            **kwargs: Additional context
        """
        self.log(
            level="ERROR",
            operation=operation,
            message=f"Error in {operation}: {str(error)}",
            error=error,
            success=False,
            **kwargs,
        )

    def close(self):
        """Close the log file and clean up resources."""
        if self.file_handle:
            self.log(level="INFO", operation="session_end", message="Debug logging session ended")
            self.file_handle.close()
            self.file_handle = None


# Global debug logger instance
_debug_logger: Optional[DebugLogger] = None


def get_debug_logger() -> Optional[DebugLogger]:
    """Get the global debug logger instance."""
    return _debug_logger


def init_debug_logger(log_path: Optional[Path] = None, enabled: bool = False, log_keys: bool = False):
    """
    Initialize the global debug logger.

    Args:
        log_path: Path to the log file
        enabled: Whether debug logging is enabled
        log_keys: Whether to log decryption keys (for debugging key issues)
    """
    global _debug_logger
    if _debug_logger:
        _debug_logger.close()
    _debug_logger = DebugLogger(log_path=log_path, enabled=enabled, log_keys=log_keys)


def close_debug_logger():
    """Close the global debug logger."""
    global _debug_logger
    if _debug_logger:
        _debug_logger.close()
        _debug_logger = None


__all__ = (
    "DebugLogger",
    "get_debug_logger",
    "init_debug_logger",
    "close_debug_logger",
)
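To show how the exported helpers fit together, here is a short usage sketch from a service developer's point of view; the service tag, URL, file path and logged values are purely illustrative:

from pathlib import Path

from unshackle.core.utilities import close_debug_logger, get_debug_logger, init_debug_logger

# Illustrative path and service tag; a real run would use the configured logs directory.
init_debug_logger(log_path=Path("logs/unshackle_debug_EXAMPLE.jsonl"), enabled=True, log_keys=False)

debug = get_debug_logger()
if debug:
    op_id = debug.log_operation_start("get_titles", service="EXAMPLE")
    # The "authorization" header is redacted automatically by _sanitize_data.
    debug.log_service_call("GET", "https://example.com/api/titles", headers={"authorization": "Bearer ..."})
    try:
        raise RuntimeError("demo failure")
    except RuntimeError as exc:
        debug.log_error("get_titles", exc, service="EXAMPLE")
    debug.log_operation_end("get_titles", op_id, success=False)

close_debug_logger()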