Mirror of https://github.com/unshackle-dl/unshackle.git, synced 2025-10-23 15:11:08 +00:00

Compare commits (39 commits)
| SHA1 |
|---|
| 07574d8d02 |
| 9b5d233c69 |
| 98d4bb4333 |
| 1c48b282de |
| 93debf149a |
| 57fc07ea41 |
| df09998a47 |
| e04399fbce |
| 087df59fb6 |
| 5384b775a4 |
| 2a90e60a49 |
| 1409f93de5 |
| d3ca8e7039 |
| 8f2ead2107 |
| 9921690339 |
| 3dd12b0cbe |
| ed1314572b |
| 7a49a6a4f9 |
| 888647ad64 |
| 133f91a2e8 |
| 6c1cb21630 |
| 8437ba24d5 |
| 062e060fca |
| b48eecacb5 |
| e4bb7e9135 |
| 0f4a68ca62 |
| 3a8dfb26fe |
| c9bb0e4224 |
| e1e2e35ff4 |
| 97f7eb0674 |
| 03f08159b4 |
| 724703d14b |
| da00258ae0 |
| 4f3d0f1f7a |
| bade3f8c09 |
| 55f116f1e8 |
| 2e2f8f5099 |
| 4459ec4db6 |
| 2afc59624d |
.gitignore (vendored, +1)
@@ -25,6 +25,7 @@ unshackle/certs/
 unshackle/WVDs/
 unshackle/PRDs/
 temp/
+logs/
 services/
 
 # Byte-compiled / optimized / DLL files
@@ -547,9 +547,12 @@ Configuration data for pywidevine's serve functionality run through unshackle.
 This effectively allows you to run `unshackle serve` to start serving pywidevine Serve-compliant CDMs right from your
 local widevine device files.
 
+- `api_secret` - Secret key for REST API authentication. When set, enables the REST API server alongside the CDM serve functionality. This key is required for authenticating API requests.
+
 For example,
 
 ```yaml
+api_secret: "your-secret-key-here"
 users:
   secret_key_for_jane: # 32bit hex recommended, case-sensitive
     devices: # list of allowed devices for this user
```
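Once `api_secret` is set, clients have to present it on REST API requests. A minimal client sketch follows; the `X-Secret-Key` header is an assumption based on pywidevine's serve convention (whose authentication middleware is reused below), and the endpoint path is illustrative, not taken from this diff:

```python
# Hypothetical client call. X-Secret-Key follows pywidevine's serve
# convention; the /api/ prefix matches the Swagger mount added in serve.py.
import requests

API_BASE = "http://localhost:8786/api"  # default serve port is 8786

response = requests.get(
    f"{API_BASE}/",  # endpoint path is illustrative only
    headers={"X-Secret-Key": "your-secret-key-here"},  # must match api_secret
    timeout=10,
)
response.raise_for_status()
print(response.json())
```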
@@ -8,6 +8,9 @@
   </a>
 </p>
 
+> [!WARNING]
+> **Development Branch**: This is the `dev` branch containing bleeding-edge features and experimental changes. Use for testing only. For stable releases, use the [`main`](https://github.com/unshackle-dl/unshackle/tree/main) branch.
+
 ## What is unshackle?
 
 unshackle is a fork of [Devine](https://github.com/devine-dl/devine/), a powerful archival tool for downloading movies, TV shows, and music from streaming services. Built with a focus on modularity and extensibility, it provides a robust framework for content acquisition with support for DRM-protected content.
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "unshackle"
-version = "1.4.8"
+version = "2.0.0"
 description = "Modular Movie, TV, and Music Archival Software."
 authors = [{ name = "unshackle team" }]
 requires-python = ">=3.10,<3.13"
@@ -58,6 +58,7 @@ dependencies = [
     "httpx>=0.28.1,<0.29",
     "cryptography>=45.0.0",
     "subby",
+    "aiohttp-swagger3>=0.9.0,<1",
    "pysubs2>=1.7.0,<2",
 ]
 
@@ -41,14 +41,14 @@ from rich.text import Text
 from rich.tree import Tree
 
 from unshackle.core import binaries
-from unshackle.core.cdm import DecryptLabsRemoteCDM
+from unshackle.core.cdm import CustomRemoteCDM, DecryptLabsRemoteCDM
 from unshackle.core.config import config
 from unshackle.core.console import console
 from unshackle.core.constants import DOWNLOAD_LICENCE_ONLY, AnyTrack, context_settings
 from unshackle.core.credential import Credential
 from unshackle.core.drm import DRM_T, PlayReady, Widevine
 from unshackle.core.events import events
-from unshackle.core.proxies import Basic, Hola, NordVPN, SurfsharkVPN
+from unshackle.core.proxies import Basic, Hola, NordVPN, SurfsharkVPN, WindscribeVPN
 from unshackle.core.service import Service
 from unshackle.core.services import Services
 from unshackle.core.titles import Movie, Movies, Series, Song, Title_T
@@ -56,7 +56,8 @@ from unshackle.core.titles.episode import Episode
 from unshackle.core.tracks import Audio, Subtitle, Tracks, Video
 from unshackle.core.tracks.attachment import Attachment
 from unshackle.core.tracks.hybrid import Hybrid
-from unshackle.core.utilities import get_system_fonts, is_close_match, time_elapsed_since
+from unshackle.core.utilities import (get_debug_logger, get_system_fonts, init_debug_logger, is_close_match,
+                                      time_elapsed_since)
 from unshackle.core.utils import tags
 from unshackle.core.utils.click_types import (LANGUAGE_RANGE, QUALITY_LIST, SEASON_RANGE, ContextData, MultipleChoice,
                                               SubtitleCodecChoice, VideoCodecChoice)
@@ -151,6 +152,13 @@ class dl:
         default=None,
         help="Wanted episodes, e.g. `S01-S05,S07`, `S01E01-S02E03`, `S02-S02E03`, e.t.c, defaults to all.",
     )
+    @click.option(
+        "-le",
+        "--latest-episode",
+        is_flag=True,
+        default=False,
+        help="Download only the single most recent episode available.",
+    )
     @click.option(
         "-l",
         "--lang",
@@ -313,6 +321,40 @@ class dl:
         self.tmdb_name = tmdb_name
         self.tmdb_year = tmdb_year
 
+        # Initialize debug logger with service name if debug logging is enabled
+        if config.debug or logging.root.level == logging.DEBUG:
+            from collections import defaultdict
+            from datetime import datetime
+
+            debug_log_path = config.directories.logs / config.filenames.debug_log.format_map(
+                defaultdict(str, service=self.service, time=datetime.now().strftime("%Y%m%d-%H%M%S"))
+            )
+            init_debug_logger(log_path=debug_log_path, enabled=True, log_keys=config.debug_keys)
+            self.debug_logger = get_debug_logger()
+
+            if self.debug_logger:
+                self.debug_logger.log(
+                    level="INFO",
+                    operation="download_init",
+                    message=f"Download command initialized for service {self.service}",
+                    service=self.service,
+                    context={
+                        "profile": profile,
+                        "proxy": proxy,
+                        "tag": tag,
+                        "tmdb_id": tmdb_id,
+                        "tmdb_name": tmdb_name,
+                        "tmdb_year": tmdb_year,
+                        "cli_params": {
+                            k: v
+                            for k, v in ctx.params.items()
+                            if k not in ["profile", "proxy", "tag", "tmdb_id", "tmdb_name", "tmdb_year"]
+                        },
+                    },
+                )
+        else:
+            self.debug_logger = None
+
         if self.profile:
             self.log.info(f"Using profile: '{self.profile}'")
 
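The `format_map(defaultdict(str, ...))` call above is a small but useful trick: unlike plain `str.format`, it tolerates placeholders that were not supplied, substituting an empty string instead of raising `KeyError`. A standalone sketch; the template string here is illustrative, the real pattern comes from `config.filenames.debug_log`:

```python
from collections import defaultdict
from datetime import datetime

# Illustrative template; the actual pattern lives in config.filenames.debug_log.
template = "debug_{service}_{time}.log"

# defaultdict(str) returns "" for any placeholder we did not provide,
# so unknown fields degrade gracefully instead of raising KeyError.
name = template.format_map(
    defaultdict(str, service="NF", time=datetime.now().strftime("%Y%m%d-%H%M%S"))
)
print(name)  # e.g. debug_NF_20251023-151108.log

# A placeholder with no value simply disappears:
print("debug_{service}_{unknown}.log".format_map(defaultdict(str, service="NF")))
# -> debug_NF_.log
```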
@@ -321,6 +363,13 @@ class dl:
         if service_config_path.exists():
             self.service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
             self.log.info("Service Config loaded")
+            if self.debug_logger:
+                self.debug_logger.log(
+                    level="DEBUG",
+                    operation="load_service_config",
+                    service=self.service,
+                    context={"config_path": str(service_config_path), "config": self.service_config},
+                )
         else:
             self.service_config = {}
         merge_dict(config.services.get(self.service), self.service_config)
@@ -331,6 +380,33 @@ class dl:
         if getattr(config, "decryption_map", None):
             config.decryption = config.decryption_map.get(self.service, config.decryption)
 
+        service_config = config.services.get(self.service, {})
+
+        reserved_keys = {
+            "profiles",
+            "api_key",
+            "certificate",
+            "api_endpoint",
+            "region",
+            "device",
+            "endpoints",
+            "client",
+        }
+
+        for config_key, override_value in service_config.items():
+            if config_key in reserved_keys:
+                continue
+
+            if isinstance(override_value, dict) and hasattr(config, config_key):
+                current_config = getattr(config, config_key, {})
+                if isinstance(current_config, dict):
+                    merged_config = {**current_config, **override_value}
+                    setattr(config, config_key, merged_config)
+
+                    self.log.debug(
+                        f"Applied service-specific '{config_key}' overrides for {self.service}: {override_value}"
+                    )
+
         with console.status("Loading Key Vaults...", spinner="dots"):
             self.vaults = Vaults(self.service)
             total_vaults = len(config.key_vaults)
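Note that `{**current_config, **override_value}` is a shallow merge: top-level keys from the service override win, but nested dictionaries are replaced wholesale rather than merged recursively. A sketch of the difference, with hypothetical config values:

```python
# Hypothetical values illustrating the shallow merge used above.
current_config = {
    "downloader": "requests",
    "headers": {"User-Agent": "unshackle", "Accept": "*/*"},
}
override_value = {"headers": {"User-Agent": "custom"}}

merged = {**current_config, **override_value}
# Top-level "downloader" survives, but the nested "headers" dict is
# replaced entirely -- the "Accept" key is gone after the merge.
assert merged == {"downloader": "requests", "headers": {"User-Agent": "custom"}}
```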
@@ -384,18 +460,38 @@ class dl:
             self.cdm = self.get_cdm(self.service, self.profile)
         except ValueError as e:
             self.log.error(f"Failed to load CDM, {e}")
+            if self.debug_logger:
+                self.debug_logger.log_error("load_cdm", e, service=self.service)
             sys.exit(1)
 
         if self.cdm:
+            cdm_info = {}
             if isinstance(self.cdm, DecryptLabsRemoteCDM):
                 drm_type = "PlayReady" if self.cdm.is_playready else "Widevine"
                 self.log.info(f"Loaded {drm_type} Remote CDM: DecryptLabs (L{self.cdm.security_level})")
+                cdm_info = {"type": "DecryptLabs", "drm_type": drm_type, "security_level": self.cdm.security_level}
             elif hasattr(self.cdm, "device_type") and self.cdm.device_type.name in ["ANDROID", "CHROME"]:
                 self.log.info(f"Loaded Widevine CDM: {self.cdm.system_id} (L{self.cdm.security_level})")
+                cdm_info = {
+                    "type": "Widevine",
+                    "system_id": self.cdm.system_id,
+                    "security_level": self.cdm.security_level,
+                    "device_type": self.cdm.device_type.name,
+                }
             else:
                 self.log.info(
                     f"Loaded PlayReady CDM: {self.cdm.certificate_chain.get_name()} (L{self.cdm.security_level})"
                 )
+                cdm_info = {
+                    "type": "PlayReady",
+                    "certificate": self.cdm.certificate_chain.get_name(),
+                    "security_level": self.cdm.security_level,
+                }
+
+            if self.debug_logger and cdm_info:
+                self.debug_logger.log(
+                    level="INFO", operation="load_cdm", service=self.service, context={"cdm": cdm_info}
+                )
 
         self.proxy_providers = []
         if no_proxy:
@@ -408,6 +504,8 @@ class dl:
                 self.proxy_providers.append(NordVPN(**config.proxy_providers["nordvpn"]))
             if config.proxy_providers.get("surfsharkvpn"):
                 self.proxy_providers.append(SurfsharkVPN(**config.proxy_providers["surfsharkvpn"]))
+            if config.proxy_providers.get("windscribevpn"):
+                self.proxy_providers.append(WindscribeVPN(**config.proxy_providers["windscribevpn"]))
             if binaries.HolaProxy:
                 self.proxy_providers.append(Hola())
             for proxy_provider in self.proxy_providers:
@@ -468,6 +566,7 @@ class dl:
         channels: float,
         no_atmos: bool,
         wanted: list[str],
+        latest_episode: bool,
         lang: list[str],
         v_lang: list[str],
         a_lang: list[str],
@@ -521,18 +620,73 @@ class dl:
         else:
             vaults_only = not cdm_only
 
+        if self.debug_logger:
+            self.debug_logger.log(
+                level="DEBUG",
+                operation="drm_mode_config",
+                service=self.service,
+                context={
+                    "cdm_only": cdm_only,
+                    "vaults_only": vaults_only,
+                    "mode": "CDM only" if cdm_only else ("Vaults only" if vaults_only else "Both CDM and Vaults"),
+                },
+            )
+
         with console.status("Authenticating with Service...", spinner="dots"):
-            cookies = self.get_cookie_jar(self.service, self.profile)
-            credential = self.get_credentials(self.service, self.profile)
-            service.authenticate(cookies, credential)
-            if cookies or credential:
-                self.log.info("Authenticated with Service")
+            try:
+                cookies = self.get_cookie_jar(self.service, self.profile)
+                credential = self.get_credentials(self.service, self.profile)
+                service.authenticate(cookies, credential)
+                if cookies or credential:
+                    self.log.info("Authenticated with Service")
+                    if self.debug_logger:
+                        self.debug_logger.log(
+                            level="INFO",
+                            operation="authenticate",
+                            service=self.service,
+                            context={
+                                "has_cookies": bool(cookies),
+                                "has_credentials": bool(credential),
+                                "profile": self.profile,
+                            },
+                        )
+            except Exception as e:
+                if self.debug_logger:
+                    self.debug_logger.log_error(
+                        "authenticate", e, service=self.service, context={"profile": self.profile}
+                    )
+                raise
 
         with console.status("Fetching Title Metadata...", spinner="dots"):
-            titles = service.get_titles_cached()
-            if not titles:
-                self.log.error("No titles returned, nothing to download...")
-                sys.exit(1)
+            try:
+                titles = service.get_titles_cached()
+                if not titles:
+                    self.log.error("No titles returned, nothing to download...")
+                    if self.debug_logger:
+                        self.debug_logger.log(
+                            level="ERROR",
+                            operation="get_titles",
+                            service=self.service,
+                            message="No titles returned from service",
+                            success=False,
+                        )
+                    sys.exit(1)
+            except Exception as e:
+                if self.debug_logger:
+                    self.debug_logger.log_error("get_titles", e, service=self.service)
+                raise
+
+        if self.debug_logger:
+            titles_info = {
+                "type": titles.__class__.__name__,
+                "count": len(titles) if hasattr(titles, "__len__") else 1,
+                "title": str(titles),
+            }
+            if hasattr(titles, "seasons"):
+                titles_info["seasons"] = len(titles.seasons) if hasattr(titles, "seasons") else 0
+            self.debug_logger.log(
+                level="INFO", operation="get_titles", service=self.service, context={"titles": titles_info}
+            )
 
         if self.tmdb_year and self.tmdb_id:
             sample_title = titles[0] if hasattr(titles, "__getitem__") else titles
@@ -551,8 +705,21 @@ class dl:
         if list_titles:
             return
 
+        # Determine the latest episode if --latest-episode is set
+        latest_episode_id = None
+        if latest_episode and isinstance(titles, Series) and len(titles) > 0:
+            # Series is already sorted by (season, number, year)
+            # The last episode in the sorted list is the latest
+            latest_ep = titles[-1]
+            latest_episode_id = f"{latest_ep.season}x{latest_ep.number}"
+            self.log.info(f"Latest episode mode: Selecting S{latest_ep.season:02}E{latest_ep.number:02}")
+
         for i, title in enumerate(titles):
-            if isinstance(title, Episode) and wanted and f"{title.season}x{title.number}" not in wanted:
+            if isinstance(title, Episode) and latest_episode and latest_episode_id:
+                # If --latest-episode is set, only process the latest episode
+                if f"{title.season}x{title.number}" != latest_episode_id:
+                    continue
+            elif isinstance(title, Episode) and wanted and f"{title.season}x{title.number}" not in wanted:
                 continue
 
             console.print(Padding(Rule(f"[rule.text]{title}"), (1, 2)))
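The selection relies on `Series` keeping episodes sorted by `(season, number, year)`, so `titles[-1]` is always the newest episode; matching then reuses the same `SxN` key format as `--wanted`. A self-contained sketch of the same selection logic over plain tuples:

```python
# Minimal stand-in for Episode objects: (season, number) pairs,
# already sorted the way Series sorts them.
episodes = [(1, 1), (1, 2), (2, 1), (2, 2), (2, 3)]

latest = episodes[-1]                   # newest episode: S02E03
latest_id = f"{latest[0]}x{latest[1]}"  # "2x3", same key format as --wanted

selected = [ep for ep in episodes if f"{ep[0]}x{ep[1]}" == latest_id]
assert selected == [(2, 3)]
```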
@@ -621,8 +788,58 @@ class dl:
                 title.tracks.subtitles = []
 
             with console.status("Getting tracks...", spinner="dots"):
-                title.tracks.add(service.get_tracks(title), warn_only=True)
-                title.tracks.chapters = service.get_chapters(title)
+                try:
+                    title.tracks.add(service.get_tracks(title), warn_only=True)
+                    title.tracks.chapters = service.get_chapters(title)
+                except Exception as e:
+                    if self.debug_logger:
+                        self.debug_logger.log_error(
+                            "get_tracks", e, service=self.service, context={"title": str(title)}
+                        )
+                    raise
+
+                if self.debug_logger:
+                    tracks_info = {
+                        "title": str(title),
+                        "video_tracks": len(title.tracks.videos),
+                        "audio_tracks": len(title.tracks.audio),
+                        "subtitle_tracks": len(title.tracks.subtitles),
+                        "has_chapters": bool(title.tracks.chapters),
+                        "videos": [
+                            {
+                                "codec": str(v.codec),
+                                "resolution": f"{v.width}x{v.height}" if v.width and v.height else "unknown",
+                                "bitrate": v.bitrate,
+                                "range": str(v.range),
+                                "language": str(v.language) if v.language else None,
+                                "drm": [str(type(d).__name__) for d in v.drm] if v.drm else [],
+                            }
+                            for v in title.tracks.videos
+                        ],
+                        "audio": [
+                            {
+                                "codec": str(a.codec),
+                                "bitrate": a.bitrate,
+                                "channels": a.channels,
+                                "language": str(a.language) if a.language else None,
+                                "descriptive": a.descriptive,
+                                "drm": [str(type(d).__name__) for d in a.drm] if a.drm else [],
+                            }
+                            for a in title.tracks.audio
+                        ],
+                        "subtitles": [
+                            {
+                                "codec": str(s.codec),
+                                "language": str(s.language) if s.language else None,
+                                "forced": s.forced,
+                                "sdh": s.sdh,
+                            }
+                            for s in title.tracks.subtitles
+                        ],
+                    }
+                    self.debug_logger.log(
+                        level="INFO", operation="get_tracks", service=self.service, context=tracks_info
+                    )
 
             # strip SDH subs to non-SDH if no equivalent same-lang non-SDH is available
             # uses a loose check, e.g, wont strip en-US SDH sub if a non-SDH en-GB is available
@@ -785,6 +1002,29 @@ class dl:
                     selected_videos.append(match)
                 title.tracks.videos = selected_videos
 
+            # validate hybrid mode requirements
+            if any(r == Video.Range.HYBRID for r in range_):
+                hdr10_tracks = [v for v in title.tracks.videos if v.range == Video.Range.HDR10]
+                dv_tracks = [v for v in title.tracks.videos if v.range == Video.Range.DV]
+
+                if not hdr10_tracks and not dv_tracks:
+                    available_ranges = sorted(set(v.range.name for v in title.tracks.videos))
+                    self.log.error("HYBRID mode requires both HDR10 and DV tracks, but neither is available")
+                    self.log.error(
+                        f"Available ranges: {', '.join(available_ranges) if available_ranges else 'none'}"
+                    )
+                    sys.exit(1)
+                elif not hdr10_tracks:
+                    available_ranges = sorted(set(v.range.name for v in title.tracks.videos))
+                    self.log.error("HYBRID mode requires both HDR10 and DV tracks, but only DV is available")
+                    self.log.error(f"Available ranges: {', '.join(available_ranges)}")
+                    sys.exit(1)
+                elif not dv_tracks:
+                    available_ranges = sorted(set(v.range.name for v in title.tracks.videos))
+                    self.log.error("HYBRID mode requires both HDR10 and DV tracks, but only HDR10 is available")
+                    self.log.error(f"Available ranges: {', '.join(available_ranges)}")
+                    sys.exit(1)
+
             # filter subtitle tracks
             if require_subs:
                 missing_langs = [
@@ -1009,6 +1249,14 @@ class dl:
                     download.result()
             except KeyboardInterrupt:
                 console.print(Padding(":x: Download Cancelled...", (0, 5, 1, 5)))
+                if self.debug_logger:
+                    self.debug_logger.log(
+                        level="WARNING",
+                        operation="download_tracks",
+                        service=self.service,
+                        message="Download cancelled by user",
+                        context={"title": str(title)},
+                    )
                 return
             except Exception as e:  # noqa
                 error_messages = [
@@ -1031,6 +1279,19 @@ class dl:
                     # CalledProcessError already lists the exception trace
                     console.print_exception()
                 console.print(Padding(Group(*error_messages), (1, 5)))
+
+                if self.debug_logger:
+                    self.debug_logger.log_error(
+                        "download_tracks",
+                        e,
+                        service=self.service,
+                        context={
+                            "title": str(title),
+                            "error_type": type(e).__name__,
+                            "tracks_count": len(title.tracks),
+                            "returncode": getattr(e, "returncode", None),
+                        },
+                    )
                 return
 
             if skip_dl:
@@ -1284,9 +1545,13 @@ class dl:
             if not no_folder and isinstance(title, (Episode, Song)):
                 # Create folder based on title
                 # Use first available track for filename generation
-                sample_track = title.tracks.videos[0] if title.tracks.videos else (
-                    title.tracks.audio[0] if title.tracks.audio else (
-                        title.tracks.subtitles[0] if title.tracks.subtitles else None
+                sample_track = (
+                    title.tracks.videos[0]
+                    if title.tracks.videos
+                    else (
+                        title.tracks.audio[0]
+                        if title.tracks.audio
+                        else (title.tracks.subtitles[0] if title.tracks.subtitles else None)
                     )
                 )
                 if sample_track and sample_track.path:
@@ -1307,7 +1572,9 @@ class dl:
                         track_suffix = f".{track.codec.name if hasattr(track.codec, 'name') else 'video'}"
                     elif isinstance(track, Audio):
                         lang_suffix = f".{track.language}" if track.language else ""
-                        track_suffix = f"{lang_suffix}.{track.codec.name if hasattr(track.codec, 'name') else 'audio'}"
+                        track_suffix = (
+                            f"{lang_suffix}.{track.codec.name if hasattr(track.codec, 'name') else 'audio'}"
+                        )
                     elif isinstance(track, Subtitle):
                         lang_suffix = f".{track.language}" if track.language else ""
                         forced_suffix = ".forced" if track.forced else ""
@@ -1394,6 +1661,20 @@ class dl:
             self.cdm = playready_cdm
 
         if isinstance(drm, Widevine):
+            if self.debug_logger:
+                self.debug_logger.log_drm_operation(
+                    drm_type="Widevine",
+                    operation="prepare_drm",
+                    service=self.service,
+                    context={
+                        "track": str(track),
+                        "title": str(title),
+                        "pssh": drm.pssh.dumps() if drm.pssh else None,
+                        "kids": [k.hex for k in drm.kids],
+                        "track_kid": track_kid.hex if track_kid else None,
+                    },
+                )
+
             with self.DRM_TABLE_LOCK:
                 pssh_display = self._truncate_pssh_for_display(drm.pssh.dumps(), "Widevine")
                 cek_tree = Tree(Text.assemble(("Widevine", "cyan"), (f"({pssh_display})", "text"), overflow="fold"))
@@ -1422,11 +1703,32 @@ class dl:
                         if not any(f"{kid.hex}:{content_key}" in x.label for x in cek_tree.children):
                             cek_tree.add(label)
                         self.vaults.add_key(kid, content_key, excluding=vault_used)
+
+                        if self.debug_logger:
+                            self.debug_logger.log_vault_query(
+                                vault_name=vault_used,
+                                operation="get_key_success",
+                                service=self.service,
+                                context={
+                                    "kid": kid.hex,
+                                    "content_key": content_key,
+                                    "track": str(track),
+                                    "from_cache": True,
+                                },
+                            )
                     elif vaults_only:
                         msg = f"No Vault has a Key for {kid.hex} and --vaults-only was used"
                         cek_tree.add(f"[logging.level.error]{msg}")
                         if not pre_existing_tree:
                             table.add_row(cek_tree)
+                        if self.debug_logger:
+                            self.debug_logger.log(
+                                level="ERROR",
+                                operation="vault_key_not_found",
+                                service=self.service,
+                                message=msg,
+                                context={"kid": kid.hex, "track": str(track)},
+                            )
                         raise Widevine.Exceptions.CEKNotFound(msg)
                     else:
                         need_license = True
@@ -1437,6 +1739,18 @@ class dl:
                 if need_license and not vaults_only:
                     from_vaults = drm.content_keys.copy()
 
+                    if self.debug_logger:
+                        self.debug_logger.log(
+                            level="INFO",
+                            operation="get_license",
+                            service=self.service,
+                            message="Requesting Widevine license from service",
+                            context={
+                                "track": str(track),
+                                "kids_needed": [k.hex for k in all_kids if k not in drm.content_keys],
+                            },
+                        )
+
                     try:
                         if self.service == "NF":
                             drm.get_NF_content_keys(cdm=self.cdm, licence=licence, certificate=certificate)
@@ -1450,8 +1764,27 @@ class dl:
                         cek_tree.add(f"[logging.level.error]{msg}")
                         if not pre_existing_tree:
                             table.add_row(cek_tree)
+                        if self.debug_logger:
+                            self.debug_logger.log_error(
+                                "get_license",
+                                e,
+                                service=self.service,
+                                context={"track": str(track), "exception_type": type(e).__name__},
+                            )
                         raise e
 
+                    if self.debug_logger:
+                        self.debug_logger.log(
+                            level="INFO",
+                            operation="license_keys_retrieved",
+                            service=self.service,
+                            context={
+                                "track": str(track),
+                                "keys_count": len(drm.content_keys),
+                                "kids": [k.hex for k in drm.content_keys.keys()],
+                            },
+                        )
+
                     for kid_, key in drm.content_keys.items():
                         if key == "0" * 32:
                             key = f"[red]{key}[/]"
@@ -1497,6 +1830,20 @@ class dl:
                 export.write_text(jsonpickle.dumps(keys, indent=4), encoding="utf8")
 
         elif isinstance(drm, PlayReady):
+            if self.debug_logger:
+                self.debug_logger.log_drm_operation(
+                    drm_type="PlayReady",
+                    operation="prepare_drm",
+                    service=self.service,
+                    context={
+                        "track": str(track),
+                        "title": str(title),
+                        "pssh": drm.pssh_b64 or "",
+                        "kids": [k.hex for k in drm.kids],
+                        "track_kid": track_kid.hex if track_kid else None,
+                    },
+                )
+
             with self.DRM_TABLE_LOCK:
                 pssh_display = self._truncate_pssh_for_display(drm.pssh_b64 or "", "PlayReady")
                 cek_tree = Tree(
@@ -1531,11 +1878,33 @@ class dl:
                         if not any(f"{kid.hex}:{content_key}" in x.label for x in cek_tree.children):
                             cek_tree.add(label)
                         self.vaults.add_key(kid, content_key, excluding=vault_used)
+
+                        if self.debug_logger:
+                            self.debug_logger.log_vault_query(
+                                vault_name=vault_used,
+                                operation="get_key_success",
+                                service=self.service,
+                                context={
+                                    "kid": kid.hex,
+                                    "content_key": content_key,
+                                    "track": str(track),
+                                    "from_cache": True,
+                                    "drm_type": "PlayReady",
+                                },
+                            )
                     elif vaults_only:
                         msg = f"No Vault has a Key for {kid.hex} and --vaults-only was used"
                         cek_tree.add(f"[logging.level.error]{msg}")
                         if not pre_existing_tree:
                             table.add_row(cek_tree)
+                        if self.debug_logger:
+                            self.debug_logger.log(
+                                level="ERROR",
+                                operation="vault_key_not_found",
+                                service=self.service,
+                                message=msg,
+                                context={"kid": kid.hex, "track": str(track), "drm_type": "PlayReady"},
+                            )
                         raise PlayReady.Exceptions.CEKNotFound(msg)
                     else:
                         need_license = True
@@ -1556,6 +1925,17 @@ class dl:
                         cek_tree.add(f"[logging.level.error]{msg}")
                         if not pre_existing_tree:
                             table.add_row(cek_tree)
+                        if self.debug_logger:
+                            self.debug_logger.log_error(
+                                "get_license_playready",
+                                e,
+                                service=self.service,
+                                context={
+                                    "track": str(track),
+                                    "exception_type": type(e).__name__,
+                                    "drm_type": "PlayReady",
+                                },
+                            )
                         raise e
 
                     for kid_, key in drm.content_keys.items():
@@ -1630,7 +2010,7 @@ class dl:
 
     @staticmethod
     def save_cookies(path: Path, cookies: CookieJar):
-        if hasattr(cookies, 'jar'):
+        if hasattr(cookies, "jar"):
             cookies = cookies.jar
 
         cookie_jar = MozillaCookieJar(path)
@@ -1750,8 +2130,9 @@ class dl:
 
         cdm_api = next(iter(x.copy() for x in config.remote_cdm if x["name"] == cdm_name), None)
         if cdm_api:
-            is_decrypt_lab = True if cdm_api.get("type") == "decrypt_labs" else False
-            if is_decrypt_lab:
+            cdm_type = cdm_api.get("type")
+
+            if cdm_type == "decrypt_labs":
                 del cdm_api["name"]
                 del cdm_api["type"]
 
@@ -1766,14 +2147,22 @@ class dl:
 
                 # All DecryptLabs CDMs use DecryptLabsRemoteCDM
                 return DecryptLabsRemoteCDM(service_name=service, vaults=self.vaults, **cdm_api)
+
+            elif cdm_type == "custom_api":
+                del cdm_api["name"]
+                del cdm_api["type"]
+
+                # All Custom API CDMs use CustomRemoteCDM
+                return CustomRemoteCDM(service_name=service, vaults=self.vaults, **cdm_api)
+
             else:
                 return RemoteCdm(
-                    device_type=cdm_api['Device Type'],
-                    system_id=cdm_api['System ID'],
-                    security_level=cdm_api['Security Level'],
-                    host=cdm_api['Host'],
-                    secret=cdm_api['Secret'],
-                    device_name=cdm_api['Device Name'],
+                    device_type=cdm_api["Device Type"],
+                    system_id=cdm_api["System ID"],
+                    security_level=cdm_api["Security Level"],
+                    host=cdm_api["Host"],
+                    secret=cdm_api["Secret"],
+                    device_name=cdm_api["Device Name"],
                 )
 
         prd_path = config.directories.prds / f"{cdm_name}.prd"
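`get_cdm` now dispatches on the entry's `type` field: `decrypt_labs` and `custom_api` entries have `name`/`type` stripped and the remainder passed through as keyword arguments, while untyped entries fall back to pywidevine's `RemoteCdm` with its fixed key names. A hedged sketch of what the corresponding `remote_cdm` config entries might look like; all field values are placeholders, and only the keys shown in the diff are evidenced:

```python
# Placeholder remote_cdm entries; values are invented for illustration.
remote_cdm = [
    {  # routed to CustomRemoteCDM: extra keys become constructor kwargs
        "name": "my-custom",
        "type": "custom_api",
        # ...implementation-specific fields passed through as **cdm_api...
    },
    {  # no "type": falls back to pywidevine's RemoteCdm with these exact keys
        "name": "my-remote",
        "Device Type": "ANDROID",
        "System ID": 1234,
        "Security Level": 3,
        "Host": "https://cdm.example.com",
        "Secret": "secret-key",
        "Device Name": "device",
    },
]
```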
@@ -1,19 +1,26 @@
+import logging
 import subprocess
 
 import click
+from aiohttp import web
 
 from unshackle.core import binaries
+from unshackle.core.api import cors_middleware, setup_routes, setup_swagger
 from unshackle.core.config import config
 from unshackle.core.constants import context_settings
 
 
-@click.command(short_help="Serve your Local Widevine Devices for Remote Access.", context_settings=context_settings)
+@click.command(
+    short_help="Serve your Local Widevine Devices and REST API for Remote Access.", context_settings=context_settings
+)
 @click.option("-h", "--host", type=str, default="0.0.0.0", help="Host to serve from.")
 @click.option("-p", "--port", type=int, default=8786, help="Port to serve from.")
 @click.option("--caddy", is_flag=True, default=False, help="Also serve with Caddy.")
-def serve(host: str, port: int, caddy: bool) -> None:
+@click.option("--api-only", is_flag=True, default=False, help="Serve only the REST API, not pywidevine CDM.")
+@click.option("--no-key", is_flag=True, default=False, help="Disable API key authentication (allows all requests).")
+def serve(host: str, port: int, caddy: bool, api_only: bool, no_key: bool) -> None:
     """
-    Serve your Local Widevine Devices for Remote Access.
+    Serve your Local Widevine Devices and REST API for Remote Access.
 
     \b
     Host as 127.0.0.1 may block remote access even if port-forwarded.
@@ -23,8 +30,25 @@ def serve(host: str, port: int, caddy: bool) -> None:
     You may serve with Caddy at the same time with --caddy. You can use Caddy
     as a reverse-proxy to serve with HTTPS. The config used will be the Caddyfile
     next to the unshackle config.
+
+    \b
+    The REST API provides programmatic access to unshackle functionality.
+    Configure authentication in your config under serve.users and serve.api_secret.
     """
-    from pywidevine import serve
+    from pywidevine import serve as pywidevine_serve
+
+    log = logging.getLogger("serve")
+
+    # Validate API secret for REST API routes (unless --no-key is used)
+    if not no_key:
+        api_secret = config.serve.get("api_secret")
+        if not api_secret:
+            raise click.ClickException(
+                "API secret key is not configured. Please add 'api_secret' to the 'serve' section in your config."
+            )
+    else:
+        api_secret = None
+        log.warning("Running with --no-key: Authentication is DISABLED for all API endpoints!")
 
     if caddy:
         if not binaries.Caddy:
@@ -39,7 +63,51 @@ def serve(host: str, port: int, caddy: bool) -> None:
         if not config.serve.get("devices"):
             config.serve["devices"] = []
         config.serve["devices"].extend(list(config.directories.wvds.glob("*.wvd")))
-        serve.run(config.serve, host, port)
+
+        if api_only:
+            # API-only mode: serve just the REST API
+            log.info("Starting REST API server (pywidevine CDM disabled)")
+            if no_key:
+                app = web.Application(middlewares=[cors_middleware])
+                app["config"] = {"users": []}
+            else:
+                app = web.Application(middlewares=[cors_middleware, pywidevine_serve.authentication])
+                app["config"] = {"users": [api_secret]}
+            setup_routes(app)
+            setup_swagger(app)
+            log.info(f"REST API endpoints available at http://{host}:{port}/api/")
+            log.info(f"Swagger UI available at http://{host}:{port}/api/docs/")
+            log.info("(Press CTRL+C to quit)")
+            web.run_app(app, host=host, port=port, print=None)
+        else:
+            # Integrated mode: serve both pywidevine + REST API
+            log.info("Starting integrated server (pywidevine CDM + REST API)")
+
+            # Create integrated app with both pywidevine and API routes
+            if no_key:
+                app = web.Application(middlewares=[cors_middleware])
+                app["config"] = dict(config.serve)
+                app["config"]["users"] = []
+            else:
+                app = web.Application(middlewares=[cors_middleware, pywidevine_serve.authentication])
+                # Setup config - add API secret to users for authentication
+                serve_config = dict(config.serve)
+                if not serve_config.get("users"):
+                    serve_config["users"] = []
+                if api_secret not in serve_config["users"]:
+                    serve_config["users"].append(api_secret)
+                app["config"] = serve_config
+
+            app.on_startup.append(pywidevine_serve._startup)
+            app.on_cleanup.append(pywidevine_serve._cleanup)
+            app.add_routes(pywidevine_serve.routes)
+            setup_routes(app)
+            setup_swagger(app)
+
+            log.info(f"REST API endpoints available at http://{host}:{port}/api/")
+            log.info(f"Swagger UI available at http://{host}:{port}/api/docs/")
+            log.info("(Press CTRL+C to quit)")
+            web.run_app(app, host=host, port=port, print=None)
     finally:
         if caddy_p:
             caddy_p.kill()
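Both serve modes follow the same aiohttp pattern: middlewares run in list order, and under `--no-key` authentication is simply dropped from the list. A reduced sketch of that composition with stand-in middlewares (the real `cors_middleware` and `pywidevine_serve.authentication` are imported above; these bodies are assumptions for illustration):

```python
from aiohttp import web

@web.middleware
async def cors_middleware(request, handler):
    # Stand-in for unshackle.core.api.cors_middleware.
    response = await handler(request)
    response.headers["Access-Control-Allow-Origin"] = "*"
    return response

@web.middleware
async def authentication(request, handler):
    # Stand-in for pywidevine's serve authentication middleware.
    if request.headers.get("X-Secret-Key") not in request.app["config"]["users"]:
        raise web.HTTPUnauthorized()
    return await handler(request)

def build_app(no_key: bool, api_secret: str | None) -> web.Application:
    middlewares = [cors_middleware] if no_key else [cors_middleware, authentication]
    app = web.Application(middlewares=middlewares)
    # With --no-key the users list is empty, but since the authentication
    # middleware is absent from the chain, nothing needs to authenticate.
    app["config"] = {"users": [] if no_key else [api_secret]}
    return app
```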
@@ -1 +1 @@
-__version__ = "1.4.8"
+__version__ = "2.0.0"
@@ -1,6 +1,5 @@
 import atexit
 import logging
-from pathlib import Path
 
 import click
 import urllib3
@@ -16,23 +15,16 @@ from unshackle.core.config import config
 from unshackle.core.console import ComfyRichHandler, console
 from unshackle.core.constants import context_settings
 from unshackle.core.update_checker import UpdateChecker
-from unshackle.core.utilities import rotate_log_file
-
-LOGGING_PATH = None
+from unshackle.core.utilities import close_debug_logger, init_debug_logger
 
 
 @click.command(cls=Commands, invoke_without_command=True, context_settings=context_settings)
 @click.option("-v", "--version", is_flag=True, default=False, help="Print version information.")
-@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs.")
-@click.option(
-    "--log",
-    "log_path",
-    type=Path,
-    default=config.directories.logs / config.filenames.log,
-    help="Log path (or filename). Path can contain the following f-string args: {name} {time}.",
-)
-def main(version: bool, debug: bool, log_path: Path) -> None:
+@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs and JSON debug logging.")
+def main(version: bool, debug: bool) -> None:
     """unshackle—Modular Movie, TV, and Music Archival Software."""
+    debug_logging_enabled = debug or config.debug
+
     logging.basicConfig(
         level=logging.DEBUG if debug else logging.INFO,
         format="%(message)s",
@@ -48,11 +40,8 @@ def main(version: bool, debug: bool, log_path: Path) -> None:
         ],
     )
 
-    if log_path:
-        global LOGGING_PATH
-        console.record = True
-        new_log_path = rotate_log_file(log_path)
-        LOGGING_PATH = new_log_path
+    if debug_logging_enabled:
+        init_debug_logger(enabled=True)
 
     urllib3.disable_warnings(InsecureRequestWarning)
 
@@ -98,10 +87,9 @@ def main(version: bool, debug: bool, log_path: Path) -> None:
 
 
 @atexit.register
-def save_log():
-    if console.record and LOGGING_PATH:
-        # TODO: Currently semi-bust. Everything that refreshes gets duplicated.
-        console.save_text(LOGGING_PATH)
+def cleanup():
+    """Clean up resources on exit."""
+    close_debug_logger()
 
 
 if __name__ == "__main__":
unshackle/core/api/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+from unshackle.core.api.routes import cors_middleware, setup_routes, setup_swagger
+
+__all__ = ["setup_routes", "setup_swagger", "cors_middleware"]
631
unshackle/core/api/download_manager.py
Normal file
631
unshackle/core/api/download_manager.py
Normal file
@@ -0,0 +1,631 @@
|
|||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import threading
|
||||||
|
import uuid
|
||||||
|
from contextlib import suppress
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
log = logging.getLogger("download_manager")
|
||||||
|
|
||||||
|
|
||||||
|
class JobStatus(Enum):
|
||||||
|
QUEUED = "queued"
|
||||||
|
DOWNLOADING = "downloading"
|
||||||
|
COMPLETED = "completed"
|
||||||
|
FAILED = "failed"
|
||||||
|
CANCELLED = "cancelled"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class DownloadJob:
|
||||||
|
"""Represents a download job with all its parameters and status."""
|
||||||
|
|
||||||
|
job_id: str
|
||||||
|
status: JobStatus
|
||||||
|
created_time: datetime
|
||||||
|
service: str
|
||||||
|
title_id: str
|
||||||
|
parameters: Dict[str, Any]
|
||||||
|
|
||||||
|
# Progress tracking
|
||||||
|
started_time: Optional[datetime] = None
|
||||||
|
completed_time: Optional[datetime] = None
|
||||||
|
progress: float = 0.0
|
||||||
|
|
||||||
|
# Results and error info
|
||||||
|
output_files: List[str] = field(default_factory=list)
|
||||||
|
error_message: Optional[str] = None
|
||||||
|
error_details: Optional[str] = None
|
||||||
|
|
||||||
|
# Cancellation support
|
||||||
|
cancel_event: threading.Event = field(default_factory=threading.Event)
|
||||||
|
|
||||||
|
def to_dict(self, include_full_details: bool = False) -> Dict[str, Any]:
|
||||||
|
"""Convert job to dictionary for JSON response."""
|
||||||
|
result = {
|
||||||
|
"job_id": self.job_id,
|
||||||
|
"status": self.status.value,
|
||||||
|
"created_time": self.created_time.isoformat(),
|
||||||
|
"service": self.service,
|
||||||
|
"title_id": self.title_id,
|
||||||
|
"progress": self.progress,
|
||||||
|
}
|
||||||
|
|
||||||
|
if include_full_details:
|
||||||
|
result.update(
|
||||||
|
{
|
||||||
|
"parameters": self.parameters,
|
||||||
|
"started_time": self.started_time.isoformat() if self.started_time else None,
|
||||||
|
"completed_time": self.completed_time.isoformat() if self.completed_time else None,
|
||||||
|
"output_files": self.output_files,
|
||||||
|
"error_message": self.error_message,
|
||||||
|
"error_details": self.error_details,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def _perform_download(
|
||||||
|
job_id: str,
|
||||||
|
service: str,
|
||||||
|
title_id: str,
|
||||||
|
params: Dict[str, Any],
|
||||||
|
cancel_event: Optional[threading.Event] = None,
|
||||||
|
progress_callback: Optional[Callable[[Dict[str, Any]], None]] = None,
|
||||||
|
) -> List[str]:
|
||||||
|
"""Execute the synchronous download logic for a job."""
|
||||||
|
|
||||||
|
def _check_cancel(stage: str):
|
||||||
|
if cancel_event and cancel_event.is_set():
|
||||||
|
raise Exception(f"Job was cancelled {stage}")
|
||||||
|
|
||||||
|
from contextlib import redirect_stderr, redirect_stdout
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
_check_cancel("before execution started")
|
||||||
|
|
||||||
|
# Import dl.py components lazily to avoid circular deps during module import
|
||||||
|
import click
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from unshackle.commands.dl import dl
|
||||||
|
from unshackle.core.config import config
|
||||||
|
from unshackle.core.services import Services
|
||||||
|
from unshackle.core.utils.click_types import ContextData
|
||||||
|
from unshackle.core.utils.collections import merge_dict
|
||||||
|
|
||||||
|
log.info(f"Starting sync download for job {job_id}")
|
||||||
|
|
||||||
|
# Load service configuration
|
||||||
|
service_config_path = Services.get_path(service) / config.filenames.config
|
||||||
|
if service_config_path.exists():
|
||||||
|
service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
|
||||||
|
else:
|
||||||
|
service_config = {}
|
||||||
|
merge_dict(config.services.get(service), service_config)
|
||||||
|
|
||||||
|
from unshackle.commands.dl import dl as dl_command
|
||||||
|
|
||||||
|
ctx = click.Context(dl_command.cli)
|
||||||
|
ctx.invoked_subcommand = service
|
||||||
|
ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=[], profile=params.get("profile"))
|
||||||
|
ctx.params = {
|
||||||
|
"proxy": params.get("proxy"),
|
||||||
|
"no_proxy": params.get("no_proxy", False),
|
||||||
|
"profile": params.get("profile"),
|
||||||
|
"tag": params.get("tag"),
|
||||||
|
"tmdb_id": params.get("tmdb_id"),
|
||||||
|
"tmdb_name": params.get("tmdb_name", False),
|
||||||
|
"tmdb_year": params.get("tmdb_year", False),
|
||||||
|
}
|
||||||
|
|
||||||
|
dl_instance = dl(
|
||||||
        ctx=ctx,
        no_proxy=params.get("no_proxy", False),
        profile=params.get("profile"),
        proxy=params.get("proxy"),
        tag=params.get("tag"),
        tmdb_id=params.get("tmdb_id"),
        tmdb_name=params.get("tmdb_name", False),
        tmdb_year=params.get("tmdb_year", False),
    )

    service_module = Services.load(service)

    _check_cancel("before service instantiation")

    try:
        import inspect

        service_init_params = inspect.signature(service_module.__init__).parameters

        service_ctx = click.Context(click.Command(service))
        service_ctx.parent = ctx
        service_ctx.obj = ctx.obj

        service_kwargs = {}

        if "title" in service_init_params:
            service_kwargs["title"] = title_id

        for key, value in params.items():
            if key in service_init_params and key not in ["service", "title_id"]:
                service_kwargs[key] = value

        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                if param_info.default is inspect.Parameter.empty:
                    if param_name == "movie":
                        service_kwargs[param_name] = "/movies/" in title_id
                    elif param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    else:
                        log.warning(f"Unknown required parameter '{param_name}' for service {service}, using None")
                        service_kwargs[param_name] = None

        service_instance = service_module(service_ctx, **service_kwargs)

    except Exception as exc:  # noqa: BLE001 - propagate meaningful failure
        log.error(f"Failed to create service instance: {exc}")
        raise

    original_download_dir = config.directories.downloads

    _check_cancel("before download execution")

    stdout_capture = StringIO()
    stderr_capture = StringIO()

    # Simple progress tracking if callback provided
    if progress_callback:
        # Report initial progress
        progress_callback({"progress": 0.0, "status": "starting"})

        # Simple approach: report progress at key points
        original_result = dl_instance.result

        def result_with_progress(*args, **kwargs):
            try:
                # Report that download started
                progress_callback({"progress": 5.0, "status": "downloading"})

                # Call original method
                result = original_result(*args, **kwargs)

                # Report completion
                progress_callback({"progress": 100.0, "status": "completed"})
                return result
            except Exception as e:
                progress_callback({"progress": 0.0, "status": "failed", "error": str(e)})
                raise

        dl_instance.result = result_with_progress

    try:
        with redirect_stdout(stdout_capture), redirect_stderr(stderr_capture):
            dl_instance.result(
                service=service_instance,
                quality=params.get("quality", []),
                vcodec=params.get("vcodec"),
                acodec=params.get("acodec"),
                vbitrate=params.get("vbitrate"),
                abitrate=params.get("abitrate"),
                range_=params.get("range", []),
                channels=params.get("channels"),
                no_atmos=params.get("no_atmos", False),
                wanted=params.get("wanted", []),
                lang=params.get("lang", ["orig"]),
                v_lang=params.get("v_lang", []),
                a_lang=params.get("a_lang", []),
                s_lang=params.get("s_lang", ["all"]),
                require_subs=params.get("require_subs", []),
                forced_subs=params.get("forced_subs", False),
                sub_format=params.get("sub_format"),
                video_only=params.get("video_only", False),
                audio_only=params.get("audio_only", False),
                subs_only=params.get("subs_only", False),
                chapters_only=params.get("chapters_only", False),
                no_subs=params.get("no_subs", False),
                no_audio=params.get("no_audio", False),
                no_chapters=params.get("no_chapters", False),
                slow=params.get("slow", False),
                list_=False,
                list_titles=False,
                skip_dl=params.get("skip_dl", False),
                export=params.get("export"),
                cdm_only=params.get("cdm_only"),
                no_proxy=params.get("no_proxy", False),
                no_folder=params.get("no_folder", False),
                no_source=params.get("no_source", False),
                workers=params.get("workers"),
                downloads=params.get("downloads", 1),
                best_available=params.get("best_available", False),
            )

    except SystemExit as exc:
        if exc.code != 0:
            stdout_str = stdout_capture.getvalue()
            stderr_str = stderr_capture.getvalue()
            log.error(f"Download exited with code {exc.code}")
            log.error(f"Stdout: {stdout_str}")
            log.error(f"Stderr: {stderr_str}")
            raise Exception(f"Download failed with exit code {exc.code}")

    except Exception as exc:  # noqa: BLE001 - propagate to caller
        stdout_str = stdout_capture.getvalue()
        stderr_str = stderr_capture.getvalue()
        log.error(f"Download execution failed: {exc}")
        log.error(f"Stdout: {stdout_str}")
        log.error(f"Stderr: {stderr_str}")
        raise

    log.info(f"Download completed for job {job_id}, files in {original_download_dir}")

    return []

class DownloadQueueManager:
    """Manages download job queue with configurable concurrency limits."""

    def __init__(self, max_concurrent_downloads: int = 2, job_retention_hours: int = 24):
        self.max_concurrent_downloads = max_concurrent_downloads
        self.job_retention_hours = job_retention_hours

        self._jobs: Dict[str, DownloadJob] = {}
        self._job_queue: asyncio.Queue = asyncio.Queue()
        self._active_downloads: Dict[str, asyncio.Task] = {}
        self._download_processes: Dict[str, asyncio.subprocess.Process] = {}
        self._job_temp_files: Dict[str, Dict[str, str]] = {}
        self._workers_started = False
        self._shutdown_event = asyncio.Event()

        log.info(
            f"Initialized download queue manager: max_concurrent={max_concurrent_downloads}, retention_hours={job_retention_hours}"
        )

    def create_job(self, service: str, title_id: str, **parameters) -> DownloadJob:
        """Create a new download job and add it to the queue."""
        job_id = str(uuid.uuid4())
        job = DownloadJob(
            job_id=job_id,
            status=JobStatus.QUEUED,
            created_time=datetime.now(),
            service=service,
            title_id=title_id,
            parameters=parameters,
        )

        self._jobs[job_id] = job
        self._job_queue.put_nowait(job)

        log.info(f"Created download job {job_id} for {service}:{title_id}")
        return job

    def get_job(self, job_id: str) -> Optional[DownloadJob]:
        """Get job by ID."""
        return self._jobs.get(job_id)

    def list_jobs(self) -> List[DownloadJob]:
        """List all jobs."""
        return list(self._jobs.values())

    def cancel_job(self, job_id: str) -> bool:
        """Cancel a job if it's queued or downloading."""
        job = self._jobs.get(job_id)
        if not job:
            return False

        if job.status == JobStatus.QUEUED:
            job.status = JobStatus.CANCELLED
            job.cancel_event.set()  # Signal cancellation
            log.info(f"Cancelled queued job {job_id}")
            return True
        elif job.status == JobStatus.DOWNLOADING:
            # Set the cancellation event first - this will be checked by the download thread
            job.cancel_event.set()
            job.status = JobStatus.CANCELLED
            log.info(f"Signaled cancellation for downloading job {job_id}")

            # Cancel the active download task
            task = self._active_downloads.get(job_id)
            if task:
                task.cancel()
                log.info(f"Cancelled download task for job {job_id}")

            process = self._download_processes.get(job_id)
            if process:
                try:
                    process.terminate()
                    log.info(f"Terminated worker process for job {job_id}")
                except ProcessLookupError:
                    log.debug(f"Worker process for job {job_id} already exited")

            return True

        return False

    def cleanup_old_jobs(self) -> int:
        """Remove jobs older than retention period."""
        cutoff_time = datetime.now() - timedelta(hours=self.job_retention_hours)
        jobs_to_remove = []

        for job_id, job in self._jobs.items():
            if job.status in [JobStatus.COMPLETED, JobStatus.FAILED, JobStatus.CANCELLED]:
                if job.completed_time and job.completed_time < cutoff_time:
                    jobs_to_remove.append(job_id)
                elif not job.completed_time and job.created_time < cutoff_time:
                    jobs_to_remove.append(job_id)

        for job_id in jobs_to_remove:
            del self._jobs[job_id]

        if jobs_to_remove:
            log.info(f"Cleaned up {len(jobs_to_remove)} old jobs")

        return len(jobs_to_remove)

    async def start_workers(self):
        """Start worker tasks to process the download queue."""
        if self._workers_started:
            return

        self._workers_started = True

        # Start worker tasks
        for i in range(self.max_concurrent_downloads):
            asyncio.create_task(self._download_worker(f"worker-{i}"))

        # Start cleanup task
        asyncio.create_task(self._cleanup_worker())

        log.info(f"Started {self.max_concurrent_downloads} download workers")

    async def shutdown(self):
        """Shutdown the queue manager and cancel all active downloads."""
        log.info("Shutting down download queue manager")
        self._shutdown_event.set()

        # Cancel all active downloads
        for task in self._active_downloads.values():
            task.cancel()

        # Terminate worker processes
        for job_id, process in list(self._download_processes.items()):
            try:
                process.terminate()
            except ProcessLookupError:
                log.debug(f"Worker process for job {job_id} already exited during shutdown")

        for job_id, process in list(self._download_processes.items()):
            try:
                await asyncio.wait_for(process.wait(), timeout=5)
            except asyncio.TimeoutError:
                log.warning(f"Worker process for job {job_id} did not exit, killing")
                process.kill()
                await process.wait()
            finally:
                self._download_processes.pop(job_id, None)

        # Clean up any remaining temp files
        for paths in self._job_temp_files.values():
            for path in paths.values():
                try:
                    os.remove(path)
                except OSError:
                    pass
        self._job_temp_files.clear()

        # Wait for workers to finish
        if self._active_downloads:
            await asyncio.gather(*self._active_downloads.values(), return_exceptions=True)

    async def _download_worker(self, worker_name: str):
        """Worker task that processes jobs from the queue."""
        log.debug(f"Download worker {worker_name} started")

        while not self._shutdown_event.is_set():
            try:
                # Wait for a job or shutdown signal
                job = await asyncio.wait_for(self._job_queue.get(), timeout=1.0)

                if job.status == JobStatus.CANCELLED:
                    continue

                # Start processing the job
                job.status = JobStatus.DOWNLOADING
                job.started_time = datetime.now()

                log.info(f"Worker {worker_name} starting job {job.job_id}")

                # Create download task
                download_task = asyncio.create_task(self._execute_download(job))
                self._active_downloads[job.job_id] = download_task

                try:
                    await download_task
                except asyncio.CancelledError:
                    job.status = JobStatus.CANCELLED
                    log.info(f"Job {job.job_id} was cancelled")
                except Exception as e:
                    job.status = JobStatus.FAILED
                    job.error_message = str(e)
                    log.error(f"Job {job.job_id} failed: {e}")
                finally:
                    job.completed_time = datetime.now()
                    if job.job_id in self._active_downloads:
                        del self._active_downloads[job.job_id]

            except asyncio.TimeoutError:
                continue
            except Exception as e:
                log.error(f"Worker {worker_name} error: {e}")

    async def _execute_download(self, job: DownloadJob):
        """Execute the actual download for a job."""
        log.info(f"Executing download for job {job.job_id}")

        try:
            output_files = await self._run_download_async(job)
            job.status = JobStatus.COMPLETED
            job.output_files = output_files
            job.progress = 100.0
            log.info(f"Download completed for job {job.job_id}: {len(output_files)} files")
        except Exception as e:
            job.status = JobStatus.FAILED
            job.error_message = str(e)
            job.error_details = str(e)
            log.error(f"Download failed for job {job.job_id}: {e}")
            raise

    async def _run_download_async(self, job: DownloadJob) -> List[str]:
        """Invoke a worker subprocess to execute the download."""

        payload = {
            "job_id": job.job_id,
            "service": job.service,
            "title_id": job.title_id,
            "parameters": job.parameters,
        }

        payload_fd, payload_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_payload.json")
        os.close(payload_fd)
        result_fd, result_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_result.json")
        os.close(result_fd)
        progress_fd, progress_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_progress.json")
        os.close(progress_fd)

        with open(payload_path, "w", encoding="utf-8") as handle:
            json.dump(payload, handle)

        process = await asyncio.create_subprocess_exec(
            sys.executable,
            "-m",
            "unshackle.core.api.download_worker",
            payload_path,
            result_path,
            progress_path,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )

        self._download_processes[job.job_id] = process
        self._job_temp_files[job.job_id] = {"payload": payload_path, "result": result_path, "progress": progress_path}

        communicate_task = asyncio.create_task(process.communicate())

        stdout_bytes = b""
        stderr_bytes = b""

        try:
            while True:
                done, _ = await asyncio.wait({communicate_task}, timeout=0.5)
                if communicate_task in done:
                    stdout_bytes, stderr_bytes = communicate_task.result()
                    break

                # Check for progress updates
                try:
                    if os.path.exists(progress_path):
                        with open(progress_path, "r", encoding="utf-8") as handle:
                            progress_data = json.load(handle)
                        if "progress" in progress_data:
                            new_progress = float(progress_data["progress"])
                            if new_progress != job.progress:
                                job.progress = new_progress
                                log.info(f"Job {job.job_id} progress updated: {job.progress}%")
                except (FileNotFoundError, json.JSONDecodeError, ValueError) as e:
                    log.debug(f"Could not read progress for job {job.job_id}: {e}")

                if job.cancel_event.is_set() or job.status == JobStatus.CANCELLED:
                    log.info(f"Cancellation detected for job {job.job_id}, terminating worker process")
                    process.terminate()
                    try:
                        await asyncio.wait_for(communicate_task, timeout=5)
                    except asyncio.TimeoutError:
                        log.warning(f"Worker process for job {job.job_id} did not terminate, killing")
                        process.kill()
                        await asyncio.wait_for(communicate_task, timeout=5)
                    raise asyncio.CancelledError("Job was cancelled")

            returncode = process.returncode
            stdout = stdout_bytes.decode("utf-8", errors="ignore")
            stderr = stderr_bytes.decode("utf-8", errors="ignore")

            if stdout.strip():
                log.debug(f"Worker stdout for job {job.job_id}: {stdout.strip()}")
            if stderr.strip():
                log.warning(f"Worker stderr for job {job.job_id}: {stderr.strip()}")

            result_data: Optional[Dict[str, Any]] = None
            try:
                with open(result_path, "r", encoding="utf-8") as handle:
                    result_data = json.load(handle)
            except FileNotFoundError:
                log.error(f"Result file missing for job {job.job_id}")
            except json.JSONDecodeError as exc:
                log.error(f"Failed to parse worker result for job {job.job_id}: {exc}")

            if returncode != 0:
                message = result_data.get("message") if result_data else "unknown error"
                raise Exception(f"Worker exited with code {returncode}: {message}")

            if not result_data or result_data.get("status") != "success":
                message = result_data.get("message") if result_data else "worker did not report success"
                raise Exception(f"Worker failure: {message}")

            return result_data.get("output_files", [])

        finally:
            if not communicate_task.done():
                communicate_task.cancel()
                with suppress(asyncio.CancelledError):
                    await communicate_task

            self._download_processes.pop(job.job_id, None)

            temp_paths = self._job_temp_files.pop(job.job_id, {})
            for path in temp_paths.values():
                try:
                    os.remove(path)
                except OSError:
                    pass

    def _execute_download_sync(self, job: DownloadJob) -> List[str]:
        """Execute download synchronously using existing dl.py logic."""
        return _perform_download(job.job_id, job.service, job.title_id, job.parameters.copy(), job.cancel_event)

    async def _cleanup_worker(self):
        """Worker that periodically cleans up old jobs."""
        while not self._shutdown_event.is_set():
            try:
                await asyncio.sleep(3600)  # Run every hour
                self.cleanup_old_jobs()
            except Exception as e:
                log.error(f"Cleanup worker error: {e}")


# Global instance
download_manager: Optional[DownloadQueueManager] = None


def get_download_manager() -> DownloadQueueManager:
    """Get the global download manager instance."""
    global download_manager
    if download_manager is None:
        # Load configuration from unshackle config
        from unshackle.core.config import config

        max_concurrent = getattr(config, "max_concurrent_downloads", 2)
        retention_hours = getattr(config, "download_job_retention_hours", 24)

        download_manager = DownloadQueueManager(max_concurrent, retention_hours)

    return download_manager
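
For reference, this is roughly how the queue manager above is meant to be driven from an async context. A minimal sketch only: the service tag and title ID are placeholders, and it assumes `JobStatus` is importable from the same module, as the code above suggests.

```python
import asyncio

from unshackle.core.api.download_manager import JobStatus, get_download_manager


async def queue_one_download() -> None:
    manager = get_download_manager()
    await manager.start_workers()  # idempotent; spawns worker + cleanup tasks

    # "EXAMPLE" / "some-title-id" are placeholders for a real service tag and title
    job = manager.create_job("EXAMPLE", "some-title-id", quality=[1080])

    # Poll until the job leaves the active states
    while job.status in (JobStatus.QUEUED, JobStatus.DOWNLOADING):
        await asyncio.sleep(1)

    print(job.job_id, job.status, job.output_files)


asyncio.run(queue_one_download())
```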

unshackle/core/api/download_worker.py (new file, 84 lines)
@@ -0,0 +1,84 @@
"""Standalone worker process entry point for executing download jobs."""

from __future__ import annotations

import json
import logging
import sys
import traceback
from pathlib import Path
from typing import Any, Dict

from .download_manager import _perform_download

log = logging.getLogger("download_worker")


def _read_payload(path: Path) -> Dict[str, Any]:
    with path.open("r", encoding="utf-8") as handle:
        return json.load(handle)


def _write_result(path: Path, payload: Dict[str, Any]) -> None:
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("w", encoding="utf-8") as handle:
        json.dump(payload, handle)


def main(argv: list[str]) -> int:
    if len(argv) not in [3, 4]:
        print(
            "Usage: python -m unshackle.core.api.download_worker <payload_path> <result_path> [progress_path]",
            file=sys.stderr,
        )
        return 2

    payload_path = Path(argv[1])
    result_path = Path(argv[2])
    progress_path = Path(argv[3]) if len(argv) > 3 else None

    result: Dict[str, Any] = {}
    exit_code = 0

    try:
        payload = _read_payload(payload_path)
        job_id = payload["job_id"]
        service = payload["service"]
        title_id = payload["title_id"]
        params = payload.get("parameters", {})

        log.info(f"Worker starting job {job_id} ({service}:{title_id})")

        def progress_callback(progress_data: Dict[str, Any]) -> None:
            """Write progress updates to file for main process to read."""
            if progress_path:
                try:
                    log.info(f"Writing progress update: {progress_data}")
                    _write_result(progress_path, progress_data)
                    log.info(f"Progress update written to {progress_path}")
                except Exception as e:
                    log.error(f"Failed to write progress update: {e}")

        output_files = _perform_download(
            job_id, service, title_id, params, cancel_event=None, progress_callback=progress_callback
        )

        result = {"status": "success", "output_files": output_files}

    except Exception as exc:  # noqa: BLE001 - capture for parent process
        exit_code = 1
        tb = traceback.format_exc()
        log.error(f"Worker failed with error: {exc}")
        result = {"status": "error", "message": str(exc), "traceback": tb}

    finally:
        try:
            _write_result(result_path, result)
        except Exception as exc:  # noqa: BLE001 - last resort logging
            log.error(f"Failed to write worker result file: {exc}")

    return exit_code


if __name__ == "__main__":
    sys.exit(main(sys.argv))
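
The parent/worker contract is three JSON files: a payload the manager writes, a result the worker writes on exit, and an optional progress file the worker overwrites as it goes. A hedged sketch of exercising the worker in isolation for debugging; the module path and JSON shapes come from the diff above, while the service tag and title ID are placeholders that would need real values.

```python
import json
import subprocess
import sys
import tempfile
from pathlib import Path

tmp = Path(tempfile.mkdtemp())
payload = tmp / "payload.json"
result = tmp / "result.json"
progress = tmp / "progress.json"

# Same shape as the payload _run_download_async writes
payload.write_text(json.dumps({
    "job_id": "debug-1",
    "service": "EXAMPLE",          # placeholder service tag
    "title_id": "some-title-id",   # placeholder title
    "parameters": {"quality": [720]},
}))

proc = subprocess.run(
    [sys.executable, "-m", "unshackle.core.api.download_worker",
     str(payload), str(result), str(progress)],
)
# The worker writes the result file even on failure (see the finally block above)
print(proc.returncode, json.loads(result.read_text()))
```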

unshackle/core/api/handlers.py (new file, 652 lines)
@@ -0,0 +1,652 @@
import logging
from typing import Any, Dict, List, Optional

from aiohttp import web

from unshackle.core.constants import AUDIO_CODEC_MAP, DYNAMIC_RANGE_MAP, VIDEO_CODEC_MAP
from unshackle.core.proxies.basic import Basic
from unshackle.core.proxies.hola import Hola
from unshackle.core.proxies.nordvpn import NordVPN
from unshackle.core.proxies.surfsharkvpn import SurfsharkVPN
from unshackle.core.services import Services
from unshackle.core.titles import Episode, Movie, Title_T
from unshackle.core.tracks import Audio, Subtitle, Video

log = logging.getLogger("api")


def initialize_proxy_providers() -> List[Any]:
    """Initialize and return available proxy providers."""
    proxy_providers = []
    try:
        from unshackle.core import binaries
        # Load the main unshackle config to get proxy provider settings
        from unshackle.core.config import config as main_config

        log.debug(f"Main config proxy providers: {getattr(main_config, 'proxy_providers', {})}")
        log.debug(f"Available proxy provider configs: {list(getattr(main_config, 'proxy_providers', {}).keys())}")

        # Use main_config instead of the service-specific config for proxy providers
        proxy_config = getattr(main_config, "proxy_providers", {})

        if proxy_config.get("basic"):
            log.debug("Loading Basic proxy provider")
            proxy_providers.append(Basic(**proxy_config["basic"]))
        if proxy_config.get("nordvpn"):
            log.debug("Loading NordVPN proxy provider")
            proxy_providers.append(NordVPN(**proxy_config["nordvpn"]))
        if proxy_config.get("surfsharkvpn"):
            log.debug("Loading SurfsharkVPN proxy provider")
            proxy_providers.append(SurfsharkVPN(**proxy_config["surfsharkvpn"]))
        if hasattr(binaries, "HolaProxy") and binaries.HolaProxy:
            log.debug("Loading Hola proxy provider")
            proxy_providers.append(Hola())

        for proxy_provider in proxy_providers:
            log.info(f"Loaded {proxy_provider.__class__.__name__}: {proxy_provider}")

        if not proxy_providers:
            log.warning("No proxy providers were loaded. Check your proxy provider configuration in unshackle.yaml")

    except Exception as e:
        log.warning(f"Failed to initialize some proxy providers: {e}")

    return proxy_providers


def resolve_proxy(proxy: str, proxy_providers: List[Any]) -> str:
    """Resolve proxy parameter to actual proxy URI."""
    import re

    if not proxy:
        return proxy

    # Check if explicit proxy URI
    if re.match(r"^https?://", proxy):
        return proxy

    # Handle provider:country format (e.g., "nordvpn:us")
    requested_provider = None
    if re.match(r"^[a-z]+:.+$", proxy, re.IGNORECASE):
        requested_provider, proxy = proxy.split(":", maxsplit=1)

    # Handle country code format (e.g., "us", "uk")
    if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE):
        proxy = proxy.lower()

        if requested_provider:
            # Find specific provider (case-insensitive matching)
            proxy_provider = next(
                (x for x in proxy_providers if x.__class__.__name__.lower() == requested_provider.lower()),
                None,
            )
            if not proxy_provider:
                available_providers = [x.__class__.__name__ for x in proxy_providers]
                raise ValueError(
                    f"The proxy provider '{requested_provider}' was not recognized. Available providers: {available_providers}"
                )

            proxy_uri = proxy_provider.get_proxy(proxy)
            if not proxy_uri:
                raise ValueError(f"The proxy provider {requested_provider} had no proxy for {proxy}")

            log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy_uri}")
            return proxy_uri
        else:
            # Try all providers
            for proxy_provider in proxy_providers:
                proxy_uri = proxy_provider.get_proxy(proxy)
                if proxy_uri:
                    log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy_uri}")
                    return proxy_uri

            raise ValueError(f"No proxy provider had a proxy for {proxy}")

    # Return as-is if not recognized format
    log.info(f"Using explicit Proxy: {proxy}")
    return proxy
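
`resolve_proxy` accepts three spellings of the proxy parameter. An illustrative sketch, assuming the provider list is whatever `initialize_proxy_providers()` returned:

```python
providers = initialize_proxy_providers()

# Explicit URI: matched by the ^https?:// check and returned unchanged
resolve_proxy("http://user:pass@1.2.3.4:8080", providers)

# Country code: every loaded provider is tried until one returns a proxy
resolve_proxy("us", providers)

# provider:country: only the named provider (case-insensitive) is consulted,
# and a ValueError is raised if it is unknown or has no proxy for that country
resolve_proxy("nordvpn:us", providers)
```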

def validate_service(service_tag: str) -> Optional[str]:
    """Validate and normalize service tag."""
    try:
        normalized = Services.get_tag(service_tag)
        service_path = Services.get_path(normalized)
        if not service_path.exists():
            return None
        return normalized
    except Exception:
        return None


def serialize_title(title: Title_T) -> Dict[str, Any]:
    """Convert a title object to JSON-serializable dict."""
    if isinstance(title, Episode):
        episode_name = title.name if title.name else f"Episode {title.number:02d}"
        result = {
            "type": "episode",
            "name": episode_name,
            "series_title": str(title.title),
            "season": title.season,
            "number": title.number,
            "year": title.year,
            "id": str(title.id) if hasattr(title, "id") else None,
        }
    elif isinstance(title, Movie):
        result = {
            "type": "movie",
            "name": str(title.name) if hasattr(title, "name") else str(title),
            "year": title.year,
            "id": str(title.id) if hasattr(title, "id") else None,
        }
    else:
        result = {
            "type": "other",
            "name": str(title.name) if hasattr(title, "name") else str(title),
            "id": str(title.id) if hasattr(title, "id") else None,
        }

    return result


def serialize_video_track(track: Video) -> Dict[str, Any]:
    """Convert video track to JSON-serializable dict."""
    codec_name = track.codec.name if hasattr(track.codec, "name") else str(track.codec)
    range_name = track.range.name if hasattr(track.range, "name") else str(track.range)

    return {
        "id": str(track.id),
        "codec": codec_name,
        "codec_display": VIDEO_CODEC_MAP.get(codec_name, codec_name),
        "bitrate": int(track.bitrate / 1000) if track.bitrate else None,
        "width": track.width,
        "height": track.height,
        "resolution": f"{track.width}x{track.height}" if track.width and track.height else None,
        "fps": track.fps if track.fps else None,
        "range": range_name,
        "range_display": DYNAMIC_RANGE_MAP.get(range_name, range_name),
        "language": str(track.language) if track.language else None,
        "drm": str(track.drm) if hasattr(track, "drm") and track.drm else None,
    }


def serialize_audio_track(track: Audio) -> Dict[str, Any]:
    """Convert audio track to JSON-serializable dict."""
    codec_name = track.codec.name if hasattr(track.codec, "name") else str(track.codec)

    return {
        "id": str(track.id),
        "codec": codec_name,
        "codec_display": AUDIO_CODEC_MAP.get(codec_name, codec_name),
        "bitrate": int(track.bitrate / 1000) if track.bitrate else None,
        "channels": track.channels if track.channels else None,
        "language": str(track.language) if track.language else None,
        "atmos": track.atmos if hasattr(track, "atmos") else False,
        "descriptive": track.descriptive if hasattr(track, "descriptive") else False,
        "drm": str(track.drm) if hasattr(track, "drm") and track.drm else None,
    }


def serialize_subtitle_track(track: Subtitle) -> Dict[str, Any]:
    """Convert subtitle track to JSON-serializable dict."""
    return {
        "id": str(track.id),
        "codec": track.codec.name if hasattr(track.codec, "name") else str(track.codec),
        "language": str(track.language) if track.language else None,
        "forced": track.forced if hasattr(track, "forced") else False,
        "sdh": track.sdh if hasattr(track, "sdh") else False,
        "cc": track.cc if hasattr(track, "cc") else False,
    }
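
The shape a serialized video track takes on the wire follows directly from `serialize_video_track` above. The field names are from the code; all values below (including the mapped display strings) are made up for illustration:

```python
example_video_track = {
    "id": "abc123",
    "codec": "H264",
    "codec_display": "AVC",          # illustrative; mapped via VIDEO_CODEC_MAP
    "bitrate": 4800,                 # kbps, i.e. track.bitrate / 1000
    "width": 1920,
    "height": 1080,
    "resolution": "1920x1080",
    "fps": 23.976,
    "range": "SDR",
    "range_display": "SDR",          # illustrative; mapped via DYNAMIC_RANGE_MAP
    "language": "en",
    "drm": None,
}
```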

async def list_titles_handler(data: Dict[str, Any]) -> web.Response:
    """Handle list-titles request."""
    service_tag = data.get("service")
    title_id = data.get("title_id")
    profile = data.get("profile")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        import inspect

        import click
        import yaml

        from unshackle.commands.dl import dl
        from unshackle.core.config import config
        from unshackle.core.utils.click_types import ContextData
        from unshackle.core.utils.collections import merge_dict

        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title_id], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title_id}

        # Add additional parameters from request data
        for key, value in data.items():
            if key not in ["service", "title_id", "profile", "season", "episode", "wanted", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameter info and click command defaults
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract default values from the click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    # Add default value if parameter is not already provided
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters that don't have click defaults
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                # Check if parameter is required (no default value in signature)
                if param_info.default is inspect.Parameter.empty:
                    # Provide sensible defaults for common required parameters
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False
                    else:
                        # Log warning for unknown required parameters
                        log.warning(f"Unknown required parameter '{param_name}' for service {normalized_service}")

        # Filter out any parameters that the service doesn't accept
        filtered_kwargs = {}
        for key, value in service_kwargs.items():
            if key in service_init_params:
                filtered_kwargs[key] = value

        service_instance = service_module(service_ctx, **filtered_kwargs)

        cookies = dl.get_cookie_jar(normalized_service, profile)
        credential = dl.get_credentials(normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        titles = service_instance.get_titles()

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            title_list = [serialize_title(t) for t in titles]
        else:
            title_list = [serialize_title(titles)]

        return web.json_response({"titles": title_list})

    except Exception as e:
        log.exception("Error listing titles")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
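
Because the handler takes a plain dict rather than a request object, it can be exercised directly, e.g. from a test. A minimal sketch; the service tag and title ID are placeholders for real values:

```python
import asyncio

from unshackle.core.api.handlers import list_titles_handler

response = asyncio.run(list_titles_handler({"service": "EXAMPLE", "title_id": "some-title-id"}))
print(response.status, response.text)  # 200 + {"titles": [...]} on success
```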

async def list_tracks_handler(data: Dict[str, Any]) -> web.Response:
    """Handle list-tracks request."""
    service_tag = data.get("service")
    title_id = data.get("title_id")
    profile = data.get("profile")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        import inspect

        import click
        import yaml

        from unshackle.commands.dl import dl
        from unshackle.core.config import config
        from unshackle.core.utils.click_types import ContextData
        from unshackle.core.utils.collections import merge_dict

        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title_id], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title_id}

        # Add additional parameters from request data
        for key, value in data.items():
            if key not in ["service", "title_id", "profile", "season", "episode", "wanted", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameter info and click command defaults
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract default values from the click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    # Add default value if parameter is not already provided
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters that don't have click defaults
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                # Check if parameter is required (no default value in signature)
                if param_info.default is inspect.Parameter.empty:
                    # Provide sensible defaults for common required parameters
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False
                    else:
                        # Log warning for unknown required parameters
                        log.warning(f"Unknown required parameter '{param_name}' for service {normalized_service}")

        # Filter out any parameters that the service doesn't accept
        filtered_kwargs = {}
        for key, value in service_kwargs.items():
            if key in service_init_params:
                filtered_kwargs[key] = value

        service_instance = service_module(service_ctx, **filtered_kwargs)

        cookies = dl.get_cookie_jar(normalized_service, profile)
        credential = dl.get_credentials(normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        titles = service_instance.get_titles()

        wanted_param = data.get("wanted")
        season = data.get("season")
        episode = data.get("episode")

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            titles_list = list(titles)

            wanted = None
            if wanted_param:
                from unshackle.core.utils.click_types import SeasonRange

                try:
                    season_range = SeasonRange()
                    wanted = season_range.parse_tokens(wanted_param)
                    log.debug(f"Parsed wanted '{wanted_param}' into {len(wanted)} episodes: {wanted[:10]}...")
                except Exception as e:
                    return web.json_response(
                        {"status": "error", "message": f"Invalid wanted parameter: {e}"}, status=400
                    )
            elif season is not None and episode is not None:
                wanted = [f"{season}x{episode}"]

            if wanted:
                # Filter titles based on wanted episodes, similar to how dl.py does it
                matching_titles = []
                log.debug(f"Filtering {len(titles_list)} titles with {len(wanted)} wanted episodes")
                for title in titles_list:
                    if isinstance(title, Episode):
                        episode_key = f"{title.season}x{title.number}"
                        if episode_key in wanted:
                            log.debug(f"Episode {episode_key} matches wanted list")
                            matching_titles.append(title)
                        else:
                            log.debug(f"Episode {episode_key} not in wanted list")
                    else:
                        matching_titles.append(title)

                log.debug(f"Found {len(matching_titles)} matching titles")

                if not matching_titles:
                    return web.json_response(
                        {"status": "error", "message": "No episodes found matching wanted criteria"}, status=404
                    )

                # If multiple episodes match, return tracks for all episodes
                if len(matching_titles) > 1 and all(isinstance(t, Episode) for t in matching_titles):
                    episodes_data = []
                    failed_episodes = []

                    # Sort matching titles by season and episode number for consistent ordering
                    sorted_titles = sorted(matching_titles, key=lambda t: (t.season, t.number))

                    for title in sorted_titles:
                        try:
                            tracks = service_instance.get_tracks(title)
                            video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
                            audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)

                            episode_data = {
                                "title": serialize_title(title),
                                "video": [serialize_video_track(t) for t in video_tracks],
                                "audio": [serialize_audio_track(t) for t in audio_tracks],
                                "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
                            }
                            episodes_data.append(episode_data)
                            log.debug(f"Successfully got tracks for {title.season}x{title.number}")
                        except SystemExit:
                            # Service calls sys.exit() for unavailable episodes - catch and skip
                            failed_episodes.append(f"S{title.season}E{title.number:02d}")
                            log.debug(f"Episode {title.season}x{title.number} not available, skipping")
                            continue
                        except Exception as e:
                            # Handle other errors gracefully
                            failed_episodes.append(f"S{title.season}E{title.number:02d}")
                            log.debug(f"Error getting tracks for {title.season}x{title.number}: {e}")
                            continue

                    if episodes_data:
                        response = {"episodes": episodes_data}
                        if failed_episodes:
                            response["unavailable_episodes"] = failed_episodes
                        return web.json_response(response)
                    else:
                        return web.json_response(
                            {
                                "status": "error",
                                "message": f"No available episodes found. Unavailable: {', '.join(failed_episodes)}",
                            },
                            status=404,
                        )
                else:
                    # Single episode or movie
                    first_title = matching_titles[0]
            else:
                first_title = titles_list[0]
        else:
            first_title = titles

        tracks = service_instance.get_tracks(first_title)

        video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
        audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)

        response = {
            "title": serialize_title(first_title),
            "video": [serialize_video_track(t) for t in video_tracks],
            "audio": [serialize_audio_track(t) for t in audio_tracks],
            "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
        }

        return web.json_response(response)

    except Exception as e:
        log.exception("Error listing tracks")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
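
Episode selection in the handler above can be driven two ways: an explicit `season`/`episode` pair (turned into a single `"SxE"` key) or a `wanted` string parsed by `SeasonRange`. A sketch of the two request payloads; the service tag and title ID are placeholders, and the range syntax shown is an assumption based on the `"SxE"` keys the code builds, not a confirmed grammar:

```python
single_episode = {"service": "EXAMPLE", "title_id": "some-show", "season": 1, "episode": 3}
episode_range = {"service": "EXAMPLE", "title_id": "some-show", "wanted": "1x1-1x5"}
```

With multiple matching episodes, the response switches from a single `title`/`video`/`audio`/`subtitles` object to an `episodes` array, with any unavailable episodes listed under `unavailable_episodes`.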

async def download_handler(data: Dict[str, Any]) -> web.Response:
    """Handle download request - create and queue a download job."""
    from unshackle.core.api.download_manager import get_download_manager

    service_tag = data.get("service")
    title_id = data.get("title_id")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        # Get download manager and start workers if needed
        manager = get_download_manager()
        await manager.start_workers()

        # Create download job with filtered parameters (exclude service and title_id as they're already passed)
        filtered_params = {k: v for k, v in data.items() if k not in ["service", "title_id"]}
        job = manager.create_job(normalized_service, title_id, **filtered_params)

        return web.json_response(
            {"job_id": job.job_id, "status": job.status.value, "created_time": job.created_time.isoformat()}, status=202
        )

    except Exception as e:
        log.exception("Error creating download job")
        return web.json_response({"status": "error", "message": str(e)}, status=500)


async def list_download_jobs_handler(data: Dict[str, Any]) -> web.Response:
    """Handle list download jobs request."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        manager = get_download_manager()
        jobs = manager.list_jobs()

        job_list = [job.to_dict(include_full_details=False) for job in jobs]

        return web.json_response({"jobs": job_list})

    except Exception as e:
        log.exception("Error listing download jobs")
        return web.json_response({"status": "error", "message": str(e)}, status=500)


async def get_download_job_handler(job_id: str) -> web.Response:
    """Handle get specific download job request."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        manager = get_download_manager()
        job = manager.get_job(job_id)

        if not job:
            return web.json_response({"status": "error", "message": "Job not found"}, status=404)

        return web.json_response(job.to_dict(include_full_details=True))

    except Exception as e:
        log.exception(f"Error getting download job {job_id}")
        return web.json_response({"status": "error", "message": str(e)}, status=500)


async def cancel_download_job_handler(job_id: str) -> web.Response:
    """Handle cancel download job request."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        manager = get_download_manager()

        if not manager.get_job(job_id):
            return web.json_response({"status": "error", "message": "Job not found"}, status=404)

        success = manager.cancel_job(job_id)

        if success:
            return web.json_response({"status": "success", "message": "Job cancelled"})
        else:
            return web.json_response({"status": "error", "message": "Job cannot be cancelled"}, status=400)

    except Exception as e:
        log.exception(f"Error cancelling download job {job_id}")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
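
Taken together, these handlers give a simple polled job lifecycle: create (202 with a `job_id`), inspect, cancel. A hedged end-to-end sketch calling the handlers directly; in production they are reached through the HTTP routes registered in routes.py below, and `"EXAMPLE"` / `"some-title-id"` must be replaced with a real service tag and title for the create step to succeed:

```python
import asyncio
import json

from unshackle.core.api import handlers


async def lifecycle() -> None:
    created = await handlers.download_handler({"service": "EXAMPLE", "title_id": "some-title-id"})
    job_id = json.loads(created.text)["job_id"]  # present only on the 202 success path

    status = await handlers.get_download_job_handler(job_id)
    print(json.loads(status.text))

    cancelled = await handlers.cancel_download_job_handler(job_id)
    print(json.loads(cancelled.text))


asyncio.run(lifecycle())
```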

unshackle/core/api/routes.py (new file, 388 lines)
@@ -0,0 +1,388 @@
import logging

from aiohttp import web
from aiohttp_swagger3 import SwaggerDocs, SwaggerInfo, SwaggerUiSettings

from unshackle.core import __version__
from unshackle.core.api.handlers import (cancel_download_job_handler, download_handler, get_download_job_handler,
                                         list_download_jobs_handler, list_titles_handler, list_tracks_handler)
from unshackle.core.services import Services
from unshackle.core.update_checker import UpdateChecker


@web.middleware
async def cors_middleware(request: web.Request, handler):
    """Add CORS headers to all responses."""
    # Handle preflight requests
    if request.method == "OPTIONS":
        response = web.Response()
    else:
        response = await handler(request)

    # Add CORS headers
    response.headers["Access-Control-Allow-Origin"] = "*"
    response.headers["Access-Control-Allow-Methods"] = "GET, POST, PUT, DELETE, OPTIONS"
    response.headers["Access-Control-Allow-Headers"] = "Content-Type, X-API-Key, Authorization"
    response.headers["Access-Control-Max-Age"] = "3600"

    return response


log = logging.getLogger("api")


async def health(request: web.Request) -> web.Response:
    """
    Health check endpoint.
    ---
    summary: Health check
    description: Get server health status, version info, and update availability
    responses:
      '200':
        description: Health status
        content:
          application/json:
            schema:
              type: object
              properties:
                status:
                  type: string
                  example: ok
                version:
                  type: string
                  example: "2.0.0"
                update_check:
                  type: object
                  properties:
                    update_available:
                      type: boolean
                      nullable: true
                    current_version:
                      type: string
                    latest_version:
                      type: string
                      nullable: true
    """
    try:
        latest_version = await UpdateChecker.check_for_updates(__version__)
        update_info = {
            "update_available": latest_version is not None,
            "current_version": __version__,
            "latest_version": latest_version,
        }
    except Exception as e:
        log.warning(f"Failed to check for updates: {e}")
        update_info = {"update_available": None, "current_version": __version__, "latest_version": None}

    return web.json_response({"status": "ok", "version": __version__, "update_check": update_info})


async def services(request: web.Request) -> web.Response:
    """
    List available services.
    ---
    summary: List services
    description: Get all available streaming services with their details
    responses:
      '200':
        description: List of services
        content:
          application/json:
            schema:
              type: object
              properties:
                services:
                  type: array
                  items:
                    type: object
                    properties:
                      tag:
                        type: string
                      aliases:
                        type: array
                        items:
                          type: string
                      geofence:
                        type: array
                        items:
                          type: string
                      title_regex:
                        type: string
                        nullable: true
                      help:
                        type: string
                        nullable: true
      '500':
        description: Server error
    """
    try:
        service_tags = Services.get_tags()
        services_info = []

        for tag in service_tags:
            service_data = {"tag": tag, "aliases": [], "geofence": [], "title_regex": None, "help": None}

            try:
                service_module = Services.load(tag)

                if hasattr(service_module, "ALIASES"):
                    service_data["aliases"] = list(service_module.ALIASES)

                if hasattr(service_module, "GEOFENCE"):
                    service_data["geofence"] = list(service_module.GEOFENCE)

                if hasattr(service_module, "TITLE_RE"):
                    service_data["title_regex"] = service_module.TITLE_RE

                if service_module.__doc__:
                    service_data["help"] = service_module.__doc__.strip()

            except Exception as e:
                log.warning(f"Could not load details for service {tag}: {e}")

            services_info.append(service_data)

        return web.json_response({"services": services_info})
    except Exception as e:
        log.exception("Error listing services")
        return web.json_response({"status": "error", "message": str(e)}, status=500)


async def list_titles(request: web.Request) -> web.Response:
    """
    List titles for a service and title ID.
    ---
    summary: List titles
    description: Get available titles for a service and title ID
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - service
              - title_id
            properties:
              service:
                type: string
                description: Service tag
              title_id:
                type: string
                description: Title identifier
    responses:
      '200':
        description: List of titles
      '400':
        description: Invalid request
    """
    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    return await list_titles_handler(data)


async def list_tracks(request: web.Request) -> web.Response:
    """
    List tracks for a title, separated by type.
    ---
    summary: List tracks
    description: Get available video, audio, and subtitle tracks for a title
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - service
              - title_id
            properties:
              service:
                type: string
                description: Service tag
              title_id:
                type: string
                description: Title identifier
              wanted:
                type: string
                description: Specific episode/season (optional)
              proxy:
                type: string
                description: Proxy configuration (optional)
    responses:
      '200':
        description: Track information
      '400':
        description: Invalid request
    """
    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    return await list_tracks_handler(data)


async def download(request: web.Request) -> web.Response:
    """
    Download content based on provided parameters.
    ---
    summary: Download content
    description: Download video content based on specified parameters
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - service
              - title_id
            properties:
              service:
                type: string
                description: Service tag
              title_id:
                type: string
                description: Title identifier
    responses:
      '200':
        description: Download started
      '400':
        description: Invalid request
    """
    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    return await download_handler(data)


async def download_jobs(request: web.Request) -> web.Response:
    """
    List all download jobs.
    ---
    summary: List download jobs
    description: Get list of all download jobs with their status
    responses:
      '200':
        description: List of download jobs
        content:
          application/json:
            schema:
              type: object
              properties:
                jobs:
                  type: array
                  items:
                    type: object
                    properties:
                      job_id:
                        type: string
                      status:
                        type: string
                      created_time:
                        type: string
                      service:
                        type: string
                      title_id:
                        type: string
                      progress:
                        type: number
      '500':
        description: Server error
    """
    return await list_download_jobs_handler({})


async def download_job_detail(request: web.Request) -> web.Response:
    """
    Get download job details.
    ---
    summary: Get download job
    description: Get detailed information about a specific download job
    parameters:
      - name: job_id
        in: path
        required: true
        schema:
          type: string
    responses:
      '200':
        description: Download job details
      '404':
        description: Job not found
      '500':
        description: Server error
    """
    job_id = request.match_info["job_id"]
    return await get_download_job_handler(job_id)


async def cancel_download_job(request: web.Request) -> web.Response:
    """
    Cancel download job.
    ---
|
summary: Cancel download job
|
||||||
|
description: Cancel a queued or running download job
|
||||||
|
parameters:
|
||||||
|
- name: job_id
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Job cancelled successfully
|
||||||
|
'400':
|
||||||
|
description: Job cannot be cancelled
|
||||||
|
'404':
|
||||||
|
description: Job not found
|
||||||
|
'500':
|
||||||
|
description: Server error
|
||||||
|
"""
|
||||||
|
job_id = request.match_info["job_id"]
|
||||||
|
return await cancel_download_job_handler(job_id)
|
||||||
|
|
||||||
|
|
||||||
|
def setup_routes(app: web.Application) -> None:
|
||||||
|
"""Setup all API routes."""
|
||||||
|
app.router.add_get("/api/health", health)
|
||||||
|
app.router.add_get("/api/services", services)
|
||||||
|
app.router.add_post("/api/list-titles", list_titles)
|
||||||
|
app.router.add_post("/api/list-tracks", list_tracks)
|
||||||
|
app.router.add_post("/api/download", download)
|
||||||
|
app.router.add_get("/api/download/jobs", download_jobs)
|
||||||
|
app.router.add_get("/api/download/jobs/{job_id}", download_job_detail)
|
||||||
|
app.router.add_delete("/api/download/jobs/{job_id}", cancel_download_job)
|
||||||
|
|
||||||
|
|
||||||
|
def setup_swagger(app: web.Application) -> None:
|
||||||
|
"""Setup Swagger UI documentation."""
|
||||||
|
swagger = SwaggerDocs(
|
||||||
|
app,
|
||||||
|
swagger_ui_settings=SwaggerUiSettings(path="/api/docs/"),
|
||||||
|
info=SwaggerInfo(
|
||||||
|
title="Unshackle REST API",
|
||||||
|
version=__version__,
|
||||||
|
description="REST API for Unshackle - Modular Movie, TV, and Music Archival Software",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add routes with OpenAPI documentation
|
||||||
|
swagger.add_routes(
|
||||||
|
[
|
||||||
|
web.get("/api/health", health),
|
||||||
|
web.get("/api/services", services),
|
||||||
|
web.post("/api/list-titles", list_titles),
|
||||||
|
web.post("/api/list-tracks", list_tracks),
|
||||||
|
web.post("/api/download", download),
|
||||||
|
web.get("/api/download/jobs", download_jobs),
|
||||||
|
web.get("/api/download/jobs/{job_id}", download_job_detail),
|
||||||
|
web.delete("/api/download/jobs/{job_id}", cancel_download_job),
|
||||||
|
]
|
||||||
|
)
|
||||||
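`setup_routes` and `setup_swagger` register the same handlers, the latter wrapping them with OpenAPI validation and a Swagger UI. A minimal sketch of serving them standalone; the import path and port are assumptions for illustration, since `unshackle serve` normally performs this wiring itself:

```python
from aiohttp import web

# Hypothetical import path for the module shown above.
from unshackle.core.api.routes import setup_swagger

app = web.Application()
setup_swagger(app)  # registers the documented routes and serves Swagger UI at /api/docs/
web.run_app(app, host="127.0.0.1", port=8786)  # port is an arbitrary example
```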
@@ -8,22 +8,23 @@ __shaka_platform = {"win32": "win", "darwin": "osx"}.get(sys.platform, sys.platf
 
 def find(*names: str) -> Optional[Path]:
     """Find the path of the first found binary name."""
-    # Get the directory containing this file to find the local binaries folder
-    current_dir = Path(__file__).parent.parent
+    current_dir = Path(__file__).resolve().parent.parent
     local_binaries_dir = current_dir / "binaries"
 
+    ext = ".exe" if sys.platform == "win32" else ""
+
     for name in names:
-        # First check local binaries folder
         if local_binaries_dir.exists():
-            local_path = local_binaries_dir / name
-            if local_path.is_file() and local_path.stat().st_mode & 0o111:  # Check if executable
-                return local_path
-
-            # Also check with .exe extension on Windows
-            if sys.platform == "win32":
-                local_path_exe = local_binaries_dir / f"{name}.exe"
-                if local_path_exe.is_file():
-                    return local_path_exe
+            candidate_paths = [
+                local_binaries_dir / f"{name}{ext}",
+                local_binaries_dir / name / f"{name}{ext}",
+            ]
+
+            for path in candidate_paths:
+                if path.is_file():
+                    # On Unix-like systems, check if file is executable
+                    if sys.platform == "win32" or (path.stat().st_mode & 0o111):
+                        return path
 
         # Fall back to system PATH
        path = shutil.which(name)
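The reworked `find()` now probes `binaries/<name>` and `binaries/<name>/<name>` (adding `.exe` on Windows) before falling back to the system `PATH`. A small usage sketch, assuming the helper lives at `unshackle.core.binaries`:

```python
# The import path (unshackle.core.binaries) is assumed from context.
from unshackle.core.binaries import find

ffmpeg = find("ffmpeg")  # binaries/ffmpeg, then binaries/ffmpeg/ffmpeg, then PATH
if ffmpeg is None:
    raise EnvironmentError("ffmpeg not found in the local binaries folder or on PATH")
print(f"using {ffmpeg}")
```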
unshackle/core/cdm/__init__.py:

@@ -1,3 +1,4 @@
+from .custom_remote_cdm import CustomRemoteCDM
 from .decrypt_labs_remote_cdm import DecryptLabsRemoteCDM
 
-__all__ = ["DecryptLabsRemoteCDM"]
+__all__ = ["DecryptLabsRemoteCDM", "CustomRemoteCDM"]
unshackle/core/cdm/custom_remote_cdm.py (new file, 1085 lines) — diff suppressed because it is too large.
@@ -31,6 +31,7 @@ class Config:
     class _Filenames:
         # default filenames, do not modify here, set via config
         log = "unshackle_{name}_{time}.log"  # Directories.logs
+        debug_log = "unshackle_debug_{service}_{time}.jsonl"  # Directories.logs
         config = "config.yaml"  # Directories.services / tag
         root_config = "unshackle.yaml"  # Directories.user_configs
         chapters = "Chapters_{title}_{random}.txt"  # Directories.temp
@@ -88,6 +89,7 @@ class Config:
         self.tag_group_name: bool = kwargs.get("tag_group_name", True)
         self.tag_imdb_tmdb: bool = kwargs.get("tag_imdb_tmdb", True)
         self.tmdb_api_key: str = kwargs.get("tmdb_api_key") or ""
+        self.simkl_client_id: str = kwargs.get("simkl_client_id") or ""
         self.decrypt_labs_api_key: str = kwargs.get("decrypt_labs_api_key") or ""
         self.update_checks: bool = kwargs.get("update_checks", True)
         self.update_check_interval: int = kwargs.get("update_check_interval", 24)
@@ -98,6 +100,9 @@ class Config:
         self.title_cache_max_retention: int = kwargs.get("title_cache_max_retention", 86400)  # 24 hours default
         self.title_cache_enabled: bool = kwargs.get("title_cache_enabled", True)
 
+        self.debug: bool = kwargs.get("debug", False)
+        self.debug_keys: bool = kwargs.get("debug_keys", False)
+
     @classmethod
     def from_yaml(cls, path: Path) -> Config:
         if not path.exists():
@@ -113,8 +118,8 @@ POSSIBLE_CONFIG_PATHS = (
     Config._Directories.namespace_dir / Config._Filenames.root_config,
     # The Parent Folder to the unshackle Namespace Folder (e.g., %appdata%/Python/Python311/site-packages)
     Config._Directories.namespace_dir.parent / Config._Filenames.root_config,
-    # The AppDirs User Config Folder (e.g., %localappdata%/unshackle)
-    Config._Directories.user_configs / Config._Filenames.root_config,
+    # The AppDirs User Config Folder (e.g., ~/.config/unshackle on Linux, %LOCALAPPDATA%\unshackle on Windows)
+    Path(Config._Directories.app_dirs.user_config_dir) / Config._Filenames.root_config,
 )
@@ -439,7 +439,7 @@ class HLS:
         elif len(files) != range_len:
             raise ValueError(f"Missing {range_len - len(files)} segment files for {segment_range}...")
 
-        if isinstance(drm, Widevine):
+        if isinstance(drm, (Widevine, PlayReady)):
             # with widevine we can merge all segments and decrypt once
             merge(to=merged_path, via=files, delete=True, include_map_data=True)
             drm.decrypt(merged_path)
unshackle/core/proxies/__init__.py:

@@ -2,5 +2,6 @@ from .basic import Basic
 from .hola import Hola
 from .nordvpn import NordVPN
 from .surfsharkvpn import SurfsharkVPN
+from .windscribevpn import WindscribeVPN
 
-__all__ = ("Basic", "Hola", "NordVPN", "SurfsharkVPN")
+__all__ = ("Basic", "Hola", "NordVPN", "SurfsharkVPN", "WindscribeVPN")
unshackle/core/proxies/windscribevpn.py (new file, 99 lines):

@@ -0,0 +1,99 @@
import json
import random
import re
from typing import Optional

import requests

from unshackle.core.proxies.proxy import Proxy


class WindscribeVPN(Proxy):
    def __init__(self, username: str, password: str, server_map: Optional[dict[str, str]] = None):
        """
        Proxy Service using WindscribeVPN Service Credentials.

        A username and password must be provided. These are Service Credentials, not your Login Credentials.
        The Service Credentials can be found here: https://windscribe.com/getconfig/openvpn
        """
        if not username:
            raise ValueError("No Username was provided to the WindscribeVPN Proxy Service.")
        if not password:
            raise ValueError("No Password was provided to the WindscribeVPN Proxy Service.")

        if server_map is not None and not isinstance(server_map, dict):
            raise TypeError(f"Expected server_map to be a dict mapping a region to a hostname, not '{server_map!r}'.")

        self.username = username
        self.password = password
        self.server_map = server_map or {}

        self.countries = self.get_countries()

    def __repr__(self) -> str:
        countries = len(set(x.get("country_code") for x in self.countries if x.get("country_code")))
        # count the hosts in each group; each entry in "hosts" is one server
        servers = sum(
            len(group.get("hosts", []))
            for location in self.countries
            for group in location.get("groups", [])
        )

        return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})"

    def get_proxy(self, query: str) -> Optional[str]:
        """
        Get an HTTPS proxy URI for a WindscribeVPN server.
        """
        query = query.lower()

        if query in self.server_map:
            hostname = self.server_map[query]
        else:
            if re.match(r"^[a-z]+$", query):
                hostname = self.get_random_server(query)
            else:
                raise ValueError(f"The query provided is unsupported and unrecognized: {query}")

        if not hostname:
            return None

        return f"https://{self.username}:{self.password}@{hostname}:443"

    def get_random_server(self, country_code: str) -> Optional[str]:
        """
        Get a random server hostname for a country.

        Returns None if no servers are available for the country.
        """
        for location in self.countries:
            if location.get("country_code", "").lower() == country_code.lower():
                hostnames = []
                for group in location.get("groups", []):
                    for host in group.get("hosts", []):
                        if hostname := host.get("hostname"):
                            hostnames.append(hostname)

                if hostnames:
                    return random.choice(hostnames)

        return None

    @staticmethod
    def get_countries() -> list[dict]:
        """Get a list of available Countries and their metadata."""
        res = requests.get(
            url="https://assets.windscribe.com/serverlist/firefox/1/1",
            headers={
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
                "Content-Type": "application/json",
            },
        )
        if not res.ok:
            raise ValueError(f"Failed to get a list of WindscribeVPN locations [{res.status_code}]")

        try:
            data = res.json()
            return data.get("data", [])
        except json.JSONDecodeError:
            raise ValueError("Could not decode list of WindscribeVPN locations, not JSON data.")
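A usage sketch for the new provider; note that constructing it immediately fetches Windscribe's public server list over the network. Credentials and hostnames below are placeholders:

```python
from unshackle.core.proxies import WindscribeVPN

# Placeholders only; real Service Credentials come from
# https://windscribe.com/getconfig/openvpn.
proxy = WindscribeVPN(
    username="your_windscribe_username",
    password="your_windscribe_password",
    server_map={"us": "us-central-096.totallyacdn.com"},
)

print(proxy)                  # e.g. "69 Countries (480 Servers)"; counts vary
print(proxy.get_proxy("us"))  # pinned host from server_map
print(proxy.get_proxy("de"))  # random server for that country code, or None
```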
unshackle/core/session.py:

@@ -2,9 +2,16 @@
 
 from __future__ import annotations
 
+import logging
+import random
+import time
 import warnings
+from datetime import datetime, timezone
+from email.utils import parsedate_to_datetime
+from typing import Any
+from urllib.parse import urlparse
 
-from curl_cffi.requests import Session as CurlSession
+from curl_cffi.requests import Response, Session, exceptions
 
 from unshackle.core.config import config
 
@@ -15,18 +22,91 @@ warnings.filterwarnings(
 )
 
 
-class Session(CurlSession):
-    """curl_cffi Session with warning suppression."""
-
-    def request(self, method, url, **kwargs):
-        with warnings.catch_warnings():
-            warnings.filterwarnings(
-                "ignore", message="Make sure you are using https over https proxy.*", category=RuntimeWarning
-            )
-            return super().request(method, url, **kwargs)
+class MaxRetriesError(exceptions.RequestException):
+    def __init__(self, message, cause=None):
+        super().__init__(message)
+        self.__cause__ = cause
+
+
+class CurlSession(Session):
+    def __init__(
+        self,
+        max_retries: int = 10,
+        backoff_factor: float = 0.2,
+        max_backoff: float = 60.0,
+        status_forcelist: list[int] | None = None,
+        allowed_methods: set[str] | None = None,
+        catch_exceptions: tuple[type[Exception], ...] | None = None,
+        **session_kwargs: Any,
+    ):
+        super().__init__(**session_kwargs)
+
+        self.max_retries = max_retries
+        self.backoff_factor = backoff_factor
+        self.max_backoff = max_backoff
+        self.status_forcelist = status_forcelist or [429, 500, 502, 503, 504]
+        self.allowed_methods = allowed_methods or {"GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE", "TRACE"}
+        self.catch_exceptions = catch_exceptions or (
+            exceptions.ConnectionError,
+            exceptions.SSLError,
+            exceptions.Timeout,
+        )
+        self.log = logging.getLogger(self.__class__.__name__)
+
+    def _get_sleep_time(self, response: Response | None, attempt: int) -> float | None:
+        if response:
+            retry_after = response.headers.get("Retry-After")
+            if retry_after:
+                try:
+                    return float(retry_after)
+                except ValueError:
+                    if retry_date := parsedate_to_datetime(retry_after):
+                        return (retry_date - datetime.now(timezone.utc)).total_seconds()
+
+        if attempt == 0:
+            return 0.0
+
+        backoff_value = self.backoff_factor * (2 ** (attempt - 1))
+        jitter = backoff_value * 0.1
+        sleep_time = backoff_value + random.uniform(-jitter, jitter)
+        return min(sleep_time, self.max_backoff)
+
+    def request(self, method: str, url: str, **kwargs: Any) -> Response:
+        if method.upper() not in self.allowed_methods:
+            return super().request(method, url, **kwargs)
+
+        last_exception = None
+        response = None
+
+        for attempt in range(self.max_retries + 1):
+            try:
+                response = super().request(method, url, **kwargs)
+                if response.status_code not in self.status_forcelist:
+                    return response
+                last_exception = exceptions.HTTPError(f"Received status code: {response.status_code}")
+                self.log.warning(
+                    f"{response.status_code} {response.reason}({urlparse(url).path}). Retrying... "
+                    f"({attempt + 1}/{self.max_retries})"
+                )
+
+            except self.catch_exceptions as e:
+                last_exception = e
+                response = None
+                self.log.warning(
+                    f"{e.__class__.__name__}({urlparse(url).path}). Retrying... ({attempt + 1}/{self.max_retries})"
+                )
+
+            if attempt < self.max_retries:
+                if sleep_duration := self._get_sleep_time(response, attempt + 1):
+                    if sleep_duration > 0:
+                        time.sleep(sleep_duration)
+            else:
+                break
+
+        raise MaxRetriesError(f"Max retries exceeded for {method} {url}", cause=last_exception)
+
+
+def session(browser: str | None = None, **kwargs) -> CurlSession:
 
-
-def session(browser: str | None = None, **kwargs) -> Session:
     """
     Create a curl_cffi session that impersonates a browser.
 
@@ -48,32 +128,43 @@ def session(browser: str | None = None, **kwargs) -> Session:
     - allow_redirects: Follow redirects (bool, default True)
     - max_redirects: Maximum redirect count (int)
     - cert: Client certificate (str or tuple)
+    - ja3: JA3 fingerprint (str)
+    - akamai: Akamai fingerprint (str)
+
+    Extra arguments for retry handler:
+    - max_retries: Maximum number of retries (int, default 10)
+    - backoff_factor: Backoff factor (float, default 0.2)
+    - max_backoff: Maximum backoff time (float, default 60.0)
+    - status_forcelist: List of status codes to force retry (list, default [429, 500, 502, 503, 504])
+    - allowed_methods: List of allowed HTTP methods (set, default {"GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE", "TRACE"})
+    - catch_exceptions: List of exceptions to catch (tuple, default (exceptions.ConnectionError, exceptions.SSLError, exceptions.Timeout))
 
     Returns:
         curl_cffi.requests.Session configured with browser impersonation, common headers,
         and equivalent retry behavior to requests.Session.
 
     Example:
-        from unshackle.core.session import session
+        from unshackle.core.session import session as CurlSession
 
         class MyService(Service):
             @staticmethod
-            def get_session():
-                return session()  # Uses config default browser
+            def get_session() -> CurlSession:
+                session = CurlSession(
+                    impersonate="chrome",
+                    ja3="...",
+                    akamai="...",
+                    max_retries=5,
+                    status_forcelist=[429, 500],
+                    allowed_methods={"GET", "HEAD", "OPTIONS"},
+                )
+                return session  # Uses config default browser
     """
-    if browser is None:
-        browser = config.curl_impersonate.get("browser", "chrome124")
-
     session_config = {
-        "impersonate": browser,
-        "timeout": 30.0,
-        "allow_redirects": True,
-        "max_redirects": 15,
-        "verify": True,
+        "impersonate": browser or config.curl_impersonate.get("browser", "chrome"),
+        **kwargs,
     }
 
-    session_config.update(kwargs)
-    session_obj = Session(**session_config)
+    session_obj = CurlSession(**session_config)
     session_obj.headers.update(config.headers)
 
     return session_obj
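Between attempts, `_get_sleep_time` prefers a server-sent `Retry-After`, otherwise sleeps `backoff_factor * 2**(attempt - 1)` with ±10% jitter, capped at `max_backoff`. A sketch of the nominal schedule under the defaults (jitter and `Retry-After` ignored):

```python
# Sketch of the nominal retry schedule implied by _get_sleep_time, ignoring
# jitter and Retry-After (defaults: backoff_factor=0.2, max_backoff=60.0).
def nominal_delay(attempt: int, backoff_factor: float = 0.2, max_backoff: float = 60.0) -> float:
    if attempt == 0:
        return 0.0
    return min(backoff_factor * (2 ** (attempt - 1)), max_backoff)

# request() passes attempt + 1, so the delay before retry n is nominal_delay(n):
print([round(nominal_delay(n), 1) for n in range(1, 11)])
# -> [0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6, 51.2, 60.0]
```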
@@ -1,19 +1,22 @@
 import ast
 import contextlib
 import importlib.util
+import json
 import logging
 import os
 import re
 import socket
 import sys
 import time
+import traceback
 import unicodedata
 from collections import defaultdict
-from datetime import datetime
+from datetime import datetime, timezone
 from pathlib import Path
 from types import ModuleType
-from typing import Optional, Sequence, Union
+from typing import Any, Optional, Sequence, Union
 from urllib.parse import ParseResult, urlparse
+from uuid import uuid4
 
 import chardet
 import requests
@@ -122,7 +125,7 @@ def is_exact_match(language: Union[str, Language], languages: Sequence[Union[str
     return closest_match(language, list(map(str, languages)))[1] <= LANGUAGE_EXACT_DISTANCE
 
 
-def get_boxes(data: bytes, box_type: bytes, as_bytes: bool = False) -> Box:
+def get_boxes(data: bytes, box_type: bytes, as_bytes: bool = False) -> Box:  # type: ignore
     """
     Scan a byte array for a wanted MP4/ISOBMFF box, then parse and yield each find.
 
@@ -457,3 +460,334 @@ class FPS(ast.NodeVisitor):
    @classmethod
    def parse(cls, expr: str) -> float:
        return cls().visit(ast.parse(expr).body[0])

The remainder of the hunk is new code appended to the module:

"""
Structured JSON debug logging for unshackle.

Provides comprehensive debugging information for service developers and troubleshooting.
When enabled, logs all operations, requests, responses, DRM operations, and errors in JSON format.
"""


class DebugLogger:
    """
    Structured JSON debug logger for unshackle.

    Outputs JSON Lines format where each line is a complete JSON object.
    This makes it easy to parse, filter, and analyze logs programmatically.
    """

    def __init__(self, log_path: Optional[Path] = None, enabled: bool = False, log_keys: bool = False):
        """
        Initialize the debug logger.

        Args:
            log_path: Path to the log file. If None, logging is disabled.
            enabled: Whether debug logging is enabled.
            log_keys: Whether to log decryption keys (for debugging key issues).
        """
        self.enabled = enabled and log_path is not None
        self.log_path = log_path
        self.session_id = str(uuid4())[:8]
        self.file_handle = None
        self.log_keys = log_keys

        if self.enabled:
            self.log_path.parent.mkdir(parents=True, exist_ok=True)
            self.file_handle = open(self.log_path, "a", encoding="utf-8")
            self._log_session_start()

    def _log_session_start(self):
        """Log the start of a new session with environment information."""
        import platform

        from unshackle.core import __version__

        self.log(
            level="INFO",
            operation="session_start",
            message="Debug logging session started",
            context={
                "unshackle_version": __version__,
                "python_version": sys.version,
                "platform": platform.platform(),
                "platform_system": platform.system(),
                "platform_release": platform.release(),
            },
        )

    def log(
        self,
        level: str = "DEBUG",
        operation: str = "",
        message: str = "",
        context: Optional[dict[str, Any]] = None,
        service: Optional[str] = None,
        error: Optional[Exception] = None,
        request: Optional[dict[str, Any]] = None,
        response: Optional[dict[str, Any]] = None,
        duration_ms: Optional[float] = None,
        success: Optional[bool] = None,
        **kwargs,
    ):
        """
        Log a structured JSON entry.

        Args:
            level: Log level (DEBUG, INFO, WARNING, ERROR)
            operation: Name of the operation being performed
            message: Human-readable message
            context: Additional context information
            service: Service name (e.g., DSNP, NF)
            error: Exception object if an error occurred
            request: Request details (URL, method, headers, body)
            response: Response details (status, headers, body)
            duration_ms: Operation duration in milliseconds
            success: Whether the operation succeeded
            **kwargs: Additional fields to include in the log entry
        """
        if not self.enabled or not self.file_handle:
            return

        entry = {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "session_id": self.session_id,
            "level": level,
        }

        if operation:
            entry["operation"] = operation
        if message:
            entry["message"] = message
        if service:
            entry["service"] = service
        if context:
            entry["context"] = self._sanitize_data(context)
        if request:
            entry["request"] = self._sanitize_data(request)
        if response:
            entry["response"] = self._sanitize_data(response)
        if duration_ms is not None:
            entry["duration_ms"] = duration_ms
        if success is not None:
            entry["success"] = success

        if error:
            entry["error"] = {
                "type": type(error).__name__,
                "message": str(error),
                "traceback": traceback.format_exception(type(error), error, error.__traceback__),
            }

        for key, value in kwargs.items():
            if key not in entry:
                entry[key] = self._sanitize_data(value)

        try:
            self.file_handle.write(json.dumps(entry, default=str) + "\n")
            self.file_handle.flush()
        except Exception as e:
            print(f"Failed to write debug log: {e}", file=sys.stderr)

    def _sanitize_data(self, data: Any) -> Any:
        """
        Sanitize data for JSON serialization.
        Handles complex objects and removes sensitive information.
        """
        if data is None:
            return None

        if isinstance(data, (str, int, float, bool)):
            return data

        if isinstance(data, (list, tuple)):
            return [self._sanitize_data(item) for item in data]

        if isinstance(data, dict):
            sanitized = {}
            for key, value in data.items():
                key_lower = str(key).lower()
                has_prefix = key_lower.startswith("has_")

                is_always_sensitive = not has_prefix and any(
                    sensitive in key_lower for sensitive in ["password", "token", "secret", "auth", "cookie"]
                )

                is_key_field = (
                    "key" in key_lower
                    and not has_prefix
                    and not any(safe in key_lower for safe in ["_count", "_id", "_type", "kid", "keys_", "key_found"])
                )

                should_redact = is_always_sensitive or (is_key_field and not self.log_keys)

                if should_redact:
                    sanitized[key] = "[REDACTED]"
                else:
                    sanitized[key] = self._sanitize_data(value)
            return sanitized

        if isinstance(data, bytes):
            try:
                return data.hex()
            except Exception:
                return "[BINARY_DATA]"

        if isinstance(data, Path):
            return str(data)

        try:
            return str(data)
        except Exception:
            return f"[{type(data).__name__}]"

    def log_operation_start(self, operation: str, **kwargs) -> str:
        """
        Log the start of an operation and return an operation ID.

        Args:
            operation: Name of the operation
            **kwargs: Additional context

        Returns:
            Operation ID that can be used to log the end of the operation
        """
        op_id = str(uuid4())[:8]
        self.log(
            level="DEBUG",
            operation=f"{operation}_start",
            message=f"Starting operation: {operation}",
            operation_id=op_id,
            **kwargs,
        )
        return op_id

    def log_operation_end(
        self, operation: str, operation_id: str, success: bool = True, duration_ms: Optional[float] = None, **kwargs
    ):
        """
        Log the end of an operation.

        Args:
            operation: Name of the operation
            operation_id: Operation ID from log_operation_start
            success: Whether the operation succeeded
            duration_ms: Operation duration in milliseconds
            **kwargs: Additional context
        """
        self.log(
            level="INFO" if success else "ERROR",
            operation=f"{operation}_end",
            message=f"Finished operation: {operation}",
            operation_id=operation_id,
            success=success,
            duration_ms=duration_ms,
            **kwargs,
        )

    def log_service_call(self, method: str, url: str, **kwargs):
        """
        Log a service API call.

        Args:
            method: HTTP method (GET, POST, etc.)
            url: Request URL
            **kwargs: Additional request details (headers, body, etc.)
        """
        self.log(level="DEBUG", operation="service_call", request={"method": method, "url": url, **kwargs})

    def log_drm_operation(self, drm_type: str, operation: str, **kwargs):
        """
        Log a DRM operation (PSSH extraction, license request, key retrieval).

        Args:
            drm_type: DRM type (Widevine, PlayReady, etc.)
            operation: DRM operation name
            **kwargs: Additional context (PSSH, KIDs, keys, etc.)
        """
        self.log(
            level="DEBUG", operation=f"drm_{operation}", message=f"{drm_type} {operation}", drm_type=drm_type, **kwargs
        )

    def log_vault_query(self, vault_name: str, operation: str, **kwargs):
        """
        Log a vault query operation.

        Args:
            vault_name: Name of the vault
            operation: Vault operation (get_key, add_key, etc.)
            **kwargs: Additional context (KID, key, success, etc.)
        """
        self.log(
            level="DEBUG",
            operation=f"vault_{operation}",
            message=f"Vault {vault_name}: {operation}",
            vault=vault_name,
            **kwargs,
        )

    def log_error(self, operation: str, error: Exception, **kwargs):
        """
        Log an error with full context.

        Args:
            operation: Operation that failed
            error: Exception that occurred
            **kwargs: Additional context
        """
        self.log(
            level="ERROR",
            operation=operation,
            message=f"Error in {operation}: {str(error)}",
            error=error,
            success=False,
            **kwargs,
        )

    def close(self):
        """Close the log file and clean up resources."""
        if self.file_handle:
            self.log(level="INFO", operation="session_end", message="Debug logging session ended")
            self.file_handle.close()
            self.file_handle = None


# Global debug logger instance
_debug_logger: Optional[DebugLogger] = None


def get_debug_logger() -> Optional[DebugLogger]:
    """Get the global debug logger instance."""
    return _debug_logger


def init_debug_logger(log_path: Optional[Path] = None, enabled: bool = False, log_keys: bool = False):
    """
    Initialize the global debug logger.

    Args:
        log_path: Path to the log file
        enabled: Whether debug logging is enabled
        log_keys: Whether to log decryption keys (for debugging key issues)
    """
    global _debug_logger
    if _debug_logger:
        _debug_logger.close()
    _debug_logger = DebugLogger(log_path=log_path, enabled=enabled, log_keys=log_keys)


def close_debug_logger():
    """Close the global debug logger."""
    global _debug_logger
    if _debug_logger:
        _debug_logger.close()
        _debug_logger = None


__all__ = (
    "DebugLogger",
    "get_debug_logger",
    "init_debug_logger",
    "close_debug_logger",
)
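A usage sketch for the debug-logger helpers added above, assuming they are exposed from `unshackle.core.utilities` (the module this hunk extends); paths and values are illustrative:

```python
from pathlib import Path

# Assumed import path; see the hunk above for the actual definitions.
from unshackle.core.utilities import close_debug_logger, get_debug_logger, init_debug_logger

init_debug_logger(Path("logs/unshackle_debug_EXAMPLE.jsonl"), enabled=True)
debug = get_debug_logger()

op_id = debug.log_operation_start("manifest_fetch", service="EXAMPLE")
debug.log_service_call("GET", "https://example.com/manifest.mpd", headers={"authorization": "Bearer ..."})
debug.log_operation_end("manifest_fetch", op_id, success=True, duration_ms=412.0)
close_debug_logger()

# Each call appends one JSON object per line, with sensitive fields redacted:
# {"timestamp": "...", "session_id": "1a2b3c4d", "level": "DEBUG",
#  "operation": "service_call", "request": {"method": "GET",
#  "url": "https://example.com/manifest.mpd", "headers": {"authorization": "[REDACTED]"}}}
```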
@@ -47,6 +47,10 @@ def _api_key() -> Optional[str]:
     return config.tmdb_api_key or os.getenv("TMDB_API_KEY")
 
 
+def _simkl_client_id() -> Optional[str]:
+    return config.simkl_client_id or os.getenv("SIMKL_CLIENT_ID")
+
+
 def _clean(s: str) -> str:
     return STRIP_RE.sub("", s).lower()
 
@@ -63,9 +67,14 @@ def fuzzy_match(a: str, b: str, threshold: float = 0.8) -> bool:
 
 
 def search_simkl(title: str, year: Optional[int], kind: str) -> Tuple[Optional[dict], Optional[str], Optional[int]]:
-    """Search Simkl API for show information by filename (no auth required)."""
+    """Search Simkl API for show information by filename."""
     log.debug("Searching Simkl for %r (%s, %s)", title, kind, year)
 
+    client_id = _simkl_client_id()
+    if not client_id:
+        log.debug("No SIMKL client ID configured; skipping SIMKL search")
+        return None, None, None
+
     # Construct appropriate filename based on type
     filename = f"{title}"
     if year:
@@ -78,7 +87,8 @@ def search_simkl(title: str, year: Optional[int], kind: str) -> Tuple[Optional[d
 
     try:
         session = _get_session()
-        resp = session.post("https://api.simkl.com/search/file", json={"file": filename}, timeout=30)
+        headers = {"simkl-api-key": client_id}
+        resp = session.post("https://api.simkl.com/search/file", json={"file": filename}, headers=headers, timeout=30)
         resp.raise_for_status()
         data = resp.json()
         log.debug("Simkl API response received")
@@ -338,73 +348,97 @@ def tag_file(path: Path, title: Title, tmdb_id: Optional[int] | None = None) ->
         return
 
     if config.tag_imdb_tmdb:
-        # If tmdb_id is provided (via --tmdb), skip Simkl and use TMDB directly
-        if tmdb_id is not None:
-            log.debug("Using provided TMDB ID %s for tags", tmdb_id)
-        else:
-            # Try Simkl first for automatic lookup
-            simkl_data, simkl_title, simkl_tmdb_id = search_simkl(name, year, kind)
-
-            if simkl_data and simkl_title and fuzzy_match(simkl_title, name):
-                log.debug("Using Simkl data for tags")
-                if simkl_tmdb_id:
-                    tmdb_id = simkl_tmdb_id
-
-                # Handle TV show data from Simkl
-                if simkl_data.get("type") == "episode" and "show" in simkl_data:
-                    show_ids = simkl_data.get("show", {}).get("ids", {})
-                    if show_ids.get("imdb"):
-                        standard_tags["IMDB"] = show_ids["imdb"]
-                    if show_ids.get("tvdb"):
-                        standard_tags["TVDB2"] = f"series/{show_ids['tvdb']}"
-                    if show_ids.get("tmdbtv"):
-                        standard_tags["TMDB"] = f"tv/{show_ids['tmdbtv']}"
-
-                # Handle movie data from Simkl
-                elif simkl_data.get("type") == "movie" and "movie" in simkl_data:
-                    movie_ids = simkl_data.get("movie", {}).get("ids", {})
-                    if movie_ids.get("imdb"):
-                        standard_tags["IMDB"] = movie_ids["imdb"]
-                    if movie_ids.get("tvdb"):
-                        standard_tags["TVDB2"] = f"movies/{movie_ids['tvdb']}"
-                    if movie_ids.get("tmdb"):
-                        standard_tags["TMDB"] = f"movie/{movie_ids['tmdb']}"
-
-        # Use TMDB API for additional metadata (either from provided ID or Simkl lookup)
+        # Check if we have any API keys available for metadata lookup
         api_key = _api_key()
-        if not api_key:
-            log.debug("No TMDB API key set; applying basic tags only")
-            _apply_tags(path, custom_tags)
-            return
-
-        tmdb_title: Optional[str] = None
-        if tmdb_id is None:
-            tmdb_id, tmdb_title = search_tmdb(name, year, kind)
-            log.debug("TMDB search result: %r (ID %s)", tmdb_title, tmdb_id)
-            if not tmdb_id or not tmdb_title or not fuzzy_match(tmdb_title, name):
-                log.debug("TMDB search did not match; skipping external ID lookup")
-                _apply_tags(path, custom_tags)
-                return
-
-        prefix = "movie" if kind == "movie" else "tv"
-        standard_tags["TMDB"] = f"{prefix}/{tmdb_id}"
-        try:
-            ids = external_ids(tmdb_id, kind)
-        except requests.RequestException as exc:
-            log.debug("Failed to fetch external IDs: %s", exc)
-            ids = {}
-        else:
-            log.debug("External IDs found: %s", ids)
-
-        imdb_id = ids.get("imdb_id")
-        if imdb_id:
-            standard_tags["IMDB"] = imdb_id
-        tvdb_id = ids.get("tvdb_id")
-        if tvdb_id:
-            if kind == "movie":
-                standard_tags["TVDB2"] = f"movies/{tvdb_id}"
-            else:
-                standard_tags["TVDB2"] = f"series/{tvdb_id}"
+        simkl_client = _simkl_client_id()
+
+        if not api_key and not simkl_client:
+            log.debug("No TMDB API key or Simkl client ID configured; skipping IMDB/TMDB tag lookup")
+        else:
+            # If tmdb_id is provided (via --tmdb), skip Simkl and use TMDB directly
+            if tmdb_id is not None:
+                log.debug("Using provided TMDB ID %s for tags", tmdb_id)
+            else:
+                # Try Simkl first for automatic lookup (only if client ID is available)
+                if simkl_client:
+                    simkl_data, simkl_title, simkl_tmdb_id = search_simkl(name, year, kind)
+
+                    if simkl_data and simkl_title and fuzzy_match(simkl_title, name):
+                        log.debug("Using Simkl data for tags")
+                        if simkl_tmdb_id:
+                            tmdb_id = simkl_tmdb_id
+
+                        # Handle TV show data from Simkl
+                        if simkl_data.get("type") == "episode" and "show" in simkl_data:
+                            show_ids = simkl_data.get("show", {}).get("ids", {})
+                            if show_ids.get("imdb"):
+                                standard_tags["IMDB"] = show_ids["imdb"]
+                            if show_ids.get("tvdb"):
+                                standard_tags["TVDB2"] = f"series/{show_ids['tvdb']}"
+                            if show_ids.get("tmdbtv"):
+                                standard_tags["TMDB"] = f"tv/{show_ids['tmdbtv']}"
+
+                        # Handle movie data from Simkl
+                        elif simkl_data.get("type") == "movie" and "movie" in simkl_data:
+                            movie_ids = simkl_data.get("movie", {}).get("ids", {})
+                            if movie_ids.get("imdb"):
+                                standard_tags["IMDB"] = movie_ids["imdb"]
+                            if movie_ids.get("tvdb"):
+                                standard_tags["TVDB2"] = f"movies/{movie_ids['tvdb']}"
+                            if movie_ids.get("tmdb"):
+                                standard_tags["TMDB"] = f"movie/{movie_ids['tmdb']}"
+
+            # Use TMDB API for additional metadata (either from provided ID or Simkl lookup)
+            if api_key:
+                tmdb_title: Optional[str] = None
+                if tmdb_id is None:
+                    tmdb_id, tmdb_title = search_tmdb(name, year, kind)
+                    log.debug("TMDB search result: %r (ID %s)", tmdb_title, tmdb_id)
+                    if not tmdb_id or not tmdb_title or not fuzzy_match(tmdb_title, name):
+                        log.debug("TMDB search did not match; skipping external ID lookup")
+                    else:
+                        prefix = "movie" if kind == "movie" else "tv"
+                        standard_tags["TMDB"] = f"{prefix}/{tmdb_id}"
+                        try:
+                            ids = external_ids(tmdb_id, kind)
+                        except requests.RequestException as exc:
+                            log.debug("Failed to fetch external IDs: %s", exc)
+                            ids = {}
+                        else:
+                            log.debug("External IDs found: %s", ids)
+
+                        imdb_id = ids.get("imdb_id")
+                        if imdb_id:
+                            standard_tags["IMDB"] = imdb_id
+                        tvdb_id = ids.get("tvdb_id")
+                        if tvdb_id:
+                            if kind == "movie":
+                                standard_tags["TVDB2"] = f"movies/{tvdb_id}"
+                            else:
+                                standard_tags["TVDB2"] = f"series/{tvdb_id}"
+                elif tmdb_id is not None:
+                    # tmdb_id was provided or found via Simkl
+                    prefix = "movie" if kind == "movie" else "tv"
+                    standard_tags["TMDB"] = f"{prefix}/{tmdb_id}"
+                    try:
+                        ids = external_ids(tmdb_id, kind)
+                    except requests.RequestException as exc:
+                        log.debug("Failed to fetch external IDs: %s", exc)
+                        ids = {}
+                    else:
+                        log.debug("External IDs found: %s", ids)
+
+                    imdb_id = ids.get("imdb_id")
+                    if imdb_id:
+                        standard_tags["IMDB"] = imdb_id
+                    tvdb_id = ids.get("tvdb_id")
+                    if tvdb_id:
+                        if kind == "movie":
+                            standard_tags["TVDB2"] = f"movies/{tvdb_id}"
+                        else:
+                            standard_tags["TVDB2"] = f"series/{tvdb_id}"
+            else:
+                log.debug("No TMDB API key configured; skipping TMDB external ID lookup")
 
     merged_tags = {
         **custom_tags,
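For a movie matched through Simkl, the branch above ends up populating `standard_tags` roughly like this hypothetical example (IDs are illustrative):

```python
# Hypothetical result for a movie matched through Simkl (IDs are illustrative):
standard_tags = {
    "IMDB": "tt1375666",
    "TVDB2": "movies/1939",
    "TMDB": "movie/27205",
}
# merged_tags then combines these with any custom tags before they are written.
```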
@@ -32,6 +32,26 @@ title_cache_enabled: true # Enable/disable title caching globally (default: true
|
|||||||
title_cache_time: 1800 # Cache duration in seconds (default: 1800 = 30 minutes)
|
title_cache_time: 1800 # Cache duration in seconds (default: 1800 = 30 minutes)
|
||||||
title_cache_max_retention: 86400 # Maximum cache retention for fallback when API fails (default: 86400 = 24 hours)
|
title_cache_max_retention: 86400 # Maximum cache retention for fallback when API fails (default: 86400 = 24 hours)
|
||||||
|
|
||||||
|
# Debug logging configuration
|
||||||
|
# Comprehensive JSON-based debug logging for troubleshooting and service development
|
||||||
|
debug:
|
||||||
|
false # Enable structured JSON debug logging (default: false)
|
||||||
|
# When enabled with --debug flag or set to true:
|
||||||
|
# - Creates JSON Lines (.jsonl) log files with complete debugging context
|
||||||
|
# - Logs: session info, CLI params, service config, CDM details, authentication,
|
||||||
|
# titles, tracks metadata, DRM operations, vault queries, errors with stack traces
|
||||||
|
# - File location: logs/unshackle_debug_{service}_{timestamp}.jsonl
|
||||||
|
# - Also creates text log: logs/unshackle_root_{timestamp}.log
|
||||||
|
|
||||||
|
debug_keys:
|
||||||
|
false # Log decryption keys in debug logs (default: false)
|
||||||
|
# Set to true to include actual decryption keys in logs
|
||||||
|
# Useful for debugging key retrieval and decryption issues
|
||||||
|
# SECURITY NOTE: Passwords, tokens, cookies, and session tokens
|
||||||
|
# are ALWAYS redacted regardless of this setting
|
||||||
|
# Only affects: content_key, key fields (the actual CEKs)
|
||||||
|
# Never affects: kid, keys_count, key_id (metadata is always logged)
|
||||||
|
|
||||||
# Muxing configuration
|
# Muxing configuration
|
||||||
muxing:
|
muxing:
|
||||||
set_title: false
|
set_title: false
|
||||||
@@ -109,6 +129,74 @@ cdm:
|
|||||||
default: netflix_standard_l3
|
default: netflix_standard_l3
|
||||||
|
|
||||||
# Use pywidevine Serve-compliant Remote CDMs
|
# Use pywidevine Serve-compliant Remote CDMs
|
||||||
|
|
||||||
|
# Example: Custom CDM API Configuration
|
||||||
|
# This demonstrates the highly configurable custom_api type that can adapt to any CDM API format
|
||||||
|
# - name: "chrome"
|
||||||
|
# type: "custom_api"
|
||||||
|
# host: "http://remotecdm.test/"
|
||||||
|
# timeout: 30
|
||||||
|
# device:
|
||||||
|
# name: "ChromeCDM"
|
||||||
|
# type: "CHROME"
|
||||||
|
# system_id: 34312
|
||||||
|
# security_level: 3
|
||||||
|
# auth:
|
||||||
|
# type: "header"
|
||||||
|
# header_name: "x-api-key"
|
||||||
|
# key: "YOUR_API_KEY_HERE"
|
||||||
|
# custom_headers:
|
||||||
|
# User-Agent: "Unshackle/2.0.0"
|
||||||
|
# endpoints:
|
||||||
|
# get_request:
|
||||||
|
# path: "/get-challenge"
|
||||||
|
# method: "POST"
|
||||||
|
# timeout: 30
|
||||||
|
# decrypt_response:
|
||||||
|
# path: "/get-keys"
|
||||||
|
# method: "POST"
|
||||||
|
# timeout: 30
|
||||||
|
# request_mapping:
|
||||||
|
# get_request:
|
||||||
|
# param_names:
|
||||||
|
# scheme: "device"
|
||||||
|
# init_data: "init_data"
|
||||||
|
# static_params:
|
||||||
|
# scheme: "Widevine"
|
||||||
|
# decrypt_response:
|
||||||
|
# param_names:
|
||||||
|
# scheme: "device"
|
||||||
|
# license_request: "license_request"
|
||||||
|
# license_response: "license_response"
|
||||||
|
# static_params:
|
||||||
|
# scheme: "Widevine"
|
||||||
|
# response_mapping:
|
||||||
|
# get_request:
|
||||||
|
# fields:
|
||||||
|
# challenge: "challenge"
|
||||||
|
# session_id: "session_id"
|
||||||
|
# message: "message"
|
||||||
|
# message_type: "message_type"
|
||||||
|
# response_types:
|
||||||
|
# - condition: "message_type == 'license-request'"
|
||||||
|
# type: "license_request"
|
||||||
|
# success_conditions:
|
||||||
|
# - "message == 'success'"
|
||||||
|
# decrypt_response:
|
||||||
|
# fields:
|
||||||
|
# keys: "keys"
|
||||||
|
# message: "message"
|
||||||
|
# key_fields:
|
||||||
|
# kid: "kid"
|
||||||
|
# key: "key"
|
||||||
|
# type: "type"
|
||||||
|
# success_conditions:
|
||||||
|
# - "message == 'success'"
|
||||||
|
# caching:
|
||||||
|
# enabled: true
|
||||||
|
# use_vaults: true
|
||||||
|
# check_cached_first: true
|
||||||
|
|
||||||
remote_cdm:
|
remote_cdm:
|
||||||
- name: "chrome"
|
- name: "chrome"
|
||||||
device_name: chrome
|
device_name: chrome
|
||||||
@@ -239,7 +327,7 @@ headers:
|
|||||||
|
|
||||||
# Override default filenames used across unshackle
|
# Override default filenames used across unshackle
|
||||||
filenames:
|
filenames:
|
||||||
log: "unshackle_{name}_{time}.log"
|
debug_log: "unshackle_debug_{service}_{time}.jsonl" # JSON Lines debug log file
|
||||||
config: "config.yaml"
|
config: "config.yaml"
|
||||||
root_config: "unshackle.yaml"
|
root_config: "unshackle.yaml"
|
||||||
chapters: "Chapters_{title}_{random}.txt"
|
chapters: "Chapters_{title}_{random}.txt"
|
||||||
@@ -248,6 +336,10 @@ filenames:
|
|||||||
# API key for The Movie Database (TMDB)
|
# API key for The Movie Database (TMDB)
|
||||||
tmdb_api_key: ""
|
tmdb_api_key: ""
|
||||||
|
|
||||||
|
# Client ID for SIMKL API (optional, improves metadata matching)
|
||||||
|
# Get your free client ID at: https://simkl.com/settings/developer/
|
||||||
|
simkl_client_id: ""
|
||||||
|
|
||||||
# conversion_method:
|
# conversion_method:
|
||||||
# - auto (default): Smart routing - subby for WebVTT/SAMI, standard for others
|
# - auto (default): Smart routing - subby for WebVTT/SAMI, standard for others
|
||||||
# - subby: Always use subby with advanced processing
|
# - subby: Always use subby with advanced processing
|
||||||
@@ -260,6 +352,7 @@ subtitle:
|
|||||||
|
|
||||||
# Configuration for pywidevine's serve functionality
|
# Configuration for pywidevine's serve functionality
|
||||||
serve:
|
serve:
|
||||||
|
api_secret: "your-secret-key-here"
|
||||||
users:
|
users:
|
||||||
secret_key_for_user:
|
secret_key_for_user:
|
||||||
devices:
|
devices:
|
||||||
@@ -273,9 +366,13 @@ services:
|
|||||||
# Service-specific configuration goes here
|
# Service-specific configuration goes here
|
||||||
# Profile-specific configurations can be nested under service names
|
# Profile-specific configurations can be nested under service names
|
||||||
|
|
||||||
# Example: with profile-specific device configs
|
# You can override ANY global configuration option on a per-service basis
|
||||||
|
# This allows fine-tuned control for services with special requirements
|
||||||
|
# Supported overrides: dl, aria2c, n_m3u8dl_re, curl_impersonate, subtitle, muxing, headers, etc.
|
||||||
|
|
||||||
|
# Example: Comprehensive service configuration showing all features
|
||||||
EXAMPLE:
|
EXAMPLE:
|
||||||
# Global service config
|
# Standard service config
|
||||||
api_key: "service_api_key"
|
api_key: "service_api_key"
|
||||||
|
|
||||||
# Service certificate for Widevine L1/L2 (base64 encoded)
|
# Service certificate for Widevine L1/L2 (base64 encoded)
|
||||||
@@ -296,6 +393,42 @@ services:
     app_name: "AIV"
     device_model: "Fire TV Stick 4K"

+    # NEW: Configuration overrides (can be combined with profiles and certificates)
+    # Override dl command defaults for this service
+    dl:
+      downloads: 4 # Limit concurrent track downloads (global default: 6)
+      workers: 8 # Reduce workers per track (global default: 16)
+      lang: ["en", "es-419"] # Different language priority for this service
+      sub_format: srt # Force SRT subtitle format
+
+    # Override n_m3u8dl_re downloader settings
+    n_m3u8dl_re:
+      thread_count: 8 # Lower thread count for rate-limited service (global default: 16)
+      use_proxy: true # Force proxy usage for this service
+      retry_count: 10 # More retries for unstable connections
+      ad_keyword: "advertisement" # Service-specific ad filtering
+
+    # Override aria2c downloader settings
+    aria2c:
+      max_concurrent_downloads: 2 # Limit concurrent downloads (global default: 4)
+      max_connection_per_server: 1 # Single connection per server
+      split: 3 # Fewer splits (global default: 5)
+      file_allocation: none # Faster allocation for this service
+
+    # Override subtitle processing for this service
+    subtitle:
+      conversion_method: pycaption # Use specific subtitle converter
+      sdh_method: auto
+
+    # Service-specific headers
+    headers:
+      User-Agent: "Service-specific user agent string"
+      Accept-Language: "en-US,en;q=0.9"
+
+    # Override muxing options
+    muxing:
+      set_title: true
+
   # Example: Service with different regions per profile
   SERVICE_NAME:
     profiles:
@@ -306,6 +439,13 @@ services:
         region: "GB"
         api_endpoint: "https://api.uk.service.com"

+  # Notes on service-specific overrides:
+  # - Overrides are merged with global config, not replaced
+  # - Only specified keys are overridden, others use global defaults
+  # - Reserved keys (profiles, api_key, certificate, etc.) are NOT treated as overrides
+  # - Any dict-type config option can be overridden (dl, aria2c, n_m3u8dl_re, etc.)
+  # - Use --debug flag to see which overrides are applied during downloads
+
 # External proxy provider services
 proxy_providers:
   nordvpn:
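The merge semantics described in the notes above amount to a recursive dictionary merge. A minimal sketch of that behavior follows (an illustration of the documented rules, not unshackle's actual implementation; the names are hypothetical):

```python
from typing import Any

RESERVED_KEYS = {"profiles", "api_key", "certificate"}  # illustrative, per the notes above


def merge_overrides(global_cfg: dict[str, Any], service_cfg: dict[str, Any]) -> dict[str, Any]:
    """Recursively merge service-level overrides onto the global config.

    Only keys present in the service config are overridden; everything
    else falls through to the global defaults.
    """
    merged = dict(global_cfg)
    for key, value in service_cfg.items():
        if key in RESERVED_KEYS:
            continue  # reserved keys are service data, not config overrides
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = merge_overrides(merged[key], value)  # deep-merge nested sections
        else:
            merged[key] = value  # scalars and lists replace the global value outright
    return merged


# e.g. merge_overrides({"dl": {"downloads": 6, "workers": 16}}, {"dl": {"downloads": 4}})
# -> {"dl": {"downloads": 4, "workers": 16}}
```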
@@ -320,6 +460,12 @@ proxy_providers:
       us: 3844 # force US server #3844 for US proxies
       gb: 2697 # force GB server #2697 for GB proxies
       au: 4621 # force AU server #4621 for AU proxies
+  windscribevpn:
+    username: your_windscribe_username # Service credentials from https://windscribe.com/getconfig/openvpn
+    password: your_windscribe_password # Service credentials (not your login password)
+    server_map:
+      us: "us-central-096.totallyacdn.com" # force US server
+      gb: "uk-london-055.totallyacdn.com" # force GB server
   basic:
     GB:
       - "socks5://username:password@bhx.socks.ipvanish.com:1080" # 1 (Birmingham)
uv.lock (generated, 61 changes)
@@ -80,6 +80,22 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" },
 ]

+[[package]]
+name = "aiohttp-swagger3"
+version = "0.10.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "aiohttp" },
+    { name = "attrs" },
+    { name = "fastjsonschema" },
+    { name = "pyyaml" },
+    { name = "rfc3339-validator" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a1/06/00ccb2c8afdde4ca7c3cac424d54715c7d90cdd4e13e1ca71d68f5b2e665/aiohttp_swagger3-0.10.0.tar.gz", hash = "sha256:a333c59328f64dd64587e5f276ee84dc256f587d09f2da6ddaae3812fa4d4f33", size = 1839028, upload-time = "2025-02-11T10:51:26.974Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0a/8f/db4cb843999a3088846d170f38eda2182b50b5733387be8102fed171c53f/aiohttp_swagger3-0.10.0-py3-none-any.whl", hash = "sha256:0ae2d2ba7dbd8ea8fe1cffe8f0197db5d0aa979eb9679bd699ecd87923912509", size = 1826491, upload-time = "2025-02-11T10:51:25.174Z" },
+]
+
 [[package]]
 name = "aiosignal"
 version = "1.4.0"
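aiohttp-swagger3 provides self-hosted Swagger UI and OpenAPI validation for aiohttp servers; presumably it backs the new REST API's documentation. A minimal sketch of the library's usual wiring, following its documented pattern (the route, title, and port here are placeholders, not unshackle's actual code):

```python
from aiohttp import web
from aiohttp_swagger3 import SwaggerDocs, SwaggerUiSettings


async def ping(request: web.Request) -> web.Response:
    """
    Health check.
    ---
    summary: Ping the server
    responses:
      '200':
        description: Server is up
    """
    return web.json_response({"ok": True})


app = web.Application()
swagger = SwaggerDocs(
    app,
    title="Example API",  # placeholder metadata
    version="1.0.0",
    swagger_ui_settings=SwaggerUiSettings(path="/docs/"),  # serves Swagger UI at /docs/
)
swagger.add_routes([web.get("/ping", ping)])  # routes registered here are validated against the docstring spec
web.run_app(app, port=8080)
```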
@@ -468,6 +484,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" },
 ]

+[[package]]
+name = "fastjsonschema"
+version = "2.19.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/7f/cedf77ace50aa60c566deaca9066750f06e1fcf6ad24f254d255bb976dd6/fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d", size = 372732, upload-time = "2023-12-28T14:02:06.823Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9c/b9/79691036d4a8f9857e74d1728b23f34f583b81350a27492edda58d5604e1/fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0", size = 23388, upload-time = "2023-12-28T14:02:04.512Z" },
+]
+
 [[package]]
 name = "filelock"
 version = "3.19.1"
@@ -1101,6 +1126,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/f2/5f/af7da8e6f1e42b52f44a24d08b8e4c726207434e2593732d39e7af5e7256/pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe", size = 1806478, upload-time = "2025-05-17T17:23:26.066Z" },
 ]

+[[package]]
+name = "pyexecjs"
+version = "1.5.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ba/8e/aedef81641c8dca6fd0fb7294de5bed9c45f3397d67fddf755c1042c2642/PyExecJS-1.5.1.tar.gz", hash = "sha256:34cc1d070976918183ff7bdc0ad71f8157a891c92708c00c5fbbff7a769f505c", size = 13344, upload-time = "2018-01-18T04:33:55.126Z" }
+
 [[package]]
 name = "pygments"
 version = "2.19.2"
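PyExecJS bridges Python to an external JavaScript runtime (Node.js, JScript, etc.); dependencies like this are typically pulled in so a service can evaluate site JavaScript such as token or challenge code. A minimal usage sketch of the library's public API (illustrative only; requires a JS runtime such as Node on PATH, and the snippet itself is made up):

```python
import execjs  # PyExecJS; auto-detects the first available JS runtime (e.g. Node)

# Compile a small JS snippet and call into it from Python.
ctx = execjs.compile(
    """
    function buildToken(seed) {
        return seed.split("").reverse().join("") + "-signed";
    }
    """
)
print(ctx.call("buildToken", "abc123"))  # -> "321cba-signed"
```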
@@ -1267,6 +1301,18 @@ socks = [
     { name = "pysocks" },
 ]

+[[package]]
+name = "rfc3339-validator"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" },
+]
+
 [[package]]
 name = "rich"
 version = "13.9.4"
@@ -1373,6 +1419,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" },
 ]

+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
+]
+
 [[package]]
 name = "sniffio"
 version = "1.3.1"
@@ -1514,9 +1569,10 @@ wheels = [

 [[package]]
 name = "unshackle"
-version = "1.4.8"
+version = "2.0.0"
 source = { editable = "." }
 dependencies = [
+    { name = "aiohttp-swagger3" },
     { name = "appdirs" },
     { name = "brotli" },
     { name = "chardet" },
@@ -1533,6 +1589,7 @@ dependencies = [
     { name = "protobuf" },
     { name = "pycaption" },
     { name = "pycryptodomex" },
+    { name = "pyexecjs" },
     { name = "pyjwt" },
     { name = "pymediainfo" },
     { name = "pymp4" },
@@ -1567,6 +1624,7 @@ dev = [

 [package.metadata]
 requires-dist = [
+    { name = "aiohttp-swagger3", specifier = ">=0.9.0,<1" },
     { name = "appdirs", specifier = ">=1.4.4,<2" },
     { name = "brotli", specifier = ">=1.1.0,<2" },
     { name = "chardet", specifier = ">=5.2.0,<6" },
@@ -1583,6 +1641,7 @@ requires-dist = [
     { name = "protobuf", specifier = ">=4.25.3,<5" },
     { name = "pycaption", specifier = ">=2.2.6,<3" },
     { name = "pycryptodomex", specifier = ">=3.20.0,<4" },
+    { name = "pyexecjs", specifier = ">=1.5.1" },
     { name = "pyjwt", specifier = ">=2.8.0,<3" },
     { name = "pymediainfo", specifier = ">=6.1.0,<7" },
     { name = "pymp4", specifier = ">=1.4.0,<2" },