55 Commits

Author SHA1 Message Date
Andy
a7bde29401 fix: only exclude forced subs when --forced-subs flag is not set
Previously, forced subtitles were incorrectly included when they matched
languages in the lang configuration, even without the --forced-subs flag.
This caused forced subs to appear when using language configs or the -l
parameter, which violated the expected behavior.
2025-10-15 22:39:44 +00:00
Andy
45902bba13 fix: use keyword arguments for Attachment constructor in font attachment
Fixes #24

When attaching fonts for ASS/SSA subtitles, the Attachment class was being
called with positional arguments instead of keyword arguments. This caused
the font Path object to be incorrectly interpreted, leading to an error:
"Invalid URL 'Arial (arial)': No scheme supplied."

Changed Attachment(font, name) to Attachment(path=font, name=name) to
explicitly pass arguments by keyword, ensuring proper parameter handling.
2025-10-13 16:43:31 +00:00
Andy
170a427af0 chore: bump version to 1.4.8 2025-10-08 21:30:01 +00:00
Andy
283736c57b revert: remove tinycss SyntaxWarning suppression and fix isort
Revert the warnings filter added in 2d5e807, as it did not suppress the tinycss SyntaxWarning as expected. Also fix the isort import order in prd.py for the pyplayready imports.
2025-10-08 21:26:26 +00:00
Andy
3f6a7e1f68 feat: add --exact-lang flag for precise language matching
New --exact-lang CLI flag that enables exact language code matching instead of fuzzy matching. This allows users to get specific regional variants without matching all related variants.

Examples:
- `-l es-419` normally matches all Spanish (es-ES, es-419, es-MX)
- `-l es-419 --exact-lang` matches ONLY es-419 (Latin American Spanish)

Fixes a language selection issue where a specific variant such as es-419 (Latin American Spanish) would match all Spanish variants instead of only close regional variants.
2025-10-08 01:54:30 +00:00
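The fuzzy-versus-exact split can be sketched with the langcodes library that unshackle already depends on. This is an illustration only, not unshackle's actual `is_close_match`/`is_exact_match` implementation; the two thresholds mirror the `LANGUAGE_MAX_DISTANCE = 5` and `LANGUAGE_EXACT_DISTANCE = 0` constants visible in the constants diff further below.

```python
from langcodes import tag_distance

LANGUAGE_MAX_DISTANCE = 5    # close variants still count, e.g. en, en-US, en-AU
LANGUAGE_EXACT_DISTANCE = 0  # exact tag only, no variants


def is_close_match(want: str, have: str) -> bool:
    """Fuzzy matching: the default behaviour without --exact-lang."""
    return tag_distance(want, have) <= LANGUAGE_MAX_DISTANCE


def is_exact_match(want: str, have: str) -> bool:
    """Strict matching: the behaviour enabled by --exact-lang."""
    return tag_distance(want, have) <= LANGUAGE_EXACT_DISTANCE


# Per the commit message: fuzzy matching pulls other Spanish variants into an
# es-419 request, while exact matching accepts only es-419 itself.
print(is_close_match("es-419", "es-MX"))
print(is_exact_match("es-419", "es-MX"))   # False
print(is_exact_match("es-419", "es-419"))  # True
```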
Posi+ive
e9ba78cec3 Specify 'utf-8' encoding when opening temp file. (#22) 2025-10-05 16:42:21 -06:00
Andy
4cec942613 feat(vaults): add DecryptLabs API support to HTTP vault 2025-10-04 21:58:30 +00:00
Andy
d6f8e42f52 feat: add --no-mux flag to skip muxing tracks into container files
Add --no-mux command-line option to allow downloading individual track
files without muxing them into a container file (.mkv/.mka/.mks).
This addresses use cases where users want to download tracks separately,
such as:
- Downloading only subtitles as individual .srt/.vtt files
- Keeping audio/video/subtitle tracks as separate files
- Converting subtitle formats without creating container files
When --no-mux is used:
- Tracks are saved as individual files with descriptive suffixes
- Video tracks: filename.{codec}.ext
- Audio tracks: filename.{language}.{codec}.ext
- Subtitle tracks: filename.{language}.forced.sdh.ext (as applicable)
- Folder structure respects --no-folder flag

Resolves #21
2025-10-01 18:11:40 +00:00
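A rough illustration of the suffix scheme listed above. The attribute names are assumptions for the sketch and do not mirror unshackle's real Track classes.

```python
# Illustration only: how the --no-mux per-track filenames could be assembled.
def track_filename(base: str, kind: str, ext: str, *, codec: str = "",
                   language: str = "", forced: bool = False, sdh: bool = False) -> str:
    if kind == "video":
        suffix = f".{codec}"
    elif kind == "audio":
        suffix = f".{language}.{codec}"
    else:  # subtitle
        suffix = f".{language}" + (".forced" if forced else "") + (".sdh" if sdh else "")
    return f"{base}{suffix}.{ext}"


print(track_filename("Show.S01E01", "video", "mp4", codec="H264"))
print(track_filename("Show.S01E01", "audio", "m4a", codec="AAC", language="en"))
print(track_filename("Show.S01E01", "subtitle", "srt", language="en", forced=True))
```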
Andy
c1e2d68e57 docs: add pysubs2 conversion_method to configuration documentation 2025-10-01 04:19:25 +00:00
Andy
acbbe734ab Merge branch 'feature/pysubs2-subtitle conversion' into main 2025-10-01 04:16:06 +00:00
Andy
6d0a210efb fix(subtitle): Do not auto-select pysubs2 while it is in the "testing" phase. 2025-10-01 04:11:22 +00:00
Sp5rky
26a94b7135 feat: add pysubs2 subtitle conversion with extended format support
- Add pysubs2 as default conversion method for 'auto' mode
- Add support for MicroDVD (.sub), MPL2 (.mpl2), and TMP (.tmp) formats
- Implement convert_with_pysubs2() method with fallback to standard conversion
- Update from_mime() to handle new subtitle format types
- Map all supported formats to pysubs2 format identifiers
- Update docstrings to reflect new capabilities
2025-09-30 14:12:51 -06:00
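As a hedged sketch of the approach (not unshackle's actual Subtitle class), conversion via pysubs2 with a fallback to the standard pipeline might look like the following; `convert_with_standard_pipeline` is a hypothetical stand-in for the pre-existing conversion path mentioned in the commit.

```python
from pathlib import Path

import pysubs2


def convert_with_standard_pipeline(source: Path, target_format: str) -> Path:
    """Hypothetical stand-in for the pre-existing conversion chain."""
    raise NotImplementedError


def convert_with_pysubs2(source: Path, target_format: str = "srt") -> Path:
    """Convert a subtitle via pysubs2 (e.g. srt, ass/ssa, microdvd, mpl2, tmp)."""
    out_path = source.with_suffix(f".{target_format}")
    try:
        subs = pysubs2.load(str(source), encoding="utf-8")
        subs.save(str(out_path), format_=target_format)
    except Exception:
        # Fall back to the standard conversion path on any pysubs2 failure.
        return convert_with_standard_pipeline(source, target_format)
    return out_path
```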
Sp5rky
2d5e807e7d fix: suppress tinycss SyntaxWarning by initializing filter before imports
Move warnings.filterwarnings() to the very top of __main__.py before any
other imports to ensure it takes effect before tinycss is loaded during
module discovery. This fixes the SyntaxWarning from the unmaintained
tinycss package (a dependency of subby).

refs #20
2025-09-30 13:55:21 -06:00
Sp5rky
ebe5012aeb fix: upgrade pyplayready to 0.6.3 and resolve import compatibility issues
- Upgrade pyplayready from >=0.6.0 to >=0.6.3
- Upgrade subby to 0.3.23 (commit 5a925c36) for lxml 6.x compatibility
- Upgrade lxml from 5.4.0 to 6.0.2
- Fix pyplayready exception imports to use root-level imports (compatible with both 0.6.0 and 0.6.3)
- Suppress SyntaxWarning from unmaintained tinycss dependency

The pyplayready package changed its API between versions:
- v0.6.0: exceptions in pyplayready.exceptions module
- v0.6.3: exceptions in pyplayready.misc.exceptions module
Using root-level imports (from pyplayready import ...) works with both versions.

ref #20
2025-09-30 13:50:17 -06:00
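The compatibility trick is visible in the prd.py diff further below; in short, importing the exceptions from the package root sidesteps the moved submodule:

```python
# Works on both pyplayready 0.6.0 and 0.6.3: the exceptions are re-exported at
# the package root, while their submodule moved between releases
# (pyplayready.exceptions -> pyplayready.misc.exceptions).
from pyplayready import InvalidCertificateChain, OutdatedDevice
```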
Sp5rky
06c4c02312 Merge branch 'main' of https://github.com/unshackle-dl/unshackle 2025-09-30 12:54:25 -06:00
Sp5rky
c77a8737ef fix: update lxml constraint and pyplayready import path
- Update lxml dependency to allow version 6.x (required by subby 0.3.23)
- Fix pyplayready exception import path (moved to misc.exceptions in 0.6.3)

fixes #17
2025-09-30 12:54:13 -06:00
Andy
e0154741f0 fix: dl.py
Old leftover bug: sub-format defaulted to SRT.
2025-09-30 05:03:22 +00:00
TPD94
ceea4ac827 Fixed RemoteCDM, added curl_cffi to instance checks. (#18)
* Fix remoteCDM, add curl_cffi to instance check

* Update dash.py
2025-09-29 20:57:42 -06:00
Andy
e92e5c2ba3 feat: add AC4 codec support in Audio class and update mime/profile handling 2025-09-26 04:42:06 +00:00
Andy
30314fdb46 Fix missing movie/episode changes for last commit 2025-09-26 01:41:07 +00:00
Andy
a2c6798fe6 fix: optimize audio track sorting by grouping descriptive tracks and sorting by bitrate; fixes a bug where ATMOS or DD+ was not identified as the highest available quality in file naming. 2025-09-25 23:21:35 +00:00
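A minimal sketch of the sorting idea, with illustrative data and assumed attribute names rather than unshackle's Audio class: descriptive tracks are grouped after regular ones, and within each group tracks are ordered by bitrate so the Atmos/DD+ track surfaces first.

```python
from dataclasses import dataclass


@dataclass
class AudioTrack:
    codec: str
    bitrate: int       # bits per second
    descriptive: bool  # e.g. audio-description tracks


tracks = [
    AudioTrack("AAC", 128_000, False),
    AudioTrack("DDP Atmos", 768_000, False),
    AudioTrack("AAC", 192_000, True),
]

# Non-descriptive tracks first, then highest bitrate first within each group,
# so the Atmos/DD+ track ends up at the front and drives the filename.
tracks.sort(key=lambda t: (t.descriptive, -t.bitrate))
print([t.codec for t in tracks])  # ['DDP Atmos', 'AAC', 'AAC']
```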
Andy
bc26bf3046 feat: update changelog for version 1.4.7 2025-09-25 06:29:46 +00:00
Andy
35efdbff6d feat: add curl_cffi session support with browser impersonation
Add new session utility with curl_cffi support for anti-bot protection
Update all manifest parsers (DASH, HLS, ISM, M3U8) to accept curl_cffi sessions
Add browser impersonation support (Chrome, Firefox, Safari)
Fix cookie handling compatibility between requests and curl_cffi
Suppress HTTPS proxy warnings for better UX
Maintain full backward compatibility with requests.Session
2025-09-25 06:27:14 +00:00
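A hedged sketch of the session idea (not unshackle's actual session utility): when impersonation is requested, a curl_cffi session is created with a browser fingerprint; otherwise a plain requests.Session is returned for backward compatibility. The impersonation target shown is an assumption.

```python
import requests
from curl_cffi import requests as curl_requests


def make_session(impersonate: str | None = "chrome110"):
    """Return a curl_cffi session with a browser fingerprint, or a plain requests.Session."""
    if impersonate:
        # curl_cffi mimics the TLS/HTTP2 fingerprint of the chosen browser target.
        return curl_requests.Session(impersonate=impersonate)
    # Falling back to requests.Session keeps full backward compatibility.
    return requests.Session()


session = make_session()
print(type(session).__name__)
```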
Andy
63b7a49c1a feat: Add decrypt_labs_api_key to Config initialization and change duplicate track log level to debug 2025-09-25 06:22:50 +00:00
Andy
98ecf6f876 feat: Add download retry count option to download function 2025-09-23 01:32:00 +00:00
Andy
5df6914536 feat: Add options for required subtitles and best available quality in download command 2025-09-23 01:28:55 +00:00
Andy
c1df074965 Change new dynamic CDM selection text to be in Debug only 2025-09-14 04:25:57 +00:00
Andy
da60a396dd Fix: Prevent KeyError when reusing remote CDMs in dynamic selection
Creates a copy of the CDM dictionary before modification to prevent the original configuration from being mutated, allowing the same CDM to be selected multiple times within a session without errors.
2025-09-14 01:14:01 +00:00
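At its core the fix is the familiar copy-before-mutate pattern; a minimal sketch follows, with the configuration shape assumed for illustration.

```python
# Sketch only: copy the per-service CDM mapping before mutating it, so the same
# configuration entry can be selected again later in the session.
config_cdm = {"SERVICE": {"type": "decrypt_labs", "name": "remote_l1"}}  # assumed shape

cdm_entry = dict(config_cdm["SERVICE"])  # shallow copy; the original stays intact
cdm_entry.pop("type", None)              # safe: only the copy is modified

print(config_cdm["SERVICE"])  # still contains 'type'
```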
Andy
a99a391395 chore: bump version to 1.4.6 and update changelog 2025-09-13 04:01:45 +00:00
Andy
ed32939d83 feat: Add quality-based CDM selection for dynamic CDM switching
Implements dynamic CDM selection based on video track resolution to optimize
CDM usage. Automatically selects appropriate security level (L3/SL2K for ≤1080p, L1/SL3K for >1080p) based on content requirements.

Key Features:
- Quality-based CDM configuration with threshold operators (>=, >, <=, <)
- Pre-selection based on highest quality across all video tracks
- Maintains backward compatibility with existing CDM configurations
- Single CDM per session to avoid inefficient switching
2025-09-13 03:59:13 +00:00
Andy
4006593a8a Fix: Implement lazy DRM loading for multi-track key retrieval
- Add deferred DRM loading to M3U8 parser to mark tracks for later processing
- Optimize prepare_drm to load DRM just-in-time during download process
2025-09-12 06:38:14 +00:00
Andy
307be4549b Fix vault caching count and NoneType iteration issues
- Fix 'NoneType' object is not iterable error in decrypt_labs_remote_cdm
- Fix vault count display showing 0/3 instead of actual successful vault count
2025-09-10 06:33:46 +00:00
Andy
a82828768d feat: automatic audio language metadata for embedded audio tracks
- Add intelligent embedded audio language detection at mux stage
- Automatically set audio language metadata when no separate audio tracks exist
- Respect user flags (-V, --no-audio) to avoid unnecessary processing
- Smart video track selection based on title language with fallbacks
- Improved default track selection to prioritize title language matches
- Enhanced FFmpeg repackaging with audio stream metadata injection
- Works automatically for all services without service-specific code
2025-09-10 00:57:14 +00:00
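The FFmpeg side of the metadata injection can be sketched as a plain remux that tags the first audio stream. Paths and the language value are placeholders; unshackle's actual repackage step carries more options.

```python
import subprocess


def tag_embedded_audio_language(src: str, dst: str, language: str = "eng") -> None:
    """Remux without re-encoding and set the language tag on the first audio stream."""
    subprocess.run(
        [
            "ffmpeg", "-y",
            "-i", src,
            "-map", "0",
            "-c", "copy",                               # stream copy, no re-encode
            "-metadata:s:a:0", f"language={language}",  # tag the first audio stream
            dst,
        ],
        check=True,
    )
```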
Andy
d18a5de0d0 fix: Improve import ordering and code formatting
- Reorder imports in decrypt_labs_remote_cdm.py for better organization
- Clean up trailing whitespace in SQLite.py
2025-09-10 00:53:52 +00:00
Andy
04b540b363 fix: Resolve service name transmission and vault case sensitivity issues
Fixed DecryptLabsRemoteCDM sending 'generic' instead of proper service names and added case-insensitive vault lookups for SQLite/MySQL vaults. Also added local vault integration to DecryptLabsRemoteCDM
2025-09-09 18:53:11 +00:00
Andy
6137146705 chore: bump version to 1.4.5 and update changelog
- Update version from 1.4.4 to 1.4.5 in core/__init__.py
- Add comprehensive changelog entry for v1.4.5 with all changes since 1.4.4
- Include enhanced CDM support, caching improvements, and bug fixes
2025-09-09 03:53:42 +00:00
Andy
859d09693c feat(cdm): Update User-Agent to use dynamic version
- Replace hardcoded version "1.0" with dynamic version import in DecryptLabsRemoteCDM User-Agent header.
2025-09-09 03:49:01 +00:00
Andy
5f022635cb feat(cdm): Optimize get_cached_keys_if_exists for L1/L2 devices
- Always send get_cached_keys_if_exists=True for L1/L2 devices to leverage the API's automatic caching optimization. This reduces unnecessary license requests by prioritizing cached keys for these security levels.
2025-09-06 22:10:35 +00:00
Andy
ad66502c0c feat(cdm): Add fallback to Widevine common cert for L1 devices
- Use default Widevine common privacy certificate when no service certificate is provided for L1 devices
- Add get_widevine_service_certificate method to EXAMPLE service for config-based certificates
- Improve certificate handling with more descriptive return messages
2025-09-06 20:30:11 +00:00
Andy
e462f07b7a Merge branch 'main' of https://github.com/unshackle-dl/unshackle 2025-09-06 19:39:39 +00:00
Andy
83b600e999 fix(cdm): Clean up session data when retrieving cached keys
Remove decrypt_labs_session_id and challenge from session when cached keys exist but there are missing kids, ensuring clean state for subsequent requests.
2025-09-06 19:38:54 +00:00
Andy
ea8a7b00c9 fix(cdm): Clean up session data when retrieving cached keys
Remove decrypt_labs_session_id and challenge from session when cached keys exist but there are missing kids, ensuring clean state for subsequent requests.
2025-09-06 18:52:20 +00:00
Andy
16ee4175a4 feat(dl): Truncate PSSH string for display in non-debug mode
* Added `_truncate_pssh_for_display` method to limit the width of PSSH strings shown in the console.
* Ensures better readability of DRM information by truncating long strings.
2025-09-05 02:15:10 +00:00
Andy
f722ec69b6 fix(tags): 🐛 Fix formatting issues 2025-09-03 14:51:22 +00:00
Andy
2330297ea4 feat(kv): Enhance vault loading and key copying logic
* Implemented `_load_vaults` function to load and validate vaults by name.
* Improved `_copy_service_data` to handle key copying with better logging and error handling.
* Updated `copy` command to utilize the new vault loading function and streamline the process.
* Enhanced key insertion logic in MySQL and SQLite vaults to avoid inserting existing keys.
2025-09-03 14:50:51 +00:00
Andy
86bb162868 feat(tags): Enhance tag handling for TV shows and movies from Simkl data
Fixes #15
2025-09-02 22:01:44 +00:00
Andy
501cfd68e8 fix(cdm): Add error message for missing service certificate in CDM session 2025-09-02 19:16:34 +00:00
Andy
76fb2eea95 feat: implement intelligent caching system for CDM license requests 2025-09-02 18:48:34 +00:00
Andy
ea5ec40bcd Merge branch 'main' of https://github.com/unshackle-dl/unshackle 2025-09-02 17:34:12 +00:00
Andy
329850b043 feat(cdm): Enhance key retrieval logic and improve cached keys handling 2025-09-02 17:33:31 +00:00
Andy
73595f3b50 feat(cdm): Enhance key retrieval logic and improve cached keys handling 2025-09-02 17:23:02 +00:00
Andy
1e82283133 fix(tags): Fix import order. 2025-09-02 04:13:43 +00:00
Andy
ab13dde9d2 feat(changelog): Update changelog for version 1.4.4 with enhanced CDM support, configuration options, and various improvements 2025-09-02 04:10:28 +00:00
Andy
9fd0895128 feat(cdm): Refactor DecryptLabsRemoteCDM full support for Widevine/Playready and ChromeCDM 2025-09-02 04:02:52 +00:00
Andy
ed744205ad fix(tags): 🐛 Fix Matroska tag compliance with official specification
- Update IMDB tags to use ID only (tt123456) instead of URLs
- Update TMDB tags to use prefix/id format (movie/123456, tv/123456)
- Update TVDB tags to use numeric ID only
- Add XML escaping for tag values
- Fix XML declaration to use double quotes

Fixes #15
2025-09-01 21:02:08 +00:00
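The spec-compliant value formats listed above can be illustrated briefly. The IDs are the commit's own examples and the escaping uses the standard library; this is an illustration only, not unshackle's tags module.

```python
from xml.sax.saxutils import escape

tag_values = {
    "IMDB": "tt123456",      # ID only, no URL
    "TMDB": "movie/123456",  # prefix/id form: movie/<id> or tv/<id>
    "TVDB": "123456",        # numeric ID only
}

# XML-escape every value before it is written into the Matroska tags XML.
escaped = {name: escape(value) for name, value in tag_values.items()}
print(escaped)
```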
34 changed files with 2051 additions and 1044 deletions

View File

@@ -5,6 +5,196 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.4.8] - 2025-10-08
### Added
- **Exact Language Matching**: New `--exact-lang` flag for precise language matching
- Enables strict language code matching without fallbacks
- **No-Mux Flag**: New `--no-mux` flag to skip muxing tracks into container files
- Useful for keeping individual track files separate
- **DecryptLabs API Integration for HTTP Vault**: Enhanced vault support
- Added DecryptLabs API support to HTTP vault for improved key retrieval
- **AC4 Audio Codec Support**: Enhanced audio format handling
- Added AC4 codec support in Audio class with updated mime/profile handling
- **pysubs2 Subtitle Conversion**: Extended subtitle format support
- Added pysubs2 subtitle conversion with extended format support
- Configurable conversion method in configuration
### Changed
- **Audio Track Sorting**: Optimized audio track selection logic
- Improved audio track sorting by grouping descriptive tracks and sorting by bitrate
- Better identification of ATMOS and DD+ as highest quality for filenaming
- **pyplayready Update**: Upgraded to version 0.6.3
- Updated import paths to resolve compatibility issues
- Fixed lxml constraints for better dependency management
- **pysubs2 Conversion Method**: Moved from auto to manual configuration
- pysubs2 no longer auto-selected during testing phase
### Fixed
- **Remote CDM**: Fixed curl_cffi compatibility
- Added curl_cffi to instance checks in RemoteCDM
- **Temporary File Handling**: Improved encoding handling
- Specified UTF-8 encoding when opening temporary files
### Reverted
- **tinycss SyntaxWarning Suppression**: Removed ineffective warning filter
- Reverted warnings filter that didn't work as expected for suppressing tinycss warnings
## [1.4.7] - 2025-09-25
### Added
- **curl_cffi Session Support**: Enhanced anti-bot protection with browser impersonation
- Added new session utility with curl_cffi support for bypassing anti-bot measures
- Browser impersonation support for Chrome, Firefox, and Safari user agents
- Full backward compatibility with requests.Session maintained
- Suppressed HTTPS proxy warnings for improved user experience
- **Download Retry Functionality**: Configurable retry mechanism for failed downloads
- Added retry count option to download function for improved reliability
- **Subtitle Requirements Options**: Enhanced subtitle download control
- Added options for required subtitles in download command
- Better control over subtitle track selection and requirements
- **Quality Selection Enhancement**: Improved quality selection options
- Added best available quality option in download command for optimal track selection
- **DecryptLabs API Integration**: Enhanced remote CDM configuration
- Added decrypt_labs_api_key to Config initialization for better API integration
### Changed
- **Manifest Parser Updates**: Enhanced compatibility across all parsers
- Updated DASH, HLS, ISM, and M3U8 parsers to accept curl_cffi sessions
- Improved cookie handling compatibility between requests and curl_cffi
- **Logging Improvements**: Reduced log verbosity for better user experience
- Changed duplicate track log level to debug to reduce console noise
- Dynamic CDM selection messages moved to debug-only output
### Fixed
- **Remote CDM Reuse**: Fixed KeyError in dynamic CDM selection
- Prevents KeyError when reusing remote CDMs in dynamic selection process
- Creates copy of CDM dictionary before modification to prevent configuration mutation
- Allows same CDM to be selected multiple times within session without errors
## [1.4.6] - 2025-09-13
### Added
- **Quality-Based CDM Selection**: Dynamic CDM selection based on video resolution
- Automatically selects appropriate CDM (L3/L1) based on video track quality
- Supports quality thresholds in configuration (>=, >, <=, <, exact match)
- Pre-selects optimal CDM based on highest quality across all video tracks
- Maintains backward compatibility with existing CDM configurations
- **Automatic Audio Language Metadata**: Intelligent embedded audio language detection
- Automatically sets audio language metadata when no separate audio tracks exist
- Smart video track selection based on title language with fallbacks
- Enhanced FFmpeg repackaging with audio stream metadata injection
- **Lazy DRM Loading**: Deferred DRM loading for multi-track key retrieval optimization
- Add deferred DRM loading to M3U8 parser to mark tracks for later processing
- Just-in-time DRM loading during download process for better performance
### Changed
- **Enhanced CDM Management**: Improved CDM switching logic for multi-quality downloads
- CDM selection now based on highest quality track to avoid inefficient switching
- Quality-based selection only within same DRM type (Widevine-to-Widevine, PlayReady-to-PlayReady)
- Single CDM used per session for better performance and reliability
### Fixed
- **Vault Caching Issues**: Fixed vault count display and NoneType iteration errors
- Fix 'NoneType' object is not iterable error in DecryptLabsRemoteCDM
- Fix vault count display showing 0/3 instead of actual successful vault count
- **Service Name Transmission**: Resolved DecryptLabsRemoteCDM service name issues
- Fixed DecryptLabsRemoteCDM sending 'generic' instead of proper service names
- Added case-insensitive vault lookups for SQLite/MySQL vaults
- Added local vault integration to DecryptLabsRemoteCDM
- **Import Organization**: Improved import ordering and code formatting
- Reorder imports in decrypt_labs_remote_cdm.py for better organization
- Clean up trailing whitespace in vault files
### Configuration
- **New CDM Configuration Format**: Extended `cdm:` section supports quality-based selection
```yaml
cdm:
  SERVICE_NAME:
    "<=1080": l3_cdm_name
    ">1080": l1_cdm_name
    default: l3_cdm_name
```
## [1.4.5] - 2025-09-09
### Added
- **Enhanced CDM Key Caching**: Improved key caching and session management for L1/L2 devices
- Optimized `get_cached_keys_if_exists` functionality for better performance with L1/L2 devices
- Enhanced cached key retrieval logic with improved session handling
- **Widevine Common Certificate Fallback**: Added fallback to Widevine common certificate for L1 devices
- Improved compatibility for L1 devices when service certificates are unavailable
- **Enhanced Vault Loading**: Improved vault loading and key copying logic
- Better error handling and key management in vault operations
- **PSSH Display Optimization**: Truncated PSSH string display in non-debug mode for cleaner output
- **CDM Error Messaging**: Added error messages for missing service certificates in CDM sessions
### Changed
- **Dynamic Version Headers**: Updated User-Agent headers to use dynamic version strings
- DecryptLabsRemoteCDM now uses dynamic version import instead of hardcoded version
- **Intelligent CDM Caching**: Implemented intelligent caching system for CDM license requests
- Enhanced caching logic reduces redundant license requests and improves performance
- **Enhanced Tag Handling**: Improved tag handling for TV shows and movies from Simkl data
- Better metadata processing and formatting for improved media tagging
### Fixed
- **CDM Session Management**: Clean up session data when retrieving cached keys
- Remove decrypt_labs_session_id and challenge from session when cached keys exist but there are missing kids
- Ensures clean state for subsequent requests and prevents session conflicts
- **Tag Formatting**: Fixed formatting issues in tag processing
- **Import Order**: Fixed import order issues in tags module
## [1.4.4] - 2025-09-02
### Added
- **Enhanced DecryptLabs CDM Support**: Comprehensive remote CDM functionality
- Full support for Widevine, PlayReady, and ChromeCDM through DecryptLabsRemoteCDM
- Enhanced session management and caching support for remote WV/PR operations
- Support for cached keys and improved license handling
- New CDM configurations for Chrome and PlayReady devices with updated User-Agent and service certificate
- **Advanced Configuration Options**: New device and language preferences
- Added configuration options for device certificate status list
- Enhanced language preference settings
### Changed
- **DRM Decryption Enhancements**: Streamlined decryption process
- Simplified decrypt method by removing unused parameter and streamlined logic
- Improved DecryptLabs CDM configurations with better device support
### Fixed
- **Matroska Tag Compliance**: Enhanced media container compatibility
- Fixed Matroska tag compliance with official specification
- **Application Branding**: Cleaned up version display
- Removed old devine version reference from banner to avoid developer confusion
- Updated branding while maintaining original GNU license compliance
- **IP Information Handling**: Improved geolocation services
- Enhanced get_ip_info functionality with better failover handling
- Added support for 429 error handling and multiple API provider fallback
- Implemented cached IP info retrieval with fallback tester to avoid rate limiting
- **Dependencies**: Streamlined package requirements
- Removed unnecessary data extra requirement from langcodes
### Removed
- Deprecated version references in application banner for clarity
## [1.4.3] - 2025-08-20
### Added

View File

@@ -656,11 +656,12 @@ Control subtitle conversion and SDH (hearing-impaired) stripping behavior.
- `subby`: Always use subby with CommonIssuesFixer.
- `subtitleedit`: Prefer SubtitleEdit when available; otherwise fallback to standard conversion.
- `pycaption`: Use only the pycaption library (no SubtitleEdit, no subby).
- `pysubs2`: Use pysubs2 library (supports SRT, SSA, ASS, WebVTT, TTML, SAMI, MicroDVD, MPL2, TMP formats).
- `sdh_method`: How to strip SDH cues. Default: `auto`.
- `auto`: Try subby for SRT first, then SubtitleEdit, then filter-subs.
- `subby`: Use subbys SDHStripper (SRT only).
- `subtitleedit`: Use SubtitleEdits RemoveTextForHI when available.
- `subby`: Use subby's SDHStripper (SRT only).
- `subtitleedit`: Use SubtitleEdit's RemoveTextForHI when available.
- `filter-subs`: Use the subtitle-filter library.
Example:

View File

@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "unshackle"
version = "1.4.3"
version = "1.4.8"
description = "Modular Movie, TV, and Music Archival Software."
authors = [{ name = "unshackle team" }]
requires-python = ">=3.10,<3.13"
@@ -33,7 +33,7 @@ dependencies = [
"crccheck>=1.3.0,<2",
"jsonpickle>=3.0.4,<4",
"langcodes>=3.4.0,<4",
"lxml>=5.2.1,<6",
"lxml>=5.2.1,<7",
"pproxy>=2.7.9,<3",
"protobuf>=4.25.3,<5",
"pycaption>=2.2.6,<3",
@@ -54,10 +54,11 @@ dependencies = [
"urllib3>=2.2.1,<3",
"chardet>=5.2.0,<6",
"curl-cffi>=0.7.0b4,<0.8",
"pyplayready>=0.6.0,<0.7",
"pyplayready>=0.6.3,<0.7",
"httpx>=0.28.1,<0.29",
"cryptography>=45.0.0",
"subby",
"pysubs2>=1.7.0,<2",
]
[project.urls]
@@ -113,4 +114,4 @@ no_implicit_optional = true
[tool.uv.sources]
unshackle = { workspace = true }
subby = { git = "https://github.com/vevv/subby.git" }
subby = { git = "https://github.com/vevv/subby.git", rev = "5a925c367ffb3f5e53fd114ae222d3be1fdff35d" }

View File

@@ -66,6 +66,18 @@ from unshackle.core.vaults import Vaults
class dl:
@staticmethod
def _truncate_pssh_for_display(pssh_string: str, drm_type: str) -> str:
"""Truncate PSSH string for display when not in debug mode."""
if logging.root.level == logging.DEBUG or not pssh_string:
return pssh_string
max_width = console.width - len(drm_type) - 12
if len(pssh_string) <= max_width:
return pssh_string
return pssh_string[: max_width - 3] + "..."
@click.command(
short_help="Download, Decrypt, and Mux tracks for titles from a Service.",
cls=Services,
@@ -161,7 +173,19 @@ class dl:
help="Language wanted for Audio, overrides -l/--lang for audio tracks.",
)
@click.option("-sl", "--s-lang", type=LANGUAGE_RANGE, default=["all"], help="Language wanted for Subtitles.")
@click.option(
"--require-subs",
type=LANGUAGE_RANGE,
default=[],
help="Required subtitle languages. Downloads all subtitles only if these languages exist. Cannot be used with --s-lang.",
)
@click.option("-fs", "--forced-subs", is_flag=True, default=False, help="Include forced subtitle tracks.")
@click.option(
"--exact-lang",
is_flag=True,
default=False,
help="Use exact language matching (no variants). With this flag, -l es-419 matches ONLY es-419, not es-ES or other variants.",
)
@click.option(
"--proxy",
type=str,
@@ -195,7 +219,7 @@ class dl:
@click.option(
"--sub-format",
type=SubtitleCodecChoice(Subtitle.Codec),
default="srt",
default=None,
help="Set Output Subtitle Format, only converting if necessary.",
)
@click.option("-V", "--video-only", is_flag=True, default=False, help="Only download video tracks.")
@@ -240,6 +264,7 @@ class dl:
@click.option(
"--no-source", is_flag=True, default=False, help="Disable the source tag from the output file name and path."
)
@click.option("--no-mux", is_flag=True, default=False, help="Do not mux tracks into a container file.")
@click.option(
"--workers",
type=int,
@@ -251,6 +276,13 @@ class dl:
@click.option(
"--reset-cache", "reset_cache", is_flag=True, default=False, help="Clear title cache before fetching."
)
@click.option(
"--best-available",
"best_available",
is_flag=True,
default=False,
help="Continue with best available quality if requested resolutions are not available.",
)
@click.pass_context
def cli(ctx: click.Context, **kwargs: Any) -> dl:
return dl(ctx, **kwargs)
@@ -310,6 +342,16 @@ class dl:
vault_copy = vault.copy()
del vault_copy["type"]
if vault_type.lower() == "api" and "decrypt_labs" in vault_name.lower():
if "token" not in vault_copy or not vault_copy["token"]:
if config.decrypt_labs_api_key:
vault_copy["token"] = config.decrypt_labs_api_key
else:
self.log.warning(
f"No token provided for DecryptLabs vault '{vault_name}' and no global "
"decrypt_labs_api_key configured"
)
if vault_type.lower() == "sqlite":
try:
self.vaults.load_critical(vault_type, **vault_copy)
@@ -345,7 +387,10 @@ class dl:
sys.exit(1)
if self.cdm:
if hasattr(self.cdm, "device_type") and self.cdm.device_type.name in ["ANDROID", "CHROME"]:
if isinstance(self.cdm, DecryptLabsRemoteCDM):
drm_type = "PlayReady" if self.cdm.is_playready else "Widevine"
self.log.info(f"Loaded {drm_type} Remote CDM: DecryptLabs (L{self.cdm.security_level})")
elif hasattr(self.cdm, "device_type") and self.cdm.device_type.name in ["ANDROID", "CHROME"]:
self.log.info(f"Loaded Widevine CDM: {self.cdm.system_id} (L{self.cdm.security_level})")
else:
self.log.info(
@@ -427,7 +472,9 @@ class dl:
v_lang: list[str],
a_lang: list[str],
s_lang: list[str],
require_subs: list[str],
forced_subs: bool,
exact_lang: bool,
sub_format: Optional[Subtitle.Codec],
video_only: bool,
audio_only: bool,
@@ -445,8 +492,10 @@ class dl:
no_proxy: bool,
no_folder: bool,
no_source: bool,
no_mux: bool,
workers: Optional[int],
downloads: int,
best_available: bool,
*_: Any,
**__: Any,
) -> None:
@@ -454,6 +503,10 @@ class dl:
self.search_source = None
start_time = time.time()
if require_subs and s_lang != ["all"]:
self.log.error("--require-subs and --s-lang cannot be used together")
sys.exit(1)
# Check if dovi_tool is available when hybrid mode is requested
if any(r == Video.Range.HYBRID for r in range_):
from unshackle.core.binaries import DoviTool
@@ -663,7 +716,9 @@ class dl:
else:
if language not in processed_video_lang:
processed_video_lang.append(language)
title.tracks.videos = title.tracks.by_language(title.tracks.videos, processed_video_lang)
title.tracks.videos = title.tracks.by_language(
title.tracks.videos, processed_video_lang, exact_match=exact_lang
)
if not title.tracks.videos:
self.log.error(f"There's no {processed_video_lang} Video Track...")
sys.exit(1)
@@ -688,8 +743,14 @@ class dl:
res_list = ", ".join([f"{x}p" for x in missing_resolutions[:-1]]) + " or "
res_list = f"{res_list}{missing_resolutions[-1]}p"
plural = "s" if len(missing_resolutions) > 1 else ""
self.log.error(f"There's no {res_list} Video Track{plural}...")
sys.exit(1)
if best_available:
self.log.warning(
f"There's no {res_list} Video Track{plural}, continuing with available qualities..."
)
else:
self.log.error(f"There's no {res_list} Video Track{plural}...")
sys.exit(1)
# choose best track by range and quality
if any(r == Video.Range.HYBRID for r in range_):
@@ -725,23 +786,41 @@ class dl:
title.tracks.videos = selected_videos
# filter subtitle tracks
if s_lang and "all" not in s_lang:
if require_subs:
missing_langs = [
lang
for lang in require_subs
if not any(is_close_match(lang, [sub.language]) for sub in title.tracks.subtitles)
]
if missing_langs:
self.log.error(f"Required subtitle language(s) not found: {', '.join(missing_langs)}")
sys.exit(1)
self.log.info(
f"Required languages found ({', '.join(require_subs)}), downloading all available subtitles"
)
elif s_lang and "all" not in s_lang:
from unshackle.core.utilities import is_exact_match
match_func = is_exact_match if exact_lang else is_close_match
missing_langs = [
lang_
for lang_ in s_lang
if not any(is_close_match(lang_, [sub.language]) for sub in title.tracks.subtitles)
if not any(match_func(lang_, [sub.language]) for sub in title.tracks.subtitles)
]
if missing_langs:
self.log.error(", ".join(missing_langs) + " not found in tracks")
sys.exit(1)
title.tracks.select_subtitles(lambda x: is_close_match(x.language, s_lang))
title.tracks.select_subtitles(lambda x: match_func(x.language, s_lang))
if not title.tracks.subtitles:
self.log.error(f"There's no {s_lang} Subtitle Track...")
sys.exit(1)
if not forced_subs:
title.tracks.select_subtitles(lambda x: not x.forced or is_close_match(x.language, lang))
title.tracks.select_subtitles(lambda x: not x.forced)
# filter audio tracks
# might have no audio tracks if part of the video, e.g. transport stream hls
@@ -799,7 +878,7 @@ class dl:
elif "all" not in processed_lang:
per_language = 1
title.tracks.audio = title.tracks.by_language(
title.tracks.audio, processed_lang, per_language=per_language
title.tracks.audio, processed_lang, per_language=per_language, exact_match=exact_lang
)
if not title.tracks.audio:
self.log.error(f"There's no {processed_lang} Audio Track, cannot continue...")
@@ -847,9 +926,40 @@ class dl:
selected_tracks, tracks_progress_callables = title.tracks.tree(add_progress=True)
for track in title.tracks:
if hasattr(track, "needs_drm_loading") and track.needs_drm_loading:
track.load_drm_if_needed(service)
download_table = Table.grid()
download_table.add_row(selected_tracks)
video_tracks = title.tracks.videos
if video_tracks:
highest_quality = max((track.height for track in video_tracks if track.height), default=0)
if highest_quality > 0:
if isinstance(self.cdm, (WidevineCdm, DecryptLabsRemoteCDM)) and not (
isinstance(self.cdm, DecryptLabsRemoteCDM) and self.cdm.is_playready
):
quality_based_cdm = self.get_cdm(
self.service, self.profile, drm="widevine", quality=highest_quality
)
if quality_based_cdm and quality_based_cdm != self.cdm:
self.log.debug(
f"Pre-selecting Widevine CDM based on highest quality {highest_quality}p across all video tracks"
)
self.cdm = quality_based_cdm
elif isinstance(self.cdm, (PlayReadyCdm, DecryptLabsRemoteCDM)) and (
isinstance(self.cdm, DecryptLabsRemoteCDM) and self.cdm.is_playready
):
quality_based_cdm = self.get_cdm(
self.service, self.profile, drm="playready", quality=highest_quality
)
if quality_based_cdm and quality_based_cdm != self.cdm:
self.log.debug(
f"Pre-selecting PlayReady CDM based on highest quality {highest_quality}p across all video tracks"
)
self.cdm = quality_based_cdm
dl_start_time = time.time()
if skip_dl:
@@ -874,7 +984,12 @@ class dl:
),
licence=partial(
service.get_playready_license
if isinstance(self.cdm, PlayReadyCdm)
if (
isinstance(self.cdm, PlayReadyCdm)
or (
isinstance(self.cdm, DecryptLabsRemoteCDM) and self.cdm.is_playready
)
)
and hasattr(service, "get_playready_license")
else service.get_widevine_license,
title=title,
@@ -991,11 +1106,11 @@ class dl:
if family_dir.exists():
fonts = family_dir.glob("*.*tf")
for font in fonts:
title.tracks.add(Attachment(font, f"{font_name} ({font.stem})"))
title.tracks.add(Attachment(path=font, name=f"{font_name} ({font.stem})"))
font_count += 1
elif fonts_from_system:
for font in fonts_from_system:
title.tracks.add(Attachment(font, f"{font_name} ({font.stem})"))
title.tracks.add(Attachment(path=font, name=f"{font_name} ({font.stem})"))
font_count += 1
else:
self.log.warning(f"Subtitle uses font [text2]{font_name}[/] but it could not be found...")
@@ -1039,7 +1154,12 @@ class dl:
muxed_paths = []
if isinstance(title, (Movie, Episode)):
if no_mux:
# Skip muxing, handle individual track files
for track in title.tracks:
if track.path and track.path.exists():
muxed_paths.append(track.path)
elif isinstance(title, (Movie, Episode)):
progress = Progress(
TextColumn("[progress.description]{task.description}"),
SpinnerColumn(finished_text=""),
@@ -1127,8 +1247,13 @@ class dl:
with Live(Padding(progress, (0, 5, 1, 5)), console=console):
for task_id, task_tracks in multiplex_tasks:
progress.start_task(task_id) # TODO: Needed?
audio_expected = not video_only and not no_audio
muxed_path, return_code, errors = task_tracks.mux(
str(title), progress=partial(progress.update, task_id=task_id), delete=False
str(title),
progress=partial(progress.update, task_id=task_id),
delete=False,
audio_expected=audio_expected,
title_language=title.language,
)
muxed_paths.append(muxed_path)
if return_code >= 2:
@@ -1153,19 +1278,65 @@ class dl:
# dont mux
muxed_paths.append(title.tracks.audio[0].path)
for muxed_path in muxed_paths:
media_info = MediaInfo.parse(muxed_path)
if no_mux:
# Handle individual track files without muxing
final_dir = config.directories.downloads
final_filename = title.get_filename(media_info, show_service=not no_source)
if not no_folder and isinstance(title, (Episode, Song)):
final_dir /= title.get_filename(media_info, show_service=not no_source, folder=True)
# Create folder based on title
# Use first available track for filename generation
sample_track = title.tracks.videos[0] if title.tracks.videos else (
title.tracks.audio[0] if title.tracks.audio else (
title.tracks.subtitles[0] if title.tracks.subtitles else None
)
)
if sample_track and sample_track.path:
media_info = MediaInfo.parse(sample_track.path)
final_dir /= title.get_filename(media_info, show_service=not no_source, folder=True)
final_dir.mkdir(parents=True, exist_ok=True)
final_path = final_dir / f"{final_filename}{muxed_path.suffix}"
shutil.move(muxed_path, final_path)
tags.tag_file(final_path, title, self.tmdb_id)
for track_path in muxed_paths:
# Generate appropriate filename for each track
media_info = MediaInfo.parse(track_path)
base_filename = title.get_filename(media_info, show_service=not no_source)
# Add track type suffix to filename
track = next((t for t in title.tracks if t.path == track_path), None)
if track:
if isinstance(track, Video):
track_suffix = f".{track.codec.name if hasattr(track.codec, 'name') else 'video'}"
elif isinstance(track, Audio):
lang_suffix = f".{track.language}" if track.language else ""
track_suffix = f"{lang_suffix}.{track.codec.name if hasattr(track.codec, 'name') else 'audio'}"
elif isinstance(track, Subtitle):
lang_suffix = f".{track.language}" if track.language else ""
forced_suffix = ".forced" if track.forced else ""
sdh_suffix = ".sdh" if track.sdh else ""
track_suffix = f"{lang_suffix}{forced_suffix}{sdh_suffix}"
else:
track_suffix = ""
final_path = final_dir / f"{base_filename}{track_suffix}{track_path.suffix}"
else:
final_path = final_dir / f"{base_filename}{track_path.suffix}"
shutil.move(track_path, final_path)
self.log.debug(f"Saved: {final_path.name}")
else:
# Handle muxed files
for muxed_path in muxed_paths:
media_info = MediaInfo.parse(muxed_path)
final_dir = config.directories.downloads
final_filename = title.get_filename(media_info, show_service=not no_source)
if not no_folder and isinstance(title, (Episode, Song)):
final_dir /= title.get_filename(media_info, show_service=not no_source, folder=True)
final_dir.mkdir(parents=True, exist_ok=True)
final_path = final_dir / f"{final_filename}{muxed_path.suffix}"
shutil.move(muxed_path, final_path)
tags.tag_file(final_path, title, self.tmdb_id)
title_dl_time = time_elapsed_since(dl_start_time)
console.print(
@@ -1201,21 +1372,43 @@ class dl:
if not drm:
return
if isinstance(drm, Widevine) and not isinstance(self.cdm, WidevineCdm):
self.cdm = self.get_cdm(self.service, self.profile, drm="widevine")
elif isinstance(drm, PlayReady) and not isinstance(self.cdm, PlayReadyCdm):
self.cdm = self.get_cdm(self.service, self.profile, drm="playready")
if isinstance(track, Video) and track.height:
pass
if isinstance(drm, Widevine):
if not isinstance(self.cdm, (WidevineCdm, DecryptLabsRemoteCDM)) or (
isinstance(self.cdm, DecryptLabsRemoteCDM) and self.cdm.is_playready
):
widevine_cdm = self.get_cdm(self.service, self.profile, drm="widevine")
if widevine_cdm:
self.log.info("Switching to Widevine CDM for Widevine content")
self.cdm = widevine_cdm
elif isinstance(drm, PlayReady):
if not isinstance(self.cdm, (PlayReadyCdm, DecryptLabsRemoteCDM)) or (
isinstance(self.cdm, DecryptLabsRemoteCDM) and not self.cdm.is_playready
):
playready_cdm = self.get_cdm(self.service, self.profile, drm="playready")
if playready_cdm:
self.log.info("Switching to PlayReady CDM for PlayReady content")
self.cdm = playready_cdm
if isinstance(drm, Widevine):
with self.DRM_TABLE_LOCK:
cek_tree = Tree(Text.assemble(("Widevine", "cyan"), (f"({drm.pssh.dumps()})", "text"), overflow="fold"))
pssh_display = self._truncate_pssh_for_display(drm.pssh.dumps(), "Widevine")
cek_tree = Tree(Text.assemble(("Widevine", "cyan"), (f"({pssh_display})", "text"), overflow="fold"))
pre_existing_tree = next(
(x for x in table.columns[0].cells if isinstance(x, Tree) and x.label == cek_tree.label), None
)
if pre_existing_tree:
cek_tree = pre_existing_tree
for kid in drm.kids:
need_license = False
all_kids = list(drm.kids)
if track_kid and track_kid not in all_kids:
all_kids.append(track_kid)
for kid in all_kids:
if kid in drm.content_keys:
continue
@@ -1235,46 +1428,51 @@ class dl:
if not pre_existing_tree:
table.add_row(cek_tree)
raise Widevine.Exceptions.CEKNotFound(msg)
else:
need_license = True
if kid not in drm.content_keys and not vaults_only:
from_vaults = drm.content_keys.copy()
if kid not in drm.content_keys and cdm_only:
need_license = True
try:
if self.service == "NF":
drm.get_NF_content_keys(cdm=self.cdm, licence=licence, certificate=certificate)
else:
drm.get_content_keys(cdm=self.cdm, licence=licence, certificate=certificate)
except Exception as e:
if isinstance(e, (Widevine.Exceptions.EmptyLicense, Widevine.Exceptions.CEKNotFound)):
msg = str(e)
else:
msg = f"An exception occurred in the Service's license function: {e}"
cek_tree.add(f"[logging.level.error]{msg}")
if not pre_existing_tree:
table.add_row(cek_tree)
raise e
if need_license and not vaults_only:
from_vaults = drm.content_keys.copy()
for kid_, key in drm.content_keys.items():
if key == "0" * 32:
key = f"[red]{key}[/]"
label = f"[text2]{kid_.hex}:{key}{is_track_kid}"
if not any(f"{kid_.hex}:{key}" in x.label for x in cek_tree.children):
cek_tree.add(label)
try:
if self.service == "NF":
drm.get_NF_content_keys(cdm=self.cdm, licence=licence, certificate=certificate)
else:
drm.get_content_keys(cdm=self.cdm, licence=licence, certificate=certificate)
except Exception as e:
if isinstance(e, (Widevine.Exceptions.EmptyLicense, Widevine.Exceptions.CEKNotFound)):
msg = str(e)
else:
msg = f"An exception occurred in the Service's license function: {e}"
cek_tree.add(f"[logging.level.error]{msg}")
if not pre_existing_tree:
table.add_row(cek_tree)
raise e
drm.content_keys = {
kid_: key for kid_, key in drm.content_keys.items() if key and key.count("0") != len(key)
}
for kid_, key in drm.content_keys.items():
if key == "0" * 32:
key = f"[red]{key}[/]"
is_track_kid_marker = ["", "*"][kid_ == track_kid]
label = f"[text2]{kid_.hex}:{key}{is_track_kid_marker}"
if not any(f"{kid_.hex}:{key}" in x.label for x in cek_tree.children):
cek_tree.add(label)
# The CDM keys may have returned blank content keys for KIDs we got from vaults.
# So we re-add the keys from vaults earlier overwriting blanks or removed KIDs data.
drm.content_keys.update(from_vaults)
drm.content_keys = {
kid_: key for kid_, key in drm.content_keys.items() if key and key.count("0") != len(key)
}
successful_caches = self.vaults.add_keys(drm.content_keys)
self.log.info(
f"Cached {len(drm.content_keys)} Key{'' if len(drm.content_keys) == 1 else 's'} to "
f"{successful_caches}/{len(self.vaults)} Vaults"
)
break # licensing twice will be unnecessary
# The CDM keys may have returned blank content keys for KIDs we got from vaults.
# So we re-add the keys from vaults earlier overwriting blanks or removed KIDs data.
drm.content_keys.update(from_vaults)
successful_caches = self.vaults.add_keys(drm.content_keys)
self.log.info(
f"Cached {len(drm.content_keys)} Key{'' if len(drm.content_keys) == 1 else 's'} to "
f"{successful_caches}/{len(self.vaults)} Vaults"
)
if track_kid and track_kid not in drm.content_keys:
msg = f"No Content Key for KID {track_kid.hex} was returned in the License"
@@ -1300,10 +1498,11 @@ class dl:
elif isinstance(drm, PlayReady):
with self.DRM_TABLE_LOCK:
pssh_display = self._truncate_pssh_for_display(drm.pssh_b64 or "", "PlayReady")
cek_tree = Tree(
Text.assemble(
("PlayReady", "cyan"),
(f"({drm.pssh_b64 or ''})", "text"),
(f"({pssh_display})", "text"),
overflow="fold",
)
)
@@ -1313,7 +1512,12 @@ class dl:
if pre_existing_tree:
cek_tree = pre_existing_tree
for kid in drm.kids:
need_license = False
all_kids = list(drm.kids)
if track_kid and track_kid not in all_kids:
all_kids.append(track_kid)
for kid in all_kids:
if kid in drm.content_keys:
continue
@@ -1333,35 +1537,40 @@ class dl:
if not pre_existing_tree:
table.add_row(cek_tree)
raise PlayReady.Exceptions.CEKNotFound(msg)
else:
need_license = True
if kid not in drm.content_keys and not vaults_only:
from_vaults = drm.content_keys.copy()
if kid not in drm.content_keys and cdm_only:
need_license = True
try:
drm.get_content_keys(cdm=self.cdm, licence=licence, certificate=certificate)
except Exception as e:
if isinstance(e, (PlayReady.Exceptions.EmptyLicense, PlayReady.Exceptions.CEKNotFound)):
msg = str(e)
else:
msg = f"An exception occurred in the Service's license function: {e}"
cek_tree.add(f"[logging.level.error]{msg}")
if not pre_existing_tree:
table.add_row(cek_tree)
raise e
if need_license and not vaults_only:
from_vaults = drm.content_keys.copy()
for kid_, key in drm.content_keys.items():
label = f"[text2]{kid_.hex}:{key}{is_track_kid}"
if not any(f"{kid_.hex}:{key}" in x.label for x in cek_tree.children):
cek_tree.add(label)
try:
drm.get_content_keys(cdm=self.cdm, licence=licence, certificate=certificate)
except Exception as e:
if isinstance(e, (PlayReady.Exceptions.EmptyLicense, PlayReady.Exceptions.CEKNotFound)):
msg = str(e)
else:
msg = f"An exception occurred in the Service's license function: {e}"
cek_tree.add(f"[logging.level.error]{msg}")
if not pre_existing_tree:
table.add_row(cek_tree)
raise e
drm.content_keys.update(from_vaults)
for kid_, key in drm.content_keys.items():
is_track_kid_marker = ["", "*"][kid_ == track_kid]
label = f"[text2]{kid_.hex}:{key}{is_track_kid_marker}"
if not any(f"{kid_.hex}:{key}" in x.label for x in cek_tree.children):
cek_tree.add(label)
successful_caches = self.vaults.add_keys(drm.content_keys)
self.log.info(
f"Cached {len(drm.content_keys)} Key{'' if len(drm.content_keys) == 1 else 's'} to "
f"{successful_caches}/{len(self.vaults)} Vaults"
)
break
drm.content_keys.update(from_vaults)
successful_caches = self.vaults.add_keys(drm.content_keys)
self.log.info(
f"Cached {len(drm.content_keys)} Key{'' if len(drm.content_keys) == 1 else 's'} to "
f"{successful_caches}/{len(self.vaults)} Vaults"
)
if track_kid and track_kid not in drm.content_keys:
msg = f"No Content Key for KID {track_kid.hex} was returned in the License"
@@ -1421,6 +1630,9 @@ class dl:
@staticmethod
def save_cookies(path: Path, cookies: CookieJar):
if hasattr(cookies, 'jar'):
cookies = cookies.jar
cookie_jar = MozillaCookieJar(path)
cookie_jar.load()
for cookie in cookies:
@@ -1447,9 +1659,11 @@ class dl:
service: str,
profile: Optional[str] = None,
drm: Optional[str] = None,
quality: Optional[int] = None,
) -> Optional[object]:
"""
Get CDM for a specified service (either Local or Remote CDM).
Now supports quality-based selection when quality is provided.
Raises a ValueError if there's a problem getting a CDM.
"""
cdm_name = config.cdm.get(service) or config.cdm.get("default")
@@ -1457,47 +1671,110 @@ class dl:
return None
if isinstance(cdm_name, dict):
lower_keys = {k.lower(): v for k, v in cdm_name.items()}
if {"widevine", "playready"} & lower_keys.keys():
drm_key = None
if drm:
drm_key = {
"wv": "widevine",
"widevine": "widevine",
"pr": "playready",
"playready": "playready",
}.get(drm.lower())
cdm_name = lower_keys.get(drm_key or "widevine") or lower_keys.get("playready")
else:
if not profile:
return None
cdm_name = cdm_name.get(profile) or config.cdm.get("default")
if not cdm_name:
return None
if quality:
quality_match = None
quality_keys = []
cdm_api = next(iter(x for x in config.remote_cdm if x["name"] == cdm_name), None)
if cdm_api:
is_decrypt_lab = True if cdm_api["type"] == "decrypt_labs" else False
if is_decrypt_lab:
device_type = cdm_api.get("device_type")
del cdm_api["name"]
del cdm_api["type"]
for key in cdm_name.keys():
if (
isinstance(key, str)
and any(op in key for op in [">=", ">", "<=", "<"])
or (isinstance(key, str) and key.isdigit())
):
quality_keys.append(key)
# Use the appropriate DecryptLabs CDM class based on device type
if device_type == "PLAYREADY" or cdm_api.get("device_name") in ["SL2", "SL3"]:
from unshackle.core.cdm.decrypt_labs_remote_cdm import DecryptLabsRemotePlayReadyCDM
def sort_quality_key(key):
if key.isdigit():
return (0, int(key)) # Exact matches first
elif key.startswith(">="):
return (1, -int(key[2:])) # >= descending
elif key.startswith(">"):
return (1, -int(key[1:])) # > descending
elif key.startswith("<="):
return (2, int(key[2:])) # <= ascending
elif key.startswith("<"):
return (2, int(key[1:])) # < ascending
return (3, 0) # Other keys last
# Remove unused parameters for PlayReady CDM
cdm_params = cdm_api.copy()
cdm_params.pop("device_type", None)
cdm_params.pop("system_id", None)
return DecryptLabsRemotePlayReadyCDM(service_name=service, vaults=self.vaults, **cdm_params)
quality_keys.sort(key=sort_quality_key)
for key in quality_keys:
if key.isdigit() and quality == int(key):
quality_match = cdm_name[key]
self.log.debug(f"Selected CDM based on exact quality match {quality}p: {quality_match}")
break
elif key.startswith(">="):
threshold = int(key[2:])
if quality >= threshold:
quality_match = cdm_name[key]
self.log.debug(f"Selected CDM based on quality {quality}p >= {threshold}p: {quality_match}")
break
elif key.startswith(">"):
threshold = int(key[1:])
if quality > threshold:
quality_match = cdm_name[key]
self.log.debug(f"Selected CDM based on quality {quality}p > {threshold}p: {quality_match}")
break
elif key.startswith("<="):
threshold = int(key[2:])
if quality <= threshold:
quality_match = cdm_name[key]
self.log.debug(f"Selected CDM based on quality {quality}p <= {threshold}p: {quality_match}")
break
elif key.startswith("<"):
threshold = int(key[1:])
if quality < threshold:
quality_match = cdm_name[key]
self.log.debug(f"Selected CDM based on quality {quality}p < {threshold}p: {quality_match}")
break
if quality_match:
cdm_name = quality_match
if isinstance(cdm_name, dict):
lower_keys = {k.lower(): v for k, v in cdm_name.items()}
if {"widevine", "playready"} & lower_keys.keys():
drm_key = None
if drm:
drm_key = {
"wv": "widevine",
"widevine": "widevine",
"pr": "playready",
"playready": "playready",
}.get(drm.lower())
cdm_name = lower_keys.get(drm_key or "widevine") or lower_keys.get("playready")
else:
return DecryptLabsRemoteCDM(service_name=service, vaults=self.vaults, **cdm_api)
else:
cdm_name = cdm_name.get(profile) or cdm_name.get("default") or config.cdm.get("default")
if not cdm_name:
return None
cdm_api = next(iter(x.copy() for x in config.remote_cdm if x["name"] == cdm_name), None)
if cdm_api:
is_decrypt_lab = True if cdm_api.get("type") == "decrypt_labs" else False
if is_decrypt_lab:
del cdm_api["name"]
del cdm_api["type"]
return RemoteCdm(**cdm_api)
if "secret" not in cdm_api or not cdm_api["secret"]:
if config.decrypt_labs_api_key:
cdm_api["secret"] = config.decrypt_labs_api_key
else:
raise ValueError(
f"No secret provided for DecryptLabs CDM '{cdm_name}' and no global "
"decrypt_labs_api_key configured"
)
# All DecryptLabs CDMs use DecryptLabsRemoteCDM
return DecryptLabsRemoteCDM(service_name=service, vaults=self.vaults, **cdm_api)
else:
return RemoteCdm(
device_type=cdm_api['Device Type'],
system_id=cdm_api['System ID'],
security_level=cdm_api['Security Level'],
host=cdm_api['Host'],
secret=cdm_api['Secret'],
device_name=cdm_api['Device Name'],
)
prd_path = config.directories.prds / f"{cdm_name}.prd"
if not prd_path.is_file():

View File

@@ -12,84 +12,113 @@ from unshackle.core.vault import Vault
from unshackle.core.vaults import Vaults
def _load_vaults(vault_names: list[str]) -> Vaults:
"""Load and validate vaults by name."""
vaults = Vaults()
for vault_name in vault_names:
vault_config = next((x for x in config.key_vaults if x["name"] == vault_name), None)
if not vault_config:
raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
vault_type = vault_config["type"]
vault_args = vault_config.copy()
del vault_args["type"]
if not vaults.load(vault_type, **vault_args):
raise click.ClickException(f"Failed to load vault ({vault_name}).")
return vaults
def _process_service_keys(from_vault: Vault, service: str, log: logging.Logger) -> dict[str, str]:
"""Get and validate keys from a vault for a specific service."""
content_keys = list(from_vault.get_keys(service))
bad_keys = {kid: key for kid, key in content_keys if not key or key.count("0") == len(key)}
for kid, key in bad_keys.items():
log.warning(f"Skipping NULL key: {kid}:{key}")
return {kid: key for kid, key in content_keys if kid not in bad_keys}
def _copy_service_data(to_vault: Vault, from_vault: Vault, service: str, log: logging.Logger) -> int:
"""Copy data for a single service between vaults."""
content_keys = _process_service_keys(from_vault, service, log)
total_count = len(content_keys)
if total_count == 0:
log.info(f"{service}: No keys found in {from_vault}")
return 0
try:
added = to_vault.add_keys(service, content_keys)
except PermissionError:
log.warning(f"{service}: No permission to create table in {to_vault}, skipped")
return 0
existed = total_count - added
if added > 0 and existed > 0:
log.info(f"{service}: {added} added, {existed} skipped ({total_count} total)")
elif added > 0:
log.info(f"{service}: {added} added ({total_count} total)")
else:
log.info(f"{service}: {existed} skipped (all existed)")
return added
@click.group(short_help="Manage and configure Key Vaults.", context_settings=context_settings)
def kv() -> None:
"""Manage and configure Key Vaults."""
@kv.command()
@click.argument("to_vault", type=str)
@click.argument("from_vaults", nargs=-1, type=click.UNPROCESSED)
@click.argument("to_vault_name", type=str)
@click.argument("from_vault_names", nargs=-1, type=click.UNPROCESSED)
@click.option("-s", "--service", type=str, default=None, help="Only copy data to and from a specific service.")
def copy(to_vault: str, from_vaults: list[str], service: Optional[str] = None) -> None:
def copy(to_vault_name: str, from_vault_names: list[str], service: Optional[str] = None) -> None:
"""
Copy data from multiple Key Vaults into a single Key Vault.
Rows with matching KIDs are skipped unless there's no KEY set.
Existing data is not deleted or altered.
The `to_vault` argument is the key vault you wish to copy data to.
The `to_vault_name` argument is the key vault you wish to copy data to.
It should be the name of a Key Vault defined in the config.
The `from_vaults` argument is the key vault(s) you wish to take
The `from_vault_names` argument is the key vault(s) you wish to take
data from. You may supply multiple key vaults.
"""
if not from_vaults:
if not from_vault_names:
raise click.ClickException("No Vaults were specified to copy data from.")
log = logging.getLogger("kv")
vaults = Vaults()
for vault_name in [to_vault] + list(from_vaults):
vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
if not vault:
raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
vault_type = vault["type"]
vault_args = vault.copy()
del vault_args["type"]
if not vaults.load(vault_type, **vault_args):
raise click.ClickException(f"Failed to load vault ({vault_name}).")
all_vault_names = [to_vault_name] + list(from_vault_names)
vaults = _load_vaults(all_vault_names)
to_vault: Vault = vaults.vaults[0]
from_vaults: list[Vault] = vaults.vaults[1:]
to_vault = vaults.vaults[0]
from_vaults = vaults.vaults[1:]
vault_names = ", ".join([v.name for v in from_vaults])
log.info(f"Copying data from {vault_names}{to_vault.name}")
log.info(f"Copying data from {', '.join([x.name for x in from_vaults])}, into {to_vault.name}")
if service:
service = Services.get_tag(service)
log.info(f"Only copying data for service {service}")
log.info(f"Filtering by service: {service}")
total_added = 0
for from_vault in from_vaults:
if service:
services = [service]
else:
services = from_vault.get_services()
for service_ in services:
log.info(f"Getting data from {from_vault} for {service_}")
content_keys = list(from_vault.get_keys(service_)) # important as it's a generator we iterate twice
bad_keys = {kid: key for kid, key in content_keys if not key or key.count("0") == len(key)}
for kid, key in bad_keys.items():
log.warning(f"Cannot add a NULL Content Key to a Vault, skipping: {kid}:{key}")
content_keys = {kid: key for kid, key in content_keys if kid not in bad_keys}
total_count = len(content_keys)
log.info(f"Adding {total_count} Content Keys to {to_vault} for {service_}")
try:
added = to_vault.add_keys(service_, content_keys)
except PermissionError:
log.warning(f" - No permission to create table ({service_}) in {to_vault}, skipping...")
continue
services_to_copy = [service] if service else from_vault.get_services()
for service_tag in services_to_copy:
added = _copy_service_data(to_vault, from_vault, service_tag, log)
total_added += added
existed = total_count - added
log.info(f"{to_vault} ({service_}): {added} newly added, {existed} already existed (skipped)")
log.info(f"{to_vault}: {total_added} total newly added")
if total_added > 0:
log.info(f"Successfully added {total_added} new keys to {to_vault}")
else:
log.info("Copy completed - no new keys to add")
@kv.command()
@@ -106,9 +135,9 @@ def sync(ctx: click.Context, vaults: list[str], service: Optional[str] = None) -
if not len(vaults) > 1:
raise click.ClickException("You must provide more than one Vault to sync.")
ctx.invoke(copy, to_vault=vaults[0], from_vaults=vaults[1:], service=service)
ctx.invoke(copy, to_vault_name=vaults[0], from_vault_names=vaults[1:], service=service)
for i in range(1, len(vaults)):
ctx.invoke(copy, to_vault=vaults[i], from_vaults=[vaults[i - 1]], service=service)
ctx.invoke(copy, to_vault_name=vaults[i], from_vault_names=[vaults[i - 1]], service=service)
@kv.command()
@@ -135,15 +164,7 @@ def add(file: Path, service: str, vaults: list[str]) -> None:
log = logging.getLogger("kv")
service = Services.get_tag(service)
vaults_ = Vaults()
for vault_name in vaults:
vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
if not vault:
raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
vault_type = vault["type"]
vault_args = vault.copy()
del vault_args["type"]
vaults_.load(vault_type, **vault_args)
vaults_ = _load_vaults(list(vaults))
data = file.read_text(encoding="utf8")
kid_keys: dict[str, str] = {}
@@ -173,15 +194,7 @@ def prepare(vaults: list[str]) -> None:
"""Create Service Tables on Vaults if not yet created."""
log = logging.getLogger("kv")
vaults_ = Vaults()
for vault_name in vaults:
vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
if not vault:
raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
vault_type = vault["type"]
vault_args = vault.copy()
del vault_args["type"]
vaults_.load(vault_type, **vault_args)
vaults_ = _load_vaults(vaults)
for vault in vaults_:
if hasattr(vault, "has_table") and hasattr(vault, "create_table"):


@@ -5,10 +5,10 @@ from typing import Optional
import click
import requests
from Crypto.Random import get_random_bytes
from pyplayready import InvalidCertificateChain, OutdatedDevice
from pyplayready.cdm import Cdm
from pyplayready.crypto.ecc_key import ECCKey
from pyplayready.device import Device
from pyplayready.exceptions import InvalidCertificateChain, OutdatedDevice
from pyplayready.system.bcert import Certificate, CertificateChain
from pyplayready.system.pssh import PSSH


@@ -1 +1 @@
__version__ = "1.4.3"
__version__ = "1.4.8"

File diff suppressed because it is too large.


@@ -88,6 +88,7 @@ class Config:
self.tag_group_name: bool = kwargs.get("tag_group_name", True)
self.tag_imdb_tmdb: bool = kwargs.get("tag_imdb_tmdb", True)
self.tmdb_api_key: str = kwargs.get("tmdb_api_key") or ""
self.decrypt_labs_api_key: str = kwargs.get("decrypt_labs_api_key") or ""
self.update_checks: bool = kwargs.get("update_checks", True)
self.update_check_interval: int = kwargs.get("update_check_interval", 24)
self.scene_naming: bool = kwargs.get("scene_naming", True)


@@ -6,6 +6,7 @@ DOWNLOAD_LICENCE_ONLY = Event()
DRM_SORT_MAP = ["ClearKey", "Widevine"]
LANGUAGE_MAX_DISTANCE = 5 # this is max to be considered "same", e.g., en, en-US, en-AU
LANGUAGE_EXACT_DISTANCE = 0 # exact match only, no variants
VIDEO_CODEC_MAP = {"AVC": "H.264", "HEVC": "H.265"}
DYNAMIC_RANGE_MAP = {"HDR10": "HDR", "HDR10+": "HDR10P", "Dolby Vision": "DV", "HDR10 / HDR10+": "HDR10P", "HDR10 / HDR10": "HDR"}
AUDIO_CODEC_MAP = {"E-AC-3": "DDP", "AC-3": "DD"}


@@ -150,6 +150,7 @@ def download(
track_type = track.__class__.__name__
thread_count = str(config.n_m3u8dl_re.get("thread_count", max_workers))
retry_count = str(config.n_m3u8dl_re.get("retry_count", max_workers))
ad_keyword = config.n_m3u8dl_re.get("ad_keyword")
arguments = [
@@ -160,6 +161,8 @@ def download(
output_dir,
"--thread-count",
thread_count,
"--download-retry-count",
retry_count,
"--no-log",
"--write-meta-json",
"false",


@@ -8,6 +8,7 @@ from urllib.parse import urljoin
from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import unpad
from curl_cffi.requests import Session as CurlSession
from m3u8.model import Key
from requests import Session
@@ -69,8 +70,8 @@ class ClearKey:
"""
if not isinstance(m3u_key, Key):
raise ValueError(f"Provided M3U Key is in an unexpected type {m3u_key!r}")
if not isinstance(session, (Session, type(None))):
raise TypeError(f"Expected session to be a {Session}, not a {type(session)}")
if not isinstance(session, (Session, CurlSession, type(None))):
raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not a {type(session)}")
if not m3u_key.method.startswith("AES"):
raise ValueError(f"Provided M3U Key is not an AES Clear Key, {m3u_key.method}")


@@ -256,44 +256,38 @@ class PlayReady:
return keys
def get_content_keys(self, cdm: PlayReadyCdm, certificate: Callable, licence: Callable) -> None:
for kid in self.kids:
if kid in self.content_keys:
continue
session_id = cdm.open()
try:
if hasattr(cdm, "set_pssh_b64") and self.pssh_b64:
cdm.set_pssh_b64(self.pssh_b64)
session_id = cdm.open()
try:
if hasattr(cdm, "set_pssh_b64") and self.pssh_b64:
cdm.set_pssh_b64(self.pssh_b64)
if hasattr(cdm, "set_required_kids"):
cdm.set_required_kids(self.kids)
challenge = cdm.get_license_challenge(session_id, self.pssh.wrm_headers[0])
challenge = cdm.get_license_challenge(session_id, self.pssh.wrm_headers[0])
if challenge:
try:
license_res = licence(challenge=challenge)
except Exception:
if hasattr(cdm, "use_cached_keys_as_fallback"):
if cdm.use_cached_keys_as_fallback(session_id):
keys = self._extract_keys_from_cdm(cdm, session_id)
self.content_keys.update(keys)
continue
if isinstance(license_res, bytes):
license_str = license_res.decode(errors="ignore")
else:
license_str = str(license_res)
if "<License>" not in license_str:
try:
license_str = base64.b64decode(license_str + "===").decode()
except Exception:
pass
cdm.parse_license(session_id, license_str)
except Exception:
raise
if isinstance(license_res, bytes):
license_str = license_res.decode(errors="ignore")
else:
license_str = str(license_res)
if "<License>" not in license_str:
try:
license_str = base64.b64decode(license_str + "===").decode()
except Exception:
pass
cdm.parse_license(session_id, license_str)
keys = self._extract_keys_from_cdm(cdm, session_id)
self.content_keys.update(keys)
finally:
cdm.close(session_id)
keys = self._extract_keys_from_cdm(cdm, session_id)
self.content_keys.update(keys)
finally:
cdm.close(session_id)
if not self.content_keys:
raise PlayReady.Exceptions.EmptyLicense("No Content Keys were within the License")


@@ -185,6 +185,9 @@ class Widevine:
if cert and hasattr(cdm, "set_service_certificate"):
cdm.set_service_certificate(session_id, cert)
if hasattr(cdm, "set_required_kids"):
cdm.set_required_kids(self.kids)
challenge = cdm.get_license_challenge(session_id, self.pssh)
if hasattr(cdm, "has_cached_keys") and cdm.has_cached_keys(session_id):
@@ -218,6 +221,9 @@ class Widevine:
if cert and hasattr(cdm, "set_service_certificate"):
cdm.set_service_certificate(session_id, cert)
if hasattr(cdm, "set_required_kids"):
cdm.set_required_kids(self.kids)
challenge = cdm.get_license_challenge(session_id, self.pssh)
if hasattr(cdm, "has_cached_keys") and cdm.has_cached_keys(session_id):


@@ -15,6 +15,7 @@ from uuid import UUID
from zlib import crc32
import requests
from curl_cffi.requests import Session as CurlSession
from langcodes import Language, tag_is_valid
from lxml.etree import Element, ElementTree
from pyplayready.system.pssh import PSSH as PR_PSSH
@@ -47,7 +48,7 @@ class DASH:
self.url = url
@classmethod
def from_url(cls, url: str, session: Optional[Session] = None, **args: Any) -> DASH:
def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **args: Any) -> DASH:
if not url:
raise requests.URLRequired("DASH manifest URL must be provided for relative path computations.")
if not isinstance(url, str):
@@ -55,8 +56,8 @@ class DASH:
if not session:
session = Session()
elif not isinstance(session, Session):
raise TypeError(f"Expected session to be a {Session}, not {session!r}")
elif not isinstance(session, (Session, CurlSession)):
raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")
res = session.get(url, **args)
if res.url != url:
@@ -103,6 +104,10 @@ class DASH:
continue
if next(iter(period.xpath("SegmentType/@value")), "content") != "content":
continue
if "urn:amazon:primevideo:cachingBreadth" in [
x.get("schemeIdUri") for x in period.findall("SupplementalProperty")
]:
continue
for adaptation_set in period.findall("AdaptationSet"):
if self.is_trick_mode(adaptation_set):
@@ -248,8 +253,8 @@ class DASH:
):
if not session:
session = Session()
elif not isinstance(session, Session):
raise TypeError(f"Expected session to be a {Session}, not {session!r}")
elif not isinstance(session, (Session, CurlSession)):
raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")
if proxy:
session.proxies.update({"all": proxy})


@@ -14,9 +14,10 @@ from typing import Any, Callable, Optional, Union
from urllib.parse import urljoin
from zlib import crc32
import httpx
import m3u8
import requests
from curl_cffi.requests import Response as CurlResponse
from curl_cffi.requests import Session as CurlSession
from langcodes import Language, tag_is_valid
from m3u8 import M3U8
from pyplayready.cdm import Cdm as PlayReadyCdm
@@ -35,7 +36,7 @@ from unshackle.core.utilities import get_extension, is_close_match, try_ensure_u
class HLS:
def __init__(self, manifest: M3U8, session: Optional[Union[Session, httpx.Client]] = None):
def __init__(self, manifest: M3U8, session: Optional[Union[Session, CurlSession]] = None):
if not manifest:
raise ValueError("HLS manifest must be provided.")
if not isinstance(manifest, M3U8):
@@ -47,7 +48,7 @@ class HLS:
self.session = session or Session()
@classmethod
def from_url(cls, url: str, session: Optional[Union[Session, httpx.Client]] = None, **args: Any) -> HLS:
def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **args: Any) -> HLS:
if not url:
raise requests.URLRequired("HLS manifest URL must be provided.")
if not isinstance(url, str):
@@ -55,22 +56,22 @@ class HLS:
if not session:
session = Session()
elif not isinstance(session, (Session, httpx.Client)):
raise TypeError(f"Expected session to be a {Session} or {httpx.Client}, not {session!r}")
elif not isinstance(session, (Session, CurlSession)):
raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")
res = session.get(url, **args)
# Handle both requests and httpx response objects
# Handle requests and curl_cffi response objects
if isinstance(res, requests.Response):
if not res.ok:
raise requests.ConnectionError("Failed to request the M3U(8) document.", response=res)
content = res.text
elif isinstance(res, httpx.Response):
if res.status_code >= 400:
elif isinstance(res, CurlResponse):
if not res.ok:
raise requests.ConnectionError("Failed to request the M3U(8) document.", response=res)
content = res.text
else:
raise TypeError(f"Expected response to be a requests.Response or httpx.Response, not {type(res)}")
raise TypeError(f"Expected response to be a requests.Response or curl_cffi.Response, not {type(res)}")
master = m3u8.loads(content, uri=url)
@@ -229,7 +230,7 @@ class HLS:
save_path: Path,
save_dir: Path,
progress: partial,
session: Optional[Union[Session, httpx.Client]] = None,
session: Optional[Union[Session, CurlSession]] = None,
proxy: Optional[str] = None,
max_workers: Optional[int] = None,
license_widevine: Optional[Callable] = None,
@@ -238,15 +239,13 @@ class HLS:
) -> None:
if not session:
session = Session()
elif not isinstance(session, (Session, httpx.Client)):
raise TypeError(f"Expected session to be a {Session} or {httpx.Client}, not {session!r}")
elif not isinstance(session, (Session, CurlSession)):
raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")
if proxy:
# Handle proxies differently based on session type
if isinstance(session, Session):
session.proxies.update({"all": proxy})
elif isinstance(session, httpx.Client):
session.proxies = {"http://": proxy, "https://": proxy}
log = logging.getLogger("HLS")
@@ -257,13 +256,8 @@ class HLS:
log.error(f"Failed to request the invariant M3U8 playlist: {response.status_code}")
sys.exit(1)
playlist_text = response.text
elif isinstance(response, httpx.Response):
if response.status_code >= 400:
log.error(f"Failed to request the invariant M3U8 playlist: {response.status_code}")
sys.exit(1)
playlist_text = response.text
else:
raise TypeError(f"Expected response to be a requests.Response or httpx.Response, not {type(response)}")
raise TypeError(f"Expected response to be a requests.Response or curl_cffi.Response, not {type(response)}")
master = m3u8.loads(playlist_text, uri=track.url)
@@ -533,13 +527,9 @@ class HLS:
if isinstance(res, requests.Response):
res.raise_for_status()
init_content = res.content
elif isinstance(res, httpx.Response):
if res.status_code >= 400:
raise requests.HTTPError(f"HTTP Error: {res.status_code}", response=res)
init_content = res.content
else:
raise TypeError(
f"Expected response to be requests.Response or httpx.Response, not {type(res)}"
f"Expected response to be requests.Response or curl_cffi.Response, not {type(res)}"
)
map_data = (segment.init_section, init_content)
@@ -707,7 +697,7 @@ class HLS:
@staticmethod
def parse_session_data_keys(
manifest: M3U8, session: Optional[Union[Session, httpx.Client]] = None
manifest: M3U8, session: Optional[Union[Session, CurlSession]] = None
) -> list[m3u8.model.Key]:
"""Parse `com.apple.hls.keys` session data and return Key objects."""
keys: list[m3u8.model.Key] = []
@@ -798,7 +788,8 @@ class HLS:
@staticmethod
def get_drm(
key: Union[m3u8.model.SessionKey, m3u8.model.Key], session: Optional[Union[Session, httpx.Client]] = None
key: Union[m3u8.model.SessionKey, m3u8.model.Key],
session: Optional[Union[Session, CurlSession]] = None,
) -> DRM_T:
"""
Convert HLS EXT-X-KEY data to an initialized DRM object.
@@ -810,8 +801,8 @@ class HLS:
Raises a NotImplementedError if the key system is not supported.
"""
if not isinstance(session, (Session, httpx.Client, type(None))):
raise TypeError(f"Expected session to be a {Session} or {httpx.Client}, not {type(session)}")
if not isinstance(session, (Session, CurlSession, type(None))):
raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {type(session)}")
if not session:
session = Session()
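Since curl_cffi sessions are now accepted throughout the HLS class, a manifest can be loaded through one directly. A minimal illustrative sketch; the URL is a placeholder:

from curl_cffi.requests import Session as CurlSession
from unshackle.core.manifests.hls import HLS

curl = CurlSession(impersonate="chrome124")  # impersonation target used elsewhere in this diff
hls = HLS.from_url("https://example.com/master.m3u8", session=curl)
tracks = hls.to_tracks("en")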


@@ -10,6 +10,7 @@ from pathlib import Path
from typing import Any, Callable, Optional, Union
import requests
from curl_cffi.requests import Session as CurlSession
from langcodes import Language, tag_is_valid
from lxml.etree import Element
from pyplayready.system.pssh import PSSH as PR_PSSH
@@ -34,11 +35,13 @@ class ISM:
self.url = url
@classmethod
def from_url(cls, url: str, session: Optional[Session] = None, **kwargs: Any) -> "ISM":
def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **kwargs: Any) -> "ISM":
if not url:
raise requests.URLRequired("ISM manifest URL must be provided")
if not session:
session = Session()
elif not isinstance(session, (Session, CurlSession)):
raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")
res = session.get(url, **kwargs)
if res.url != url:
url = res.url


@@ -4,15 +4,10 @@ from __future__ import annotations
from typing import Optional, Union
import httpx
import m3u8
from pyplayready.cdm import Cdm as PlayReadyCdm
from pyplayready.system.pssh import PSSH as PR_PSSH
from pywidevine.cdm import Cdm as WidevineCdm
from pywidevine.pssh import PSSH as WV_PSSH
from curl_cffi.requests import Session as CurlSession
from requests import Session
from unshackle.core.drm import PlayReady, Widevine
from unshackle.core.manifests.hls import HLS
from unshackle.core.tracks import Tracks
@@ -21,54 +16,17 @@ def parse(
master: m3u8.M3U8,
language: str,
*,
session: Optional[Union[Session, httpx.Client]] = None,
session: Optional[Union[Session, CurlSession]] = None,
) -> Tracks:
"""Parse a variant playlist to ``Tracks`` with DRM information."""
"""Parse a variant playlist to ``Tracks`` with basic information, defer DRM loading."""
tracks = HLS(master, session=session).to_tracks(language)
need_wv = not any(isinstance(d, Widevine) for t in tracks for d in (t.drm or []))
need_pr = not any(isinstance(d, PlayReady) for t in tracks for d in (t.drm or []))
bool(master.session_keys or HLS.parse_session_data_keys(master, session or Session()))
if (need_wv or need_pr) and tracks.videos:
if not session:
session = Session()
session_keys = list(master.session_keys or [])
session_keys.extend(HLS.parse_session_data_keys(master, session))
for drm_obj in HLS.get_all_drm(session_keys):
if need_wv and isinstance(drm_obj, Widevine):
for t in tracks.videos + tracks.audio:
t.drm = [d for d in (t.drm or []) if not isinstance(d, Widevine)] + [drm_obj]
need_wv = False
elif need_pr and isinstance(drm_obj, PlayReady):
for t in tracks.videos + tracks.audio:
t.drm = [d for d in (t.drm or []) if not isinstance(d, PlayReady)] + [drm_obj]
need_pr = False
if not need_wv and not need_pr:
break
if (need_wv or need_pr) and tracks.videos:
first_video = tracks.videos[0]
playlist = m3u8.load(first_video.url)
for key in playlist.keys or []:
if not key or not key.keyformat:
continue
fmt = key.keyformat.lower()
if need_wv and fmt == WidevineCdm.urn:
pssh_b64 = key.uri.split(",")[-1]
drm = Widevine(pssh=WV_PSSH(pssh_b64))
for t in tracks.videos + tracks.audio:
t.drm = [d for d in (t.drm or []) if not isinstance(d, Widevine)] + [drm]
need_wv = False
elif need_pr and (fmt == PlayReadyCdm or "com.microsoft.playready" in fmt):
pssh_b64 = key.uri.split(",")[-1]
drm = PlayReady(pssh=PR_PSSH(pssh_b64), pssh_b64=pssh_b64)
for t in tracks.videos + tracks.audio:
t.drm = [d for d in (t.drm or []) if not isinstance(d, PlayReady)] + [drm]
need_pr = False
if not need_wv and not need_pr:
break
if True:
for t in tracks.videos + tracks.audio:
t.needs_drm_loading = True
t.session = session
return tracks

unshackle/core/session.py (new file, 79 lines)

@@ -0,0 +1,79 @@
"""Session utilities for creating HTTP sessions with different backends."""
from __future__ import annotations
import warnings
from curl_cffi.requests import Session as CurlSession
from unshackle.core.config import config
# Globally suppress curl_cffi HTTPS proxy warnings since some proxy providers
# (like NordVPN) require HTTPS URLs but curl_cffi expects HTTP format
warnings.filterwarnings(
"ignore", message="Make sure you are using https over https proxy.*", category=RuntimeWarning, module="curl_cffi.*"
)
class Session(CurlSession):
"""curl_cffi Session with warning suppression."""
def request(self, method, url, **kwargs):
with warnings.catch_warnings():
warnings.filterwarnings(
"ignore", message="Make sure you are using https over https proxy.*", category=RuntimeWarning
)
return super().request(method, url, **kwargs)
def session(browser: str | None = None, **kwargs) -> Session:
"""
Create a curl_cffi session that impersonates a browser.
This is a full replacement for requests.Session with browser impersonation
and anti-bot capabilities. The session uses curl-impersonate under the hood
to mimic real browser behavior.
Args:
browser: Browser to impersonate (e.g. "chrome124", "firefox", "safari").
Uses the configured default from curl_impersonate.browser if not specified.
See https://github.com/lexiforest/curl_cffi#sessions for available options.
**kwargs: Additional arguments passed to CurlSession constructor:
- headers: Additional headers (dict)
- cookies: Cookie jar or dict
- auth: HTTP basic auth tuple (username, password)
- proxies: Proxy configuration dict
- verify: SSL certificate verification (bool, default True)
- timeout: Request timeout in seconds (float or tuple)
- allow_redirects: Follow redirects (bool, default True)
- max_redirects: Maximum redirect count (int)
- cert: Client certificate (str or tuple)
Returns:
curl_cffi.requests.Session configured with browser impersonation, common headers,
and equivalent retry behavior to requests.Session.
Example:
from unshackle.core.session import session
class MyService(Service):
@staticmethod
def get_session():
return session() # Uses config default browser
"""
if browser is None:
browser = config.curl_impersonate.get("browser", "chrome124")
session_config = {
"impersonate": browser,
"timeout": 30.0,
"allow_redirects": True,
"max_redirects": 15,
"verify": True,
}
session_config.update(kwargs)
session_obj = Session(**session_config)
session_obj.headers.update(config.headers)
return session_obj
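A short usage sketch of the helper above; the browser string and extra header are purely illustrative:

from unshackle.core.session import session

s = session("firefox", headers={"Accept-Language": "en-US"})  # override the configured default browser
r = s.get("https://example.com/")                             # behaves like a requests-style session call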


@@ -89,7 +89,17 @@ class Episode(Title):
def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
primary_video_track = next(iter(media_info.video_tracks), None)
primary_audio_track = next(iter(media_info.audio_tracks), None)
primary_audio_track = None
if media_info.audio_tracks:
sorted_audio = sorted(
media_info.audio_tracks,
key=lambda x: (
float(x.bit_rate) if x.bit_rate else 0,
bool(x.format_additionalfeatures and "JOC" in x.format_additionalfeatures)
),
reverse=True
)
primary_audio_track = sorted_audio[0]
unique_audio_languages = len({x.language.split("-")[0] for x in media_info.audio_tracks if x.language})
# Title [Year] SXXEXX Name (or Title [Year] SXX if folder)
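The same primary-audio selection is mirrored in Movie.get_filename below. A standalone sketch of the sort key with stand-in objects (not real MediaInfo tracks): the highest bitrate wins, with a JOC (Atmos) stream preferred on ties.

from types import SimpleNamespace

candidates = [
    SimpleNamespace(bit_rate="128000", format_additionalfeatures=None),
    SimpleNamespace(bit_rate="768000", format_additionalfeatures="16-ch / JOC"),
]
primary = sorted(
    candidates,
    key=lambda x: (
        float(x.bit_rate) if x.bit_rate else 0,
        bool(x.format_additionalfeatures and "JOC" in x.format_additionalfeatures),
    ),
    reverse=True,
)[0]
# primary is the 768 kbps JOC track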


@@ -52,7 +52,17 @@ class Movie(Title):
def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
primary_video_track = next(iter(media_info.video_tracks), None)
primary_audio_track = next(iter(media_info.audio_tracks), None)
primary_audio_track = None
if media_info.audio_tracks:
sorted_audio = sorted(
media_info.audio_tracks,
key=lambda x: (
float(x.bit_rate) if x.bit_rate else 0,
bool(x.format_additionalfeatures and "JOC" in x.format_additionalfeatures)
),
reverse=True
)
primary_audio_track = sorted_audio[0]
unique_audio_languages = len({x.language.split("-")[0] for x in media_info.audio_tracks if x.language})
# Name (Year)


@@ -12,6 +12,7 @@ class Audio(Track):
AAC = "AAC" # https://wikipedia.org/wiki/Advanced_Audio_Coding
AC3 = "DD" # https://wikipedia.org/wiki/Dolby_Digital
EC3 = "DD+" # https://wikipedia.org/wiki/Dolby_Digital_Plus
AC4 = "AC-4" # https://wikipedia.org/wiki/Dolby_AC-4
OPUS = "OPUS" # https://wikipedia.org/wiki/Opus_(audio_format)
OGG = "VORB" # https://wikipedia.org/wiki/Vorbis
DTS = "DTS" # https://en.wikipedia.org/wiki/DTS_(company)#DTS_Digital_Surround
@@ -31,6 +32,8 @@ class Audio(Track):
return Audio.Codec.AC3
if mime == "ec-3":
return Audio.Codec.EC3
if mime == "ac-4":
return Audio.Codec.AC4
if mime == "opus":
return Audio.Codec.OPUS
if mime == "dtsc":
@@ -60,6 +63,8 @@ class Audio(Track):
return Audio.Codec.AC3
if profile.startswith("ddplus"):
return Audio.Codec.EC3
if profile.startswith("ac4"):
return Audio.Codec.AC4
if profile.startswith("playready-oggvorbis"):
return Audio.Codec.OGG
raise ValueError(f"The Content Profile '{profile}' is not a supported Audio Codec")


@@ -10,6 +10,7 @@ from pathlib import Path
from typing import Any, Callable, Iterable, Optional, Union
import pycaption
import pysubs2
import requests
from construct import Container
from pycaption import Caption, CaptionList, CaptionNode, WebVTTReader
@@ -33,6 +34,9 @@ class Subtitle(Track):
TimedTextMarkupLang = "TTML" # https://wikipedia.org/wiki/Timed_Text_Markup_Language
WebVTT = "VTT" # https://wikipedia.org/wiki/WebVTT
SAMI = "SMI" # https://wikipedia.org/wiki/SAMI
MicroDVD = "SUB" # https://wikipedia.org/wiki/MicroDVD
MPL2 = "MPL2" # MPL2 subtitle format
TMP = "TMP" # TMP subtitle format
# MPEG-DASH box-encapsulated subtitle formats
fTTML = "STPP" # https://www.w3.org/TR/2018/REC-ttml-imsc1.0.1-20180424
fVTT = "WVTT" # https://www.w3.org/TR/webvtt1
@@ -56,6 +60,12 @@ class Subtitle(Track):
return Subtitle.Codec.WebVTT
elif mime in ("smi", "sami"):
return Subtitle.Codec.SAMI
elif mime in ("sub", "microdvd"):
return Subtitle.Codec.MicroDVD
elif mime == "mpl2":
return Subtitle.Codec.MPL2
elif mime == "tmp":
return Subtitle.Codec.TMP
elif mime == "stpp":
return Subtitle.Codec.fTTML
elif mime == "wvtt":
@@ -391,6 +401,57 @@ class Subtitle(Track):
# Fall back to existing conversion method on any error
return self._convert_standard(codec)
def convert_with_pysubs2(self, codec: Subtitle.Codec) -> Path:
"""
Convert subtitle using pysubs2 library for broad format support.
pysubs2 is a pure-Python library supporting SubRip (SRT), SubStation Alpha
(SSA/ASS), WebVTT, TTML, SAMI, MicroDVD, MPL2, and TMP formats.
"""
if not self.path or not self.path.exists():
raise ValueError("You must download the subtitle track first.")
if self.codec == codec:
return self.path
output_path = self.path.with_suffix(f".{codec.value.lower()}")
original_path = self.path
codec_to_pysubs2_format = {
Subtitle.Codec.SubRip: "srt",
Subtitle.Codec.SubStationAlpha: "ssa",
Subtitle.Codec.SubStationAlphav4: "ass",
Subtitle.Codec.WebVTT: "vtt",
Subtitle.Codec.TimedTextMarkupLang: "ttml",
Subtitle.Codec.SAMI: "sami",
Subtitle.Codec.MicroDVD: "microdvd",
Subtitle.Codec.MPL2: "mpl2",
Subtitle.Codec.TMP: "tmp",
}
pysubs2_output_format = codec_to_pysubs2_format.get(codec)
if pysubs2_output_format is None:
return self._convert_standard(codec)
try:
subs = pysubs2.load(str(self.path), encoding="utf-8")
subs.save(str(output_path), format_=pysubs2_output_format, encoding="utf-8")
if original_path.exists() and original_path != output_path:
original_path.unlink()
self.path = output_path
self.codec = codec
if callable(self.OnConverted):
self.OnConverted(codec)
return output_path
except Exception:
return self._convert_standard(codec)
def convert(self, codec: Subtitle.Codec) -> Path:
"""
Convert this Subtitle to another Format.
@@ -400,6 +461,7 @@ class Subtitle(Track):
- 'subby': Always uses subby with CommonIssuesFixer
- 'subtitleedit': Uses SubtitleEdit when available, falls back to pycaption
- 'pycaption': Uses only pycaption library
- 'pysubs2': Uses pysubs2 library
"""
# Check configuration for conversion method
conversion_method = config.subtitle.get("conversion_method", "auto")
@@ -407,11 +469,12 @@ class Subtitle(Track):
if conversion_method == "subby":
return self.convert_with_subby(codec)
elif conversion_method == "subtitleedit":
return self._convert_standard(codec) # SubtitleEdit is used in standard conversion
return self._convert_standard(codec)
elif conversion_method == "pycaption":
return self._convert_pycaption_only(codec)
elif conversion_method == "pysubs2":
return self.convert_with_pysubs2(codec)
elif conversion_method == "auto":
# Use subby for formats it handles better
if self.codec in (Subtitle.Codec.WebVTT, Subtitle.Codec.SAMI):
return self.convert_with_subby(codec)
else:
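For reference, convert_with_pysubs2 above is a thin wrapper over pysubs2's load/save pair; the equivalent direct calls, with illustrative file names:

import pysubs2

subs = pysubs2.load("episode.en.vtt", encoding="utf-8")       # input format is inferred
subs.save("episode.en.srt", format_="srt", encoding="utf-8")  # write SubRip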


@@ -13,6 +13,7 @@ from typing import Any, Callable, Iterable, Optional, Union
from uuid import UUID
from zlib import crc32
from curl_cffi.requests import Session as CurlSession
from langcodes import Language
from pyplayready.cdm import Cdm as PlayReadyCdm
from pywidevine.cdm import Cdm as WidevineCdm
@@ -420,6 +421,15 @@ class Track:
for drm in self.drm:
if isinstance(drm, PlayReady):
return drm
elif hasattr(cdm, "is_playready"):
if cdm.is_playready:
for drm in self.drm:
if isinstance(drm, PlayReady):
return drm
else:
for drm in self.drm:
if isinstance(drm, Widevine):
return drm
return self.drm[0]
@@ -464,6 +474,83 @@ class Track:
if tenc.key_ID.int != 0:
return tenc.key_ID
def load_drm_if_needed(self, service=None) -> bool:
"""
Load DRM information for this track if it was deferred during parsing.
Args:
service: Service instance that can fetch track-specific DRM info
Returns:
True if DRM was loaded or already present, False if failed
"""
if not getattr(self, "needs_drm_loading", False):
return bool(self.drm)
if self.drm:
self.needs_drm_loading = False
return True
if not service or not hasattr(service, "get_track_drm"):
return self.load_drm_from_playlist()
try:
track_drm = service.get_track_drm(self)
if track_drm:
self.drm = track_drm if isinstance(track_drm, list) else [track_drm]
self.needs_drm_loading = False
return True
except Exception as e:
raise ValueError(f"Failed to load DRM from service for track {self.id}: {e}")
return self.load_drm_from_playlist()
def load_drm_from_playlist(self) -> bool:
"""
Fallback method to load DRM by fetching this track's individual playlist.
"""
if self.drm:
self.needs_drm_loading = False
return True
try:
import m3u8
from pyplayready.cdm import Cdm as PlayReadyCdm
from pyplayready.system.pssh import PSSH as PR_PSSH
from pywidevine.cdm import Cdm as WidevineCdm
from pywidevine.pssh import PSSH as WV_PSSH
session = getattr(self, "session", None) or Session()
response = session.get(self.url)
playlist = m3u8.loads(response.text, self.url)
drm_list = []
for key in playlist.keys or []:
if not key or not key.keyformat:
continue
fmt = key.keyformat.lower()
if fmt == WidevineCdm.urn:
pssh_b64 = key.uri.split(",")[-1]
drm = Widevine(pssh=WV_PSSH(pssh_b64))
drm_list.append(drm)
elif fmt == PlayReadyCdm or "com.microsoft.playready" in fmt:
pssh_b64 = key.uri.split(",")[-1]
drm = PlayReady(pssh=PR_PSSH(pssh_b64), pssh_b64=pssh_b64)
drm_list.append(drm)
if drm_list:
self.drm = drm_list
self.needs_drm_loading = False
return True
except Exception as e:
raise ValueError(f"Failed to load DRM from playlist for track {self.id}: {e}")
return False
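A hedged sketch of how a caller might drive the deferred-loading hooks above; tracks and service stand in for whatever Tracks and Service instances are in play:

# Resolve DRM that was deferred during manifest parsing, just before decryption.
for track in tracks.videos + tracks.audio:
    if getattr(track, "needs_drm_loading", False):
        if not track.load_drm_if_needed(service):
            raise ValueError(f"No DRM could be resolved for track {track.id}")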
def get_init_segment(
self,
maximum_size: int = 20000,
@@ -499,8 +586,8 @@ class Track:
raise TypeError(f"Expected url to be a {str}, not {type(url)}")
if not isinstance(byte_range, (str, type(None))):
raise TypeError(f"Expected byte_range to be a {str}, not {type(byte_range)}")
if not isinstance(session, (Session, type(None))):
raise TypeError(f"Expected session to be a {Session}, not {type(session)}")
if not isinstance(session, (Session, CurlSession, type(None))):
raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {type(session)}")
if not url:
if self.descriptor != self.Descriptor.URL:
@@ -558,15 +645,32 @@ class Track:
output_path = original_path.with_stem(f"{original_path.stem}_repack")
def _ffmpeg(extra_args: list[str] = None):
subprocess.run(
args = [
binaries.FFMPEG,
"-hide_banner",
"-loglevel",
"error",
"-i",
original_path,
*(extra_args or []),
]
if hasattr(self, "data") and self.data.get("audio_language"):
audio_lang = self.data["audio_language"]
audio_name = self.data.get("audio_language_name", audio_lang)
args.extend(
[
"-metadata:s:a:0",
f"language={audio_lang}",
"-metadata:s:a:0",
f"title={audio_name}",
"-metadata:s:a:0",
f"handler_name={audio_name}",
]
)
args.extend(
[
binaries.FFMPEG,
"-hide_banner",
"-loglevel",
"error",
"-i",
original_path,
*(extra_args or []),
# Following are very important!
"-map_metadata",
"-1", # don't transfer metadata to output file
@@ -575,7 +679,11 @@ class Track:
"-codec",
"copy",
str(output_path),
],
]
)
subprocess.run(
args,
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,


@@ -14,7 +14,7 @@ from rich.tree import Tree
from unshackle.core import binaries
from unshackle.core.config import config
from unshackle.core.console import console
from unshackle.core.constants import LANGUAGE_MAX_DISTANCE, AnyTrack, TrackT
from unshackle.core.constants import LANGUAGE_EXACT_DISTANCE, LANGUAGE_MAX_DISTANCE, AnyTrack, TrackT
from unshackle.core.events import events
from unshackle.core.tracks.attachment import Attachment
from unshackle.core.tracks.audio import Audio
@@ -181,7 +181,7 @@ class Tracks:
log = logging.getLogger("Tracks")
if duplicates:
log.warning(f" - Found and skipped {duplicates} duplicate tracks...")
log.debug(f" - Found and skipped {duplicates} duplicate tracks...")
def sort_videos(self, by_language: Optional[Sequence[Union[str, Language]]] = None) -> None:
"""Sort video tracks by bitrate, and optionally language."""
@@ -202,17 +202,16 @@ class Tracks:
"""Sort audio tracks by bitrate, descriptive, and optionally language."""
if not self.audio:
return
# bitrate
self.audio.sort(key=lambda x: float(x.bitrate or 0.0), reverse=True)
# descriptive
self.audio.sort(key=lambda x: str(x.language) if x.descriptive else "")
self.audio.sort(key=lambda x: x.descriptive)
# bitrate (within each descriptive group)
self.audio.sort(key=lambda x: float(x.bitrate or 0.0), reverse=True)
# language
for language in reversed(by_language or []):
if str(language) in ("all", "best"):
language = next((x.language for x in self.audio if x.is_original_lang), "")
if not language:
continue
self.audio.sort(key=lambda x: str(x.language))
self.audio.sort(key=lambda x: not is_close_match(language, [x.language]))
def sort_subtitles(self, by_language: Optional[Sequence[Union[str, Language]]] = None) -> None:
@@ -295,17 +294,27 @@ class Tracks:
self.videos = selected
@staticmethod
def by_language(tracks: list[TrackT], languages: list[str], per_language: int = 0) -> list[TrackT]:
def by_language(
tracks: list[TrackT], languages: list[str], per_language: int = 0, exact_match: bool = False
) -> list[TrackT]:
distance = LANGUAGE_EXACT_DISTANCE if exact_match else LANGUAGE_MAX_DISTANCE
selected = []
for language in languages:
selected.extend(
[x for x in tracks if closest_supported_match(x.language, [language], LANGUAGE_MAX_DISTANCE)][
[x for x in tracks if closest_supported_match(str(x.language), [language], distance)][
: per_language or None
]
)
return selected
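Illustrative use of the new exact_match switch, reusing the variants named in the constants comment earlier; tracks is assumed to be a populated Tracks object:

# Fuzzy (default): en, en-US and en-AU all fall within LANGUAGE_MAX_DISTANCE of "en".
fuzzy = Tracks.by_language(tracks.audio, ["en"])

# Exact: only tracks whose tag is exactly "en" survive, at most one of them.
exact = Tracks.by_language(tracks.audio, ["en"], per_language=1, exact_match=True)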
def mux(self, title: str, delete: bool = True, progress: Optional[partial] = None) -> tuple[Path, int, list[str]]:
def mux(
self,
title: str,
delete: bool = True,
progress: Optional[partial] = None,
audio_expected: bool = True,
title_language: Optional[Language] = None,
) -> tuple[Path, int, list[str]]:
"""
Multiplex all the Tracks into a Matroska Container file.
@@ -315,7 +324,28 @@ class Tracks:
delete: Delete all track files after multiplexing.
progress: Update a rich progress bar via `completed=...`. This must be the
progress object's update() func, pre-set with task id via functools.partial.
audio_expected: Whether audio is expected in the output. Used to determine
if embedded audio metadata should be added.
title_language: The title's intended language. Used to select the best video track
for audio metadata when multiple video tracks exist.
"""
if self.videos and not self.audio and audio_expected:
video_track = None
if title_language:
video_track = next((v for v in self.videos if v.language == title_language), None)
if not video_track:
video_track = next((v for v in self.videos if v.is_original_lang), None)
video_track = video_track or self.videos[0]
if video_track.language.is_valid():
lang_code = str(video_track.language)
lang_name = video_track.language.display_name()
for video in self.videos:
video.needs_repack = True
video.data["audio_language"] = lang_code
video.data["audio_language_name"] = lang_name
if not binaries.MKVToolNix:
raise RuntimeError("MKVToolNix (mkvmerge) is required for muxing but was not found")
@@ -332,12 +362,20 @@ class Tracks:
raise ValueError("Video Track must be downloaded before muxing...")
events.emit(events.Types.TRACK_MULTIPLEX, track=vt)
is_default = False
if title_language:
is_default = vt.language == title_language
if not any(v.language == title_language for v in self.videos):
is_default = vt.is_original_lang or i == 0
else:
is_default = i == 0
# Prepare base arguments
video_args = [
"--language",
f"0:{vt.language}",
"--default-track",
f"0:{i == 0}",
f"0:{is_default}",
"--original-flag",
f"0:{vt.is_original_lang}",
"--compression",
@@ -363,6 +401,18 @@ class Tracks:
]
)
if hasattr(vt, "data") and vt.data.get("audio_language"):
audio_lang = vt.data["audio_language"]
audio_name = vt.data.get("audio_language_name", audio_lang)
video_args.extend(
[
"--language",
f"1:{audio_lang}",
"--track-name",
f"1:{audio_name}",
]
)
cl.extend(video_args + ["(", str(vt.path), ")"])
for i, at in enumerate(self.audio):


@@ -24,7 +24,7 @@ from unidecode import unidecode
from unshackle.core.cacher import Cacher
from unshackle.core.config import config
from unshackle.core.constants import LANGUAGE_MAX_DISTANCE
from unshackle.core.constants import LANGUAGE_EXACT_DISTANCE, LANGUAGE_MAX_DISTANCE
def rotate_log_file(log_path: Path, keep: int = 20) -> Path:
@@ -114,6 +114,14 @@ def is_close_match(language: Union[str, Language], languages: Sequence[Union[str
return closest_match(language, list(map(str, languages)))[1] <= LANGUAGE_MAX_DISTANCE
def is_exact_match(language: Union[str, Language], languages: Sequence[Union[str, Language, None]]) -> bool:
"""Check if a language is an exact match to any of the provided languages."""
languages = [x for x in languages if x]
if not languages:
return False
return closest_match(language, list(map(str, languages)))[1] <= LANGUAGE_EXACT_DISTANCE
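A quick illustration of the two predicates, using the example variants from the constants comment:

is_close_match("en-US", ["en"])   # True  - en, en-US, en-AU sit within LANGUAGE_MAX_DISTANCE
is_exact_match("en-US", ["en"])   # False - only distance LANGUAGE_EXACT_DISTANCE (0) passes
is_exact_match("en", ["en"])      # True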
def get_boxes(data: bytes, box_type: bytes, as_bytes: bool = False) -> Box:
"""
Scan a byte array for a wanted MP4/ISOBMFF box, then parse and yield each find.


@@ -8,6 +8,7 @@ import tempfile
from difflib import SequenceMatcher
from pathlib import Path
from typing import Optional, Tuple
from xml.sax.saxutils import escape
import requests
from requests.adapters import HTTPAdapter, Retry
@@ -289,11 +290,11 @@ def _apply_tags(path: Path, tags: dict[str, str]) -> None:
log.debug("mkvpropedit not found on PATH; skipping tags")
return
log.debug("Applying tags to %s: %s", path, tags)
xml_lines = ["<?xml version='1.0' encoding='UTF-8'?>", "<Tags>", " <Tag>", " <Targets/>"]
xml_lines = ['<?xml version="1.0" encoding="UTF-8"?>', "<Tags>", " <Tag>", " <Targets/>"]
for name, value in tags.items():
xml_lines.append(f" <Simple><Name>{name}</Name><String>{value}</String></Simple>")
xml_lines.append(f" <Simple><Name>{escape(name)}</Name><String>{escape(value)}</String></Simple>")
xml_lines.extend([" </Tag>", "</Tags>"])
with tempfile.NamedTemporaryFile("w", suffix=".xml", delete=False) as f:
with tempfile.NamedTemporaryFile("w", suffix=".xml", delete=False, encoding="utf-8") as f:
f.write("\n".join(xml_lines))
tmp_path = Path(f.name)
try:
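As an aside, xml.sax.saxutils.escape used above covers the XML-reserved characters in tag names and values (quotes are left alone unless extra entities are supplied):

from xml.sax.saxutils import escape

escape('Tom & Jerry <S01 "Pilot">')  # 'Tom &amp; Jerry &lt;S01 "Pilot"&gt;'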
@@ -349,13 +350,25 @@ def tag_file(path: Path, title: Title, tmdb_id: Optional[int] | None = None) ->
if simkl_tmdb_id:
tmdb_id = simkl_tmdb_id
show_ids = simkl_data.get("show", {}).get("ids", {})
if show_ids.get("imdb"):
standard_tags["IMDB"] = f"https://www.imdb.com/title/{show_ids['imdb']}"
if show_ids.get("tvdb"):
standard_tags["TVDB"] = f"https://thetvdb.com/dereferrer/series/{show_ids['tvdb']}"
if show_ids.get("tmdbtv"):
standard_tags["TMDB"] = f"https://www.themoviedb.org/tv/{show_ids['tmdbtv']}"
# Handle TV show data from Simkl
if simkl_data.get("type") == "episode" and "show" in simkl_data:
show_ids = simkl_data.get("show", {}).get("ids", {})
if show_ids.get("imdb"):
standard_tags["IMDB"] = show_ids["imdb"]
if show_ids.get("tvdb"):
standard_tags["TVDB2"] = f"series/{show_ids['tvdb']}"
if show_ids.get("tmdbtv"):
standard_tags["TMDB"] = f"tv/{show_ids['tmdbtv']}"
# Handle movie data from Simkl
elif simkl_data.get("type") == "movie" and "movie" in simkl_data:
movie_ids = simkl_data.get("movie", {}).get("ids", {})
if movie_ids.get("imdb"):
standard_tags["IMDB"] = movie_ids["imdb"]
if movie_ids.get("tvdb"):
standard_tags["TVDB2"] = f"movies/{movie_ids['tvdb']}"
if movie_ids.get("tmdb"):
standard_tags["TMDB"] = f"movie/{movie_ids['tmdb']}"
# Use TMDB API for additional metadata (either from provided ID or Simkl lookup)
api_key = _api_key()
@@ -373,8 +386,8 @@ def tag_file(path: Path, title: Title, tmdb_id: Optional[int] | None = None) ->
_apply_tags(path, custom_tags)
return
tmdb_url = f"https://www.themoviedb.org/{'movie' if kind == 'movie' else 'tv'}/{tmdb_id}"
standard_tags["TMDB"] = tmdb_url
prefix = "movie" if kind == "movie" else "tv"
standard_tags["TMDB"] = f"{prefix}/{tmdb_id}"
try:
ids = external_ids(tmdb_id, kind)
except requests.RequestException as exc:
@@ -385,11 +398,13 @@ def tag_file(path: Path, title: Title, tmdb_id: Optional[int] | None = None) ->
imdb_id = ids.get("imdb_id")
if imdb_id:
standard_tags["IMDB"] = f"https://www.imdb.com/title/{imdb_id}"
standard_tags["IMDB"] = imdb_id
tvdb_id = ids.get("tvdb_id")
if tvdb_id:
tvdb_prefix = "movies" if kind == "movie" else "series"
standard_tags["TVDB"] = f"https://thetvdb.com/dereferrer/{tvdb_prefix}/{tvdb_id}"
if kind == "movie":
standard_tags["TVDB2"] = f"movies/{tvdb_id}"
else:
standard_tags["TVDB2"] = f"series/{tvdb_id}"
merged_tags = {
**custom_tags,


@@ -74,7 +74,9 @@ class Vaults:
for vault in self.vaults:
if not vault.no_push:
try:
success += bool(vault.add_keys(self.service, kid_keys))
# Count each vault that successfully processes the keys (whether new or existing)
vault.add_keys(self.service, kid_keys)
success += 1
except (PermissionError, NotImplementedError):
pass
return success


@@ -282,6 +282,10 @@ class EXAMPLE(Service):
return chapters
def get_widevine_service_certificate(self, **_: any) -> str:
"""Return the Widevine service certificate from config, if available."""
return self.config.get("certificate")
def get_playready_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[bytes]:
"""Retrieve a PlayReady license for a given track."""


@@ -88,6 +88,26 @@ cdm:
jane_uhd: nexus_5_l1 # Profile 'jane_uhd' uses Nexus 5 L1
default: generic_android_l3 # Default CDM for this service
# NEW: Quality-based CDM selection
# Use different CDMs based on video resolution
# Supports operators: >=, >, <=, <, or exact match
EXAMPLE_QUALITY:
"<=1080": generic_android_l3 # Use L3 for 1080p and below
">1080": nexus_5_l1 # Use L1 for above 1080p (1440p, 2160p)
default: generic_android_l3 # Optional: fallback if no quality match
# You can mix profiles and quality thresholds in the same service
NETFLIX:
# Profile-based selection (existing functionality)
john: netflix_l3_profile
jane: netflix_l1_profile
# Quality-based selection (new functionality)
"<=720": netflix_mobile_l3
"1080": netflix_standard_l3
">=1440": netflix_premium_l1
# Fallback
default: netflix_standard_l3
# Use pywidevine Serve-compliant Remote CDMs
remote_cdm:
- name: "chrome"
@@ -106,25 +126,25 @@ remote_cdm:
secret: secret_key
- name: "decrypt_labs_chrome"
type: "decrypt_labs" # Required to identify as DecryptLabs CDM
device_name: "ChromeCDM" # Scheme identifier - must match exactly
type: "decrypt_labs" # Required to identify as DecryptLabs CDM
device_name: "ChromeCDM" # Scheme identifier - must match exactly
device_type: CHROME
system_id: 4464 # Doesn't matter
security_level: 3
host: "https://keyxtractor.decryptlabs.com"
secret: "your_decrypt_labs_api_key_here" # Replace with your API key
secret: "your_decrypt_labs_api_key_here" # Replace with your API key
- name: "decrypt_labs_l1"
type: "decrypt_labs"
device_name: "L1" # Scheme identifier - must match exactly
device_name: "L1" # Scheme identifier - must match exactly
device_type: ANDROID
system_id: 4464
system_id: 4464
security_level: 1
host: "https://keyxtractor.decryptlabs.com"
secret: "your_decrypt_labs_api_key_here"
- name: "decrypt_labs_l2"
type: "decrypt_labs"
device_name: "L2" # Scheme identifier - must match exactly
device_name: "L2" # Scheme identifier - must match exactly
device_type: ANDROID
system_id: 4464
security_level: 2
@@ -133,7 +153,7 @@ remote_cdm:
- name: "decrypt_labs_playready_sl2"
type: "decrypt_labs"
device_name: "SL2" # Scheme identifier - must match exactly
device_name: "SL2" # Scheme identifier - must match exactly
device_type: PLAYREADY
system_id: 0
security_level: 2000
@@ -142,7 +162,7 @@ remote_cdm:
- name: "decrypt_labs_playready_sl3"
type: "decrypt_labs"
device_name: "SL3" # Scheme identifier - must match exactly
device_name: "SL3" # Scheme identifier - must match exactly
device_type: PLAYREADY
system_id: 0
security_level: 3000
@@ -233,6 +253,7 @@ tmdb_api_key: ""
# - subby: Always use subby with advanced processing
# - pycaption: Use only pycaption library (no SubtitleEdit, no subby)
# - subtitleedit: Prefer SubtitleEdit when available, fall back to pycaption
# - pysubs2: Use pysubs2 library (supports SRT/SSA/ASS/WebVTT/TTML/SAMI/MicroDVD/MPL2/TMP)
subtitle:
conversion_method: auto
sdh_method: auto

View File

@@ -16,13 +16,21 @@ class InsertResult(Enum):
class HTTP(Vault):
"""Key Vault using HTTP API with support for both query parameters and JSON payloads."""
"""
Key Vault using HTTP API with support for multiple API modes.
Supported modes:
- query: Uses GET requests with query parameters
- json: Uses POST requests with JSON payloads
- decrypt_labs: Uses DecryptLabs API format (read-only)
"""
def __init__(
self,
name: str,
host: str,
password: str,
password: Optional[str] = None,
api_key: Optional[str] = None,
username: Optional[str] = None,
api_mode: str = "query",
no_push: bool = False,
@@ -34,13 +42,17 @@ class HTTP(Vault):
name: Vault name
host: Host URL
password: Password for query mode or API token for json mode
username: Username (required for query mode, ignored for json mode)
api_mode: "query" for query parameters or "json" for JSON API
api_key: API key (alternative to password, used for decrypt_labs mode)
username: Username (required for query mode, ignored for json/decrypt_labs mode)
api_mode: "query" for query parameters, "json" for JSON API, or "decrypt_labs" for DecryptLabs API
no_push: If True, this vault will not receive pushed keys
"""
super().__init__(name, no_push)
self.url = host
self.password = password
self.password = api_key or password
if not self.password:
raise ValueError("Either password or api_key is required")
self.username = username
self.api_mode = api_mode.lower()
self.current_title = None
@@ -48,11 +60,15 @@ class HTTP(Vault):
self.session.headers.update({"User-Agent": f"unshackle v{__version__}"})
self.api_session_id = None
if self.api_mode == "decrypt_labs":
self.session.headers.update({"decrypt-labs-api-key": self.password})
self.no_push = True
# Validate configuration based on mode
if self.api_mode == "query" and not self.username:
raise ValueError("Username is required for query mode")
elif self.api_mode not in ["query", "json"]:
raise ValueError("api_mode must be either 'query' or 'json'")
elif self.api_mode not in ["query", "json", "decrypt_labs"]:
raise ValueError("api_mode must be either 'query', 'json', or 'decrypt_labs'")
def request(self, method: str, params: dict = None) -> dict:
"""Make a request to the JSON API vault."""
@@ -95,7 +111,51 @@ class HTTP(Vault):
if isinstance(kid, UUID):
kid = kid.hex
if self.api_mode == "json":
if self.api_mode == "decrypt_labs":
try:
request_payload = {"service": service.lower(), "kid": kid}
response = self.session.post(self.url, json=request_payload)
if not response.ok:
return None
data = response.json()
if data.get("message") != "success":
return None
cached_keys = data.get("cached_keys")
if not cached_keys:
return None
if isinstance(cached_keys, str):
try:
cached_keys = json.loads(cached_keys)
except json.JSONDecodeError:
return cached_keys
if isinstance(cached_keys, dict):
if cached_keys.get("kid") == kid:
return cached_keys.get("key")
if kid in cached_keys:
return cached_keys[kid]
elif isinstance(cached_keys, list):
for entry in cached_keys:
if isinstance(entry, dict):
if entry.get("kid") == kid:
return entry.get("key")
elif isinstance(entry, str) and ":" in entry:
entry_kid, entry_key = entry.split(":", 1)
if entry_kid == kid:
return entry_key
except Exception as e:
print(f"Failed to get key from DecryptLabs ({e.__class__.__name__}: {e})")
return None
return None
elif self.api_mode == "json":
try:
params = {
"kid": kid,
@@ -132,7 +192,9 @@ class HTTP(Vault):
return data["keys"][0]["key"]
def get_keys(self, service: str) -> Iterator[tuple[str, str]]:
if self.api_mode == "json":
if self.api_mode == "decrypt_labs":
return iter([])
elif self.api_mode == "json":
# JSON API doesn't support getting all keys, so return empty iterator
# This will cause the copy command to rely on the API's internal duplicate handling
return iter([])
@@ -153,6 +215,9 @@ class HTTP(Vault):
if not key or key.count("0") == len(key):
raise ValueError("You cannot add a NULL Content Key to a Vault.")
if self.api_mode == "decrypt_labs":
return False
if isinstance(kid, UUID):
kid = kid.hex
@@ -192,6 +257,9 @@ class HTTP(Vault):
return data.get("status_code") == 200
def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str]) -> int:
if self.api_mode == "decrypt_labs":
return 0
for kid, key in kid_keys.items():
if not key or key.count("0") == len(key):
raise ValueError("You cannot add a NULL Content Key to a Vault.")
@@ -243,7 +311,9 @@ class HTTP(Vault):
return inserted_count
def get_services(self) -> Iterator[str]:
if self.api_mode == "json":
if self.api_mode == "decrypt_labs":
return iter([])
elif self.api_mode == "json":
try:
response = self.request("GetServices")
services = response.get("services", [])
@@ -283,6 +353,9 @@ class HTTP(Vault):
if not key or key.count("0") == len(key):
raise ValueError("You cannot add a NULL Content Key to a Vault.")
if self.api_mode == "decrypt_labs":
return InsertResult.FAILURE
if isinstance(kid, UUID):
kid = kid.hex


@@ -28,26 +28,33 @@ class MySQL(Vault):
raise PermissionError(f"MySQL vault {self.slug} has no SELECT permission.")
def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]:
if not self.has_table(service):
# no table, no key, simple
return None
if isinstance(kid, UUID):
kid = kid.hex
service_variants = [service]
if service != service.lower():
service_variants.append(service.lower())
if service != service.upper():
service_variants.append(service.upper())
conn = self.conn_factory.get()
cursor = conn.cursor()
try:
cursor.execute(
# TODO: SQL injection risk
f"SELECT `id`, `key_` FROM `{service}` WHERE `kid`=%s AND `key_`!=%s",
(kid, "0" * 32),
)
cek = cursor.fetchone()
if not cek:
return None
return cek["key_"]
for service_name in service_variants:
if not self.has_table(service_name):
continue
cursor.execute(
# TODO: SQL injection risk
f"SELECT `id`, `key_` FROM `{service_name}` WHERE `kid`=%s AND `key_`!=%s",
(kid, "0" * 32),
)
cek = cursor.fetchone()
if cek:
return cek["key_"]
return None
finally:
cursor.close()
@@ -131,16 +138,27 @@ class MySQL(Vault):
if any(isinstance(kid, UUID) for kid, key_ in kid_keys.items()):
kid_keys = {kid.hex if isinstance(kid, UUID) else kid: key_ for kid, key_ in kid_keys.items()}
if not kid_keys:
return 0
conn = self.conn_factory.get()
cursor = conn.cursor()
try:
placeholders = ",".join(["%s"] * len(kid_keys))
cursor.execute(f"SELECT kid FROM `{service}` WHERE kid IN ({placeholders})", list(kid_keys.keys()))
existing_kids = {row["kid"] for row in cursor.fetchall()}
new_keys = {kid: key for kid, key in kid_keys.items() if kid not in existing_kids}
if not new_keys:
return 0
cursor.executemany(
# TODO: SQL injection risk
f"INSERT IGNORE INTO `{service}` (kid, key_) VALUES (%s, %s)",
kid_keys.items(),
f"INSERT INTO `{service}` (kid, key_) VALUES (%s, %s)",
new_keys.items(),
)
return cursor.rowcount
return len(new_keys)
finally:
conn.commit()
cursor.close()


@@ -19,22 +19,30 @@ class SQLite(Vault):
self.conn_factory = ConnectionFactory(self.path)
def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]:
if not self.has_table(service):
# no table, no key, simple
return None
if isinstance(kid, UUID):
kid = kid.hex
conn = self.conn_factory.get()
cursor = conn.cursor()
# Try the original, lowercase, and uppercase service names to handle case-sensitivity issues
service_variants = [service]
if service != service.lower():
service_variants.append(service.lower())
if service != service.upper():
service_variants.append(service.upper())
try:
cursor.execute(f"SELECT `id`, `key_` FROM `{service}` WHERE `kid`=? AND `key_`!=?", (kid, "0" * 32))
cek = cursor.fetchone()
if not cek:
return None
return cek[1]
for service_name in service_variants:
if not self.has_table(service_name):
continue
cursor.execute(f"SELECT `id`, `key_` FROM `{service_name}` WHERE `kid`=? AND `key_`!=?", (kid, "0" * 32))
cek = cursor.fetchone()
if cek:
return cek[1]
return None
finally:
cursor.close()
@@ -102,16 +110,27 @@ class SQLite(Vault):
if any(isinstance(kid, UUID) for kid, key_ in kid_keys.items()):
kid_keys = {kid.hex if isinstance(kid, UUID) else kid: key_ for kid, key_ in kid_keys.items()}
if not kid_keys:
return 0
conn = self.conn_factory.get()
cursor = conn.cursor()
try:
placeholders = ",".join(["?"] * len(kid_keys))
cursor.execute(f"SELECT kid FROM `{service}` WHERE kid IN ({placeholders})", list(kid_keys.keys()))
existing_kids = {row[0] for row in cursor.fetchall()}
new_keys = {kid: key for kid, key in kid_keys.items() if kid not in existing_kids}
if not new_keys:
return 0
cursor.executemany(
# TODO: SQL injection risk
f"INSERT OR IGNORE INTO `{service}` (kid, key_) VALUES (?, ?)",
kid_keys.items(),
f"INSERT INTO `{service}` (kid, key_) VALUES (?, ?)",
new_keys.items(),
)
return cursor.rowcount
return len(new_keys)
finally:
conn.commit()
cursor.close()

uv.lock (generated, 155 lines changed)

@@ -645,67 +645,72 @@ wheels = [
[[package]]
name = "lxml"
version = "5.4.0"
version = "6.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479, upload-time = "2025-04-23T01:50:29.322Z" }
sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f5/1f/a3b6b74a451ceb84b471caa75c934d2430a4d84395d38ef201d539f38cd1/lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c", size = 8076838, upload-time = "2025-04-23T01:44:29.325Z" },
{ url = "https://files.pythonhosted.org/packages/36/af/a567a55b3e47135b4d1f05a1118c24529104c003f95851374b3748139dc1/lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7", size = 4381827, upload-time = "2025-04-23T01:44:33.345Z" },
{ url = "https://files.pythonhosted.org/packages/50/ba/4ee47d24c675932b3eb5b6de77d0f623c2db6dc466e7a1f199792c5e3e3a/lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf", size = 5204098, upload-time = "2025-04-23T01:44:35.809Z" },
{ url = "https://files.pythonhosted.org/packages/f2/0f/b4db6dfebfefe3abafe360f42a3d471881687fd449a0b86b70f1f2683438/lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28", size = 4930261, upload-time = "2025-04-23T01:44:38.271Z" },
{ url = "https://files.pythonhosted.org/packages/0b/1f/0bb1bae1ce056910f8db81c6aba80fec0e46c98d77c0f59298c70cd362a3/lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609", size = 5529621, upload-time = "2025-04-23T01:44:40.921Z" },
{ url = "https://files.pythonhosted.org/packages/21/f5/e7b66a533fc4a1e7fa63dd22a1ab2ec4d10319b909211181e1ab3e539295/lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4", size = 4983231, upload-time = "2025-04-23T01:44:43.871Z" },
{ url = "https://files.pythonhosted.org/packages/11/39/a38244b669c2d95a6a101a84d3c85ba921fea827e9e5483e93168bf1ccb2/lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7", size = 5084279, upload-time = "2025-04-23T01:44:46.632Z" },
{ url = "https://files.pythonhosted.org/packages/db/64/48cac242347a09a07740d6cee7b7fd4663d5c1abd65f2e3c60420e231b27/lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f", size = 4927405, upload-time = "2025-04-23T01:44:49.843Z" },
{ url = "https://files.pythonhosted.org/packages/98/89/97442835fbb01d80b72374f9594fe44f01817d203fa056e9906128a5d896/lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997", size = 5550169, upload-time = "2025-04-23T01:44:52.791Z" },
{ url = "https://files.pythonhosted.org/packages/f1/97/164ca398ee654eb21f29c6b582685c6c6b9d62d5213abc9b8380278e9c0a/lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c", size = 5062691, upload-time = "2025-04-23T01:44:56.108Z" },
{ url = "https://files.pythonhosted.org/packages/d0/bc/712b96823d7feb53482d2e4f59c090fb18ec7b0d0b476f353b3085893cda/lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b", size = 5133503, upload-time = "2025-04-23T01:44:59.222Z" },
{ url = "https://files.pythonhosted.org/packages/d4/55/a62a39e8f9da2a8b6002603475e3c57c870cd9c95fd4b94d4d9ac9036055/lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b", size = 4999346, upload-time = "2025-04-23T01:45:02.088Z" },
{ url = "https://files.pythonhosted.org/packages/ea/47/a393728ae001b92bb1a9e095e570bf71ec7f7fbae7688a4792222e56e5b9/lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563", size = 5627139, upload-time = "2025-04-23T01:45:04.582Z" },
{ url = "https://files.pythonhosted.org/packages/5e/5f/9dcaaad037c3e642a7ea64b479aa082968de46dd67a8293c541742b6c9db/lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5", size = 5465609, upload-time = "2025-04-23T01:45:07.649Z" },
{ url = "https://files.pythonhosted.org/packages/a7/0a/ebcae89edf27e61c45023005171d0ba95cb414ee41c045ae4caf1b8487fd/lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776", size = 5192285, upload-time = "2025-04-23T01:45:10.456Z" },
{ url = "https://files.pythonhosted.org/packages/42/ad/cc8140ca99add7d85c92db8b2354638ed6d5cc0e917b21d36039cb15a238/lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7", size = 3477507, upload-time = "2025-04-23T01:45:12.474Z" },
{ url = "https://files.pythonhosted.org/packages/e9/39/597ce090da1097d2aabd2f9ef42187a6c9c8546d67c419ce61b88b336c85/lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250", size = 3805104, upload-time = "2025-04-23T01:45:15.104Z" },
{ url = "https://files.pythonhosted.org/packages/81/2d/67693cc8a605a12e5975380d7ff83020dcc759351b5a066e1cced04f797b/lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9", size = 8083240, upload-time = "2025-04-23T01:45:18.566Z" },
{ url = "https://files.pythonhosted.org/packages/73/53/b5a05ab300a808b72e848efd152fe9c022c0181b0a70b8bca1199f1bed26/lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7", size = 4387685, upload-time = "2025-04-23T01:45:21.387Z" },
{ url = "https://files.pythonhosted.org/packages/d8/cb/1a3879c5f512bdcd32995c301886fe082b2edd83c87d41b6d42d89b4ea4d/lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa", size = 4991164, upload-time = "2025-04-23T01:45:23.849Z" },
{ url = "https://files.pythonhosted.org/packages/f9/94/bbc66e42559f9d04857071e3b3d0c9abd88579367fd2588a4042f641f57e/lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df", size = 4746206, upload-time = "2025-04-23T01:45:26.361Z" },
{ url = "https://files.pythonhosted.org/packages/66/95/34b0679bee435da2d7cae895731700e519a8dfcab499c21662ebe671603e/lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e", size = 5342144, upload-time = "2025-04-23T01:45:28.939Z" },
{ url = "https://files.pythonhosted.org/packages/e0/5d/abfcc6ab2fa0be72b2ba938abdae1f7cad4c632f8d552683ea295d55adfb/lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44", size = 4825124, upload-time = "2025-04-23T01:45:31.361Z" },
{ url = "https://files.pythonhosted.org/packages/5a/78/6bd33186c8863b36e084f294fc0a5e5eefe77af95f0663ef33809cc1c8aa/lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba", size = 4876520, upload-time = "2025-04-23T01:45:34.191Z" },
{ url = "https://files.pythonhosted.org/packages/3b/74/4d7ad4839bd0fc64e3d12da74fc9a193febb0fae0ba6ebd5149d4c23176a/lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba", size = 4765016, upload-time = "2025-04-23T01:45:36.7Z" },
{ url = "https://files.pythonhosted.org/packages/24/0d/0a98ed1f2471911dadfc541003ac6dd6879fc87b15e1143743ca20f3e973/lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c", size = 5362884, upload-time = "2025-04-23T01:45:39.291Z" },
{ url = "https://files.pythonhosted.org/packages/48/de/d4f7e4c39740a6610f0f6959052b547478107967362e8424e1163ec37ae8/lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8", size = 4902690, upload-time = "2025-04-23T01:45:42.386Z" },
{ url = "https://files.pythonhosted.org/packages/07/8c/61763abd242af84f355ca4ef1ee096d3c1b7514819564cce70fd18c22e9a/lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86", size = 4944418, upload-time = "2025-04-23T01:45:46.051Z" },
{ url = "https://files.pythonhosted.org/packages/f9/c5/6d7e3b63e7e282619193961a570c0a4c8a57fe820f07ca3fe2f6bd86608a/lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056", size = 4827092, upload-time = "2025-04-23T01:45:48.943Z" },
{ url = "https://files.pythonhosted.org/packages/71/4a/e60a306df54680b103348545706a98a7514a42c8b4fbfdcaa608567bb065/lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7", size = 5418231, upload-time = "2025-04-23T01:45:51.481Z" },
{ url = "https://files.pythonhosted.org/packages/27/f2/9754aacd6016c930875854f08ac4b192a47fe19565f776a64004aa167521/lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd", size = 5261798, upload-time = "2025-04-23T01:45:54.146Z" },
{ url = "https://files.pythonhosted.org/packages/38/a2/0c49ec6941428b1bd4f280650d7b11a0f91ace9db7de32eb7aa23bcb39ff/lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751", size = 4988195, upload-time = "2025-04-23T01:45:56.685Z" },
{ url = "https://files.pythonhosted.org/packages/7a/75/87a3963a08eafc46a86c1131c6e28a4de103ba30b5ae903114177352a3d7/lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4", size = 3474243, upload-time = "2025-04-23T01:45:58.863Z" },
{ url = "https://files.pythonhosted.org/packages/fa/f9/1f0964c4f6c2be861c50db380c554fb8befbea98c6404744ce243a3c87ef/lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539", size = 3815197, upload-time = "2025-04-23T01:46:01.096Z" },
{ url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392, upload-time = "2025-04-23T01:46:04.09Z" },
{ url = "https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103, upload-time = "2025-04-23T01:46:07.227Z" },
{ url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224, upload-time = "2025-04-23T01:46:10.237Z" },
{ url = "https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913, upload-time = "2025-04-23T01:46:12.757Z" },
{ url = "https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441, upload-time = "2025-04-23T01:46:16.037Z" },
{ url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165, upload-time = "2025-04-23T01:46:19.137Z" },
{ url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580, upload-time = "2025-04-23T01:46:21.963Z" },
{ url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size = 4759493, upload-time = "2025-04-23T01:46:24.316Z" },
{ url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679, upload-time = "2025-04-23T01:46:27.097Z" },
{ url = "https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691, upload-time = "2025-04-23T01:46:30.009Z" },
{ url = "https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075, upload-time = "2025-04-23T01:46:32.33Z" },
{ url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680, upload-time = "2025-04-23T01:46:34.852Z" },
{ url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253, upload-time = "2025-04-23T01:46:37.608Z" },
{ url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651, upload-time = "2025-04-23T01:46:40.183Z" },
{ url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315, upload-time = "2025-04-23T01:46:43.333Z" },
{ url = "https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149, upload-time = "2025-04-23T01:46:45.684Z" },
{ url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095, upload-time = "2025-04-23T01:46:48.521Z" },
{ url = "https://files.pythonhosted.org/packages/c6/b0/e4d1cbb8c078bc4ae44de9c6a79fec4e2b4151b1b4d50af71d799e76b177/lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55", size = 3892319, upload-time = "2025-04-23T01:49:22.069Z" },
{ url = "https://files.pythonhosted.org/packages/5b/aa/e2bdefba40d815059bcb60b371a36fbfcce970a935370e1b367ba1cc8f74/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740", size = 4211614, upload-time = "2025-04-23T01:49:24.599Z" },
{ url = "https://files.pythonhosted.org/packages/3c/5f/91ff89d1e092e7cfdd8453a939436ac116db0a665e7f4be0cd8e65c7dc5a/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5", size = 4306273, upload-time = "2025-04-23T01:49:27.355Z" },
{ url = "https://files.pythonhosted.org/packages/be/7c/8c3f15df2ca534589717bfd19d1e3482167801caedfa4d90a575facf68a6/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37", size = 4208552, upload-time = "2025-04-23T01:49:29.949Z" },
{ url = "https://files.pythonhosted.org/packages/7d/d8/9567afb1665f64d73fc54eb904e418d1138d7f011ed00647121b4dd60b38/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571", size = 4331091, upload-time = "2025-04-23T01:49:32.842Z" },
{ url = "https://files.pythonhosted.org/packages/f1/ab/fdbbd91d8d82bf1a723ba88ec3e3d76c022b53c391b0c13cad441cdb8f9e/lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4", size = 3487862, upload-time = "2025-04-23T01:49:36.296Z" },
{ url = "https://files.pythonhosted.org/packages/db/8a/f8192a08237ef2fb1b19733f709db88a4c43bc8ab8357f01cb41a27e7f6a/lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388", size = 8590589, upload-time = "2025-09-22T04:00:10.51Z" },
{ url = "https://files.pythonhosted.org/packages/12/64/27bcd07ae17ff5e5536e8d88f4c7d581b48963817a13de11f3ac3329bfa2/lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153", size = 4629671, upload-time = "2025-09-22T04:00:15.411Z" },
{ url = "https://files.pythonhosted.org/packages/02/5a/a7d53b3291c324e0b6e48f3c797be63836cc52156ddf8f33cd72aac78866/lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31", size = 4999961, upload-time = "2025-09-22T04:00:17.619Z" },
{ url = "https://files.pythonhosted.org/packages/f5/55/d465e9b89df1761674d8672bb3e4ae2c47033b01ec243964b6e334c6743f/lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9", size = 5157087, upload-time = "2025-09-22T04:00:19.868Z" },
{ url = "https://files.pythonhosted.org/packages/62/38/3073cd7e3e8dfc3ba3c3a139e33bee3a82de2bfb0925714351ad3d255c13/lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8", size = 5067620, upload-time = "2025-09-22T04:00:21.877Z" },
{ url = "https://files.pythonhosted.org/packages/4a/d3/1e001588c5e2205637b08985597827d3827dbaaece16348c8822bfe61c29/lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba", size = 5406664, upload-time = "2025-09-22T04:00:23.714Z" },
{ url = "https://files.pythonhosted.org/packages/20/cf/cab09478699b003857ed6ebfe95e9fb9fa3d3c25f1353b905c9b73cfb624/lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c", size = 5289397, upload-time = "2025-09-22T04:00:25.544Z" },
{ url = "https://files.pythonhosted.org/packages/a3/84/02a2d0c38ac9a8b9f9e5e1bbd3f24b3f426044ad618b552e9549ee91bd63/lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c", size = 4772178, upload-time = "2025-09-22T04:00:27.602Z" },
{ url = "https://files.pythonhosted.org/packages/56/87/e1ceadcc031ec4aa605fe95476892d0b0ba3b7f8c7dcdf88fdeff59a9c86/lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321", size = 5358148, upload-time = "2025-09-22T04:00:29.323Z" },
{ url = "https://files.pythonhosted.org/packages/fe/13/5bb6cf42bb228353fd4ac5f162c6a84fd68a4d6f67c1031c8cf97e131fc6/lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1", size = 5112035, upload-time = "2025-09-22T04:00:31.061Z" },
{ url = "https://files.pythonhosted.org/packages/e4/e2/ea0498552102e59834e297c5c6dff8d8ded3db72ed5e8aad77871476f073/lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34", size = 4799111, upload-time = "2025-09-22T04:00:33.11Z" },
{ url = "https://files.pythonhosted.org/packages/6a/9e/8de42b52a73abb8af86c66c969b3b4c2a96567b6ac74637c037d2e3baa60/lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a", size = 5351662, upload-time = "2025-09-22T04:00:35.237Z" },
{ url = "https://files.pythonhosted.org/packages/28/a2/de776a573dfb15114509a37351937c367530865edb10a90189d0b4b9b70a/lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c", size = 5314973, upload-time = "2025-09-22T04:00:37.086Z" },
{ url = "https://files.pythonhosted.org/packages/50/a0/3ae1b1f8964c271b5eec91db2043cf8c6c0bce101ebb2a633b51b044db6c/lxml-6.0.2-cp310-cp310-win32.whl", hash = "sha256:1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b", size = 3611953, upload-time = "2025-09-22T04:00:39.224Z" },
{ url = "https://files.pythonhosted.org/packages/d1/70/bd42491f0634aad41bdfc1e46f5cff98825fb6185688dc82baa35d509f1a/lxml-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0", size = 4032695, upload-time = "2025-09-22T04:00:41.402Z" },
{ url = "https://files.pythonhosted.org/packages/d2/d0/05c6a72299f54c2c561a6c6cbb2f512e047fca20ea97a05e57931f194ac4/lxml-6.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5", size = 3680051, upload-time = "2025-09-22T04:00:43.525Z" },
{ url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" },
{ url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" },
{ url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" },
{ url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" },
{ url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" },
{ url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" },
{ url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" },
{ url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" },
{ url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" },
{ url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" },
{ url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" },
{ url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" },
{ url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" },
{ url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" },
{ url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" },
{ url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" },
{ url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" },
{ url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" },
{ url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" },
{ url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" },
{ url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" },
{ url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" },
{ url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" },
{ url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" },
{ url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" },
{ url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" },
{ url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" },
{ url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" },
{ url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" },
{ url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" },
{ url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" },
{ url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" },
{ url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" },
{ url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" },
{ url = "https://files.pythonhosted.org/packages/e7/9c/780c9a8fce3f04690b374f72f41306866b0400b9d0fdf3e17aaa37887eed/lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6", size = 3939264, upload-time = "2025-09-22T04:04:32.892Z" },
{ url = "https://files.pythonhosted.org/packages/f5/5a/1ab260c00adf645d8bf7dec7f920f744b032f69130c681302821d5debea6/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba", size = 4216435, upload-time = "2025-09-22T04:04:34.907Z" },
{ url = "https://files.pythonhosted.org/packages/f2/37/565f3b3d7ffede22874b6d86be1a1763d00f4ea9fc5b9b6ccb11e4ec8612/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5", size = 4325913, upload-time = "2025-09-22T04:04:37.205Z" },
{ url = "https://files.pythonhosted.org/packages/22/ec/f3a1b169b2fb9d03467e2e3c0c752ea30e993be440a068b125fc7dd248b0/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4", size = 4269357, upload-time = "2025-09-22T04:04:39.322Z" },
{ url = "https://files.pythonhosted.org/packages/77/a2/585a28fe3e67daa1cf2f06f34490d556d121c25d500b10082a7db96e3bcd/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d", size = 4412295, upload-time = "2025-09-22T04:04:41.647Z" },
{ url = "https://files.pythonhosted.org/packages/7b/d9/a57dd8bcebd7c69386c20263830d4fa72d27e6b72a229ef7a48e88952d9a/lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d", size = 3516913, upload-time = "2025-09-22T04:04:43.602Z" },
{ url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" },
{ url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" },
{ url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" },
{ url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" },
{ url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" },
{ url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" },
]
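Side note: the lock above now carries lxml 6.0.2 wheels (the 5.4.0 rows being, presumably, the outgoing side of this diff), which lines up with the `<7` upper bound in requires-dist further down. A minimal, generic sketch for confirming which lxml build actually resolved in a given environment; this is illustrative only and not taken from unshackle's code:

    # report the resolved lxml and the libxml2 it was built against
    from lxml import etree

    print(etree.LXML_VERSION)    # 4-tuple, e.g. (6, 0, 2, 0)
    print(etree.LIBXML_VERSION)  # libxml2 version bundled with the wheel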
[[package]]
@@ -1148,21 +1153,22 @@ wheels = [
[[package]]
name = "pyplayready"
version = "0.6.0"
version = "0.6.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
{ name = "click" },
{ name = "construct" },
{ name = "cryptography" },
{ name = "ecpy" },
{ name = "lxml" },
{ name = "pycryptodome" },
{ name = "pyyaml" },
{ name = "requests" },
{ name = "xmltodict" },
]
sdist = { url = "https://files.pythonhosted.org/packages/39/5f/aba36faf0f7feafa4b82bb9e38a0d8c70048e068416a931ee54a565ee3db/pyplayready-0.6.0.tar.gz", hash = "sha256:2b874596a8532efa5d7f2380e8de2cdb611a96cd69b0da5182ab1902083566e9", size = 99157, upload-time = "2025-02-06T13:16:02.763Z" }
sdist = { url = "https://files.pythonhosted.org/packages/53/f2/6d75b6d10a8361b53a2acbe959d51aa586418e9af497381a9f5c436ca488/pyplayready-0.6.3.tar.gz", hash = "sha256:b9b82a32c2cced9c43f910eb1fb891545f1491dc063c1eb9c20634e2417eda76", size = 58019, upload-time = "2025-08-20T19:32:43.642Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/eb/85/a5b7dba7d5420c8f5d133123376a135fda69973f3e8d7c05c58a516a54e5/pyplayready-0.6.0-py3-none-any.whl", hash = "sha256:7f85ba94f2ae0d0c964d2c84e3a4f99bfa947fb120069c70af6c17f83ed6a7f3", size = 114232, upload-time = "2025-02-06T13:16:01.448Z" },
{ url = "https://files.pythonhosted.org/packages/5b/7f/64d5ff5d765f9f2138ee7cc196fd9401f9eae0fb514c66660ad4e56584fa/pyplayready-0.6.3-py3-none-any.whl", hash = "sha256:82f35434e790a7da21df57ec053a2924ceb63622c5a6c5ff9f0fa03db0531c57", size = 66162, upload-time = "2025-08-20T19:32:42.62Z" },
]
[[package]]
@@ -1174,6 +1180,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload-time = "2019-09-20T02:06:22.938Z" },
]
[[package]]
name = "pysubs2"
version = "1.8.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/31/4a/becf78d9d3df56e6c4a9c50b83794e5436b6c5ab6dd8a3f934e94c89338c/pysubs2-1.8.0.tar.gz", hash = "sha256:3397bb58a4a15b1325ba2ae3fd4d7c214e2c0ddb9f33190d6280d783bb433b20", size = 1130048, upload-time = "2024-12-24T12:39:47.769Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/09/0fc0719162e5ad723f71d41cf336f18b6b5054d70dc0fe42ace6b4d2bdc9/pysubs2-1.8.0-py3-none-any.whl", hash = "sha256:05716f5039a9ebe32cd4d7673f923cf36204f3a3e99987f823ab83610b7035a0", size = 43516, upload-time = "2024-12-24T12:39:44.469Z" },
]
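pysubs2 enters the lock here as a new dependency. As a hedged illustration only (file names are made up, and this is generic pysubs2 usage rather than unshackle's own conversion wiring), a format conversion with the library looks roughly like this:

    import pysubs2

    # load() autodetects the input format (SRT, ASS/SSA, WebVTT, MicroDVD, ...)
    subs = pysubs2.load("input.srt", encoding="utf-8")
    # save() picks the output format from the target file extension
    subs.save("output.ass")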
[[package]]
name = "pywidevine"
version = "1.8.0"
@@ -1393,8 +1408,8 @@ sdist = { url = "https://files.pythonhosted.org/packages/66/b7/4a1bc231e0681ebf3
[[package]]
name = "subby"
version = "0.3.21"
source = { git = "https://github.com/vevv/subby.git#390cb2f4a55e98057cdd65314d8cbffd5d0a11f1" }
version = "0.3.23"
source = { git = "https://github.com/vevv/subby.git?rev=5a925c367ffb3f5e53fd114ae222d3be1fdff35d#5a925c367ffb3f5e53fd114ae222d3be1fdff35d" }
dependencies = [
{ name = "beautifulsoup4" },
{ name = "click" },
@@ -1499,7 +1514,7 @@ wheels = [
[[package]]
name = "unshackle"
version = "1.4.3"
version = "1.4.8"
source = { editable = "." }
dependencies = [
{ name = "appdirs" },
@@ -1523,6 +1538,7 @@ dependencies = [
{ name = "pymp4" },
{ name = "pymysql" },
{ name = "pyplayready" },
{ name = "pysubs2" },
{ name = "pywidevine", extra = ["serve"] },
{ name = "pyyaml" },
{ name = "requests", extra = ["socks"] },
@@ -1562,7 +1578,7 @@ requires-dist = [
{ name = "httpx", specifier = ">=0.28.1,<0.29" },
{ name = "jsonpickle", specifier = ">=3.0.4,<4" },
{ name = "langcodes", specifier = ">=3.4.0,<4" },
{ name = "lxml", specifier = ">=5.2.1,<6" },
{ name = "lxml", specifier = ">=5.2.1,<7" },
{ name = "pproxy", specifier = ">=2.7.9,<3" },
{ name = "protobuf", specifier = ">=4.25.3,<5" },
{ name = "pycaption", specifier = ">=2.2.6,<3" },
@@ -1571,7 +1587,8 @@ requires-dist = [
{ name = "pymediainfo", specifier = ">=6.1.0,<7" },
{ name = "pymp4", specifier = ">=1.4.0,<2" },
{ name = "pymysql", specifier = ">=1.1.0,<2" },
{ name = "pyplayready", specifier = ">=0.6.0,<0.7" },
{ name = "pyplayready", specifier = ">=0.6.3,<0.7" },
{ name = "pysubs2", specifier = ">=1.7.0,<2" },
{ name = "pywidevine", extras = ["serve"], specifier = ">=1.8.0,<2" },
{ name = "pyyaml", specifier = ">=6.0.1,<7" },
{ name = "requests", extras = ["socks"], specifier = ">=2.31.0,<3" },
@@ -1579,7 +1596,7 @@ requires-dist = [
{ name = "rlaphoenix-m3u8", specifier = ">=3.4.0,<4" },
{ name = "ruamel-yaml", specifier = ">=0.18.6,<0.19" },
{ name = "sortedcontainers", specifier = ">=2.4.0,<3" },
{ name = "subby", git = "https://github.com/vevv/subby.git" },
{ name = "subby", git = "https://github.com/vevv/subby.git?rev=5a925c367ffb3f5e53fd114ae222d3be1fdff35d" },
{ name = "subtitle-filter", specifier = ">=1.4.9,<2" },
{ name = "unidecode", specifier = ">=1.3.8,<2" },
{ name = "urllib3", specifier = ">=2.2.1,<3" },