mirror of https://github.com/unshackle-dl/unshackle.git
synced 2025-10-23 15:11:08 +00:00

Compare commits
194 Commits: 1.0.1 ... feature/re

| Author | SHA1 | Date |
|---|---|---|
|  | 984884858f |  |
|  | bdd219d90c |  |
|  | 07574d8d02 |  |
|  | 9b5d233c69 |  |
|  | 98d4bb4333 |  |
|  | 1c48b282de |  |
|  | 93debf149a |  |
|  | 57fc07ea41 |  |
|  | df09998a47 |  |
|  | e04399fbce |  |
|  | 087df59fb6 |  |
|  | 5384b775a4 |  |
|  | 2a90e60a49 |  |
|  | 1409f93de5 |  |
|  | d3ca8e7039 |  |
|  | 8f2ead2107 |  |
|  | 9921690339 |  |
|  | 3dd12b0cbe |  |
|  | ed1314572b |  |
|  | 7a49a6a4f9 |  |
|  | 888647ad64 |  |
|  | 133f91a2e8 |  |
|  | a7bde29401 |  |
|  | 6c1cb21630 |  |
|  | 8437ba24d5 |  |
|  | 45902bba13 |  |
|  | 062e060fca |  |
|  | 170a427af0 |  |
|  | 283736c57b |  |
|  | 3f6a7e1f68 |  |
|  | b48eecacb5 |  |
|  | e9ba78cec3 |  |
|  | 4cec942613 |  |
|  | d6f8e42f52 |  |
|  | e4bb7e9135 |  |
|  | c1e2d68e57 |  |
|  | acbbe734ab |  |
|  | 6d0a210efb |  |
|  | 26a94b7135 |  |
|  | 2d5e807e7d |  |
|  | ebe5012aeb |  |
|  | 06c4c02312 |  |
|  | c77a8737ef |  |
|  | 0f4a68ca62 |  |
|  | 3a8dfb26fe |  |
|  | e0154741f0 |  |
|  | c9bb0e4224 |  |
|  | e1e2e35ff4 |  |
|  | ceea4ac827 |  |
|  | 97f7eb0674 |  |
|  | 03f08159b4 |  |
|  | 724703d14b |  |
|  | da00258ae0 |  |
|  | 4f3d0f1f7a |  |
|  | bade3f8c09 |  |
|  | 55f116f1e8 |  |
|  | 2e2f8f5099 |  |
|  | 4459ec4db6 |  |
|  | 2afc59624d |  |
|  | e92e5c2ba3 |  |
|  | 30314fdb46 |  |
|  | a2c6798fe6 |  |
|  | bc26bf3046 |  |
|  | 35efdbff6d |  |
|  | 63b7a49c1a |  |
|  | 98ecf6f876 |  |
|  | 5df6914536 |  |
|  | c1df074965 |  |
|  | da60a396dd |  |
|  | a99a391395 |  |
|  | ed32939d83 |  |
|  | 4006593a8a |  |
|  | 307be4549b |  |
|  | a82828768d |  |
|  | d18a5de0d0 |  |
|  | 04b540b363 |  |
|  | 6137146705 |  |
|  | 859d09693c |  |
|  | 5f022635cb |  |
|  | ad66502c0c |  |
|  | e462f07b7a |  |
|  | 83b600e999 |  |
|  | ea8a7b00c9 |  |
|  | 16ee4175a4 |  |
|  | f722ec69b6 |  |
|  | 2330297ea4 |  |
|  | 86bb162868 |  |
|  | 501cfd68e8 |  |
|  | 76fb2eea95 |  |
|  | ea5ec40bcd |  |
|  | 329850b043 |  |
|  | 73595f3b50 |  |
|  | 1e82283133 |  |
|  | ab13dde9d2 |  |
|  | 9fd0895128 |  |
|  | ed744205ad |  |
|  | 3ef43afeed |  |
|  | 26851cbe7c |  |
|  | b4efdf3f2c |  |
|  | eb30620626 |  |
|  | 7b71d6631c |  |
|  | 5949931b56 |  |
|  | ddfc0555c9 |  |
|  | 3dda3290d3 |  |
|  | 19ff200617 |  |
|  | ed0f03eca3 |  |
|  | 72f65adcb2 |  |
|  | 50a5a23341 |  |
|  | e10c760821 |  |
|  | 990084ab1f |  |
|  | 8e598f7d6a |  |
|  | 06687b51fb |  |
|  | eb1be7e253 |  |
|  | eac2ff4cee |  |
|  | 798b5bf3cd |  |
|  | 725f7be563 |  |
|  | b2686ca2b1 |  |
|  | abc3b4f1a4 |  |
|  | 9952758b38 |  |
|  | f56e7c1ec8 |  |
|  | 096b7d70f8 |  |
|  | 460878777d |  |
|  | 9eb6bdbe12 |  |
|  | 41d203aaba |  |
|  | 0c6909be4e |  |
|  | f0493292af |  |
|  | ead05d08ac |  |
|  | 8c1f51a431 |  |
|  | 1d4e8bf9ec |  |
|  | b4a1f2236e |  |
|  | 3277ab0d77 |  |
|  | be0f7299f8 |  |
|  | 948ef30de7 |  |
|  | 1bd63ddc91 |  |
|  | 4dff597af2 |  |
|  | 8dbdde697d |  |
|  | 63c697f082 |  |
|  | 3e0835d9fb |  |
|  | c6c83ee43b |  |
|  | 507690834b |  |
|  | f8a58d966b |  |
|  | 8d12b735ff |  |
|  | 1aaea23669 |  |
|  | e3571b9518 |  |
|  | b478a00519 |  |
|  | 24fb8fb00c |  |
|  | 63e9a78b2a |  |
|  | a2bfe47993 |  |
|  | cf4dc1ce76 |  |
|  | 40028c81d7 |  |
|  | 06df10cb58 |  |
|  | d61bec4a8c |  |
|  | 058bb60502 |  |
|  | 7583129e8f |  |
|  | 4691694d2e |  |
|  | a07345a0a2 |  |
|  | 091d7335a3 |  |
|  | 8c798b95c4 |  |
|  | 46c28fe943 |  |
|  | 22c9aa195e |  |
|  | 776d8f3df0 |  |
|  | 67caf71295 |  |
|  | 3ed76d199c |  |
|  | 4de9251f95 |  |
|  | d2fb409ad9 |  |
|  | fdff3a1c56 |  |
|  | 5d1f2eb458 |  |
|  | 3efac3d474 |  |
|  | f578904b76 |  |
|  | 9f20159605 |  |
|  | 4decb0d107 |  |
|  | 80c40c8677 |  |
|  | 26ef48c889 |  |
|  | 5dad2746b1 |  |
|  | 24aa4647ed |  |
|  | eeb553cb22 |  |
|  | 06c96b88a5 |  |
|  | e8e376ad51 |  |
|  | fbb140ec90 |  |
|  | 16a684c77f |  |
|  | c97de0c32b |  |
|  | c81b7f192e |  |
|  | 1b9fbe3401 |  |
|  | f69eb691d7 |  |
|  | 05ef841282 |  |
|  | 454f19a0f7 |  |
|  | 4276267455 |  |
|  | ab40dc1bf0 |  |
|  | ec16e54c10 |  |
|  | 20285f4522 |  |
|  | eaa5943b8e |  |
|  | 4385035b05 |  |
|  | cb26ac6fa2 |  |
|  | 95674d5739 |  |

62 .dockerignore vendored
@@ -1,62 +0,0 @@
# Logs and temporary files
Logs/
logs/
temp/
*.log

# Sensitive files
key_vault.db
unshackle/WVDs/
unshackle/PRDs/
unshackle/cookies/
*.prd
*.wvd

# Cache directories
unshackle/cache/
__pycache__/
*.pyc
*.pyo
*.pyd
.Python

# Development files
.git/
.gitignore
.vscode/
.idea/
*.swp
*.swo

# Documentation and plans
plan/
CONTRIBUTING.md
CONFIG.md
AGENTS.md
OLD-CHANGELOG.md
cliff.toml

# Installation scripts
install.bat

# Test files
*test*
*Test*

# Virtual environments
venv/
env/
.venv/

# OS generated files
.DS_Store
Thumbs.db

32 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,32 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: Sp5rky

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Run command `uv run [...]`
2. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
- OS: [e.g. Windows/Unix]
- Version [e.g. 1.0.1]
- Shaka-packager Version [e.g. 2.6.1]
- n_m3u8dl-re Version [e.g. 0.3.0 beta]
- Any additional software, such as subby/ccextractor/aria2c

**Additional context**
Add any other context about the problem here. If you're reporting issues with services not running or working, please try to explain where in your service it breaks, but don't include service code (unless you have the rights to do so).

21 .github/ISSUE_TEMPLATE/feature_request.md vendored Normal file
@@ -0,0 +1,21 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: Sp5rky

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
Other tools like Devine/VT had this function [...]

**Additional context**
Add any other context or screenshots about the feature request here.

99 .github/workflows/docker.yml vendored
@@ -1,99 +0,0 @@
name: Build and Publish Docker Image

on:
  push:
    branches: [main, master]
    paths: # run only when this file changes
      - "unshackle/core/__init__.py"
  pull_request: {} # optional – delete if you don't build on PRs
  workflow_dispatch: {} # manual override

jobs:
  detect-version-change:
    runs-on: ubuntu-latest
    outputs:
      changed: ${{ steps.vdiff.outputs.changed }}
      version: ${{ steps.vdiff.outputs.version }}

    steps:
      - uses: actions/checkout@v4
        with: { fetch-depth: 2 } # we need the previous commit

      - name: Extract & compare version
        id: vdiff
        shell: bash
        run: |
          current=$(grep -oP '__version__ = "\K[^"]+' unshackle/core/__init__.py)
          prev=$(git show HEAD^:unshackle/core/__init__.py \
            | grep -oP '__version__ = "\K[^"]+' || echo '')
          echo "version=$current" >>"$GITHUB_OUTPUT"
          echo "changed=$([ "$current" != "$prev" ] && echo true || echo false)" >>"$GITHUB_OUTPUT"
          echo "Current=$current Previous=$prev"

  build-and-push:
    needs: detect-version-change
    if: needs.detect-version-change.outputs.changed == 'true' # only run when the version was bumped
    runs-on: ubuntu-latest
    permissions: { contents: read, packages: write }

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Extract version from __init__.py
        id: version
        run: |
          VERSION=$(grep -oP '__version__ = "\K[^"]+' unshackle/core/__init__.py)
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "major_minor=$(echo $VERSION | cut -d. -f1-2)" >> $GITHUB_OUTPUT
          echo "major=$(echo $VERSION | cut -d. -f1)" >> $GITHUB_OUTPUT
          echo "Extracted version: $VERSION"

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Container Registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=raw,value=latest,enable={{is_default_branch}}
            type=raw,value=v${{ steps.version.outputs.version }},enable={{is_default_branch}}
            type=raw,value=${{ steps.version.outputs.version }},enable={{is_default_branch}}
            type=raw,value=${{ steps.version.outputs.major_minor }},enable={{is_default_branch}}
            type=raw,value=${{ steps.version.outputs.major }},enable={{is_default_branch}}

      - name: Show planned tags
        run: |
          echo "Planning to create the following tags:"
          echo "${{ steps.meta.outputs.tags }}"

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Test Docker image
        if: github.event_name != 'pull_request'
        run: |
          docker run --rm ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest env check

4 .gitignore vendored
@@ -1,6 +1,7 @@
# unshackle
unshackle.yaml
unshackle.yml
update_check.json
*.mkv
*.mp4
*.exe
@@ -18,13 +19,13 @@ device_cert
device_client_id_blob
device_private_key
device_vmp_blob
binaries/
unshackle/cache/
unshackle/cookies/
unshackle/certs/
unshackle/WVDs/
unshackle/PRDs/
temp/
logs/
services/

# Byte-compiled / optimized / DLL files
@@ -217,6 +218,7 @@ cython_debug/
# you could uncomment the following to ignore the entire vscode folder
.vscode/
.github/copilot-instructions.md
CLAUDE.md

# Ruff stuff:
.ruff_cache/

563 CHANGELOG.md Normal file
@@ -0,0 +1,563 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [2.0.0] - 2025-10-25

### Breaking Changes

- **REST API Integration**: Core architecture modified to support REST API functionality
  - Changes to internal APIs for download management and tracking
  - Title and track classes updated with API integration points
  - Core component interfaces modified for queue management support
- **Configuration Changes**: New required configuration options for API and enhanced features
  - Added `simkl_client_id`, now required for Simkl functionality
  - Service-specific configuration override structure introduced
  - Debug logging configuration options added
- **Forced Subtitles**: Behavior change for forced subtitle inclusion
  - Forced subs are no longer auto-included; they require the explicit `--forced-subs` flag

### Added

- **REST API Server**: Complete download management via REST API (early development)
  - Implemented download queue management and worker system
  - Added OpenAPI/Swagger documentation for easy API exploration
  - Included download progress tracking and status endpoints
  - API authentication and comprehensive error handling
  - Updated core components to support API integration
  - Early development work with more changes planned
- **CustomRemoteCDM**: Highly configurable custom CDM API support
  - Support for third-party CDM API providers with maximum configurability
  - Full configuration through YAML without code changes
  - Addresses GitHub issue #26 for flexible CDM integration
- **WindscribeVPN Proxy Provider**: New VPN provider support
  - Added WindscribeVPN following the NordVPN and SurfsharkVPN patterns
  - Fixes GitHub issue #29
- **Latest Episode Download**: New `--latest-episode` CLI option
  - `-le, --latest-episode` flag to download only the most recent episode
  - Automatically selects the single most recent episode regardless of season
  - Fixes GitHub issue #28
- **Service-Specific Configuration Overrides**: Per-service fine-tuned control
  - Support for per-service configuration overrides in YAML
  - Fine-tuned control of downloader and command options per service
  - Fixes GitHub issue #13
- **Comprehensive JSON Debug Logging**: Structured logging for troubleshooting (see the sketch after this list)
  - Binary toggle via the `--debug` flag or `debug: true` in config
  - JSON Lines (.jsonl) format for easy parsing and analysis
  - Comprehensive logging of all operations (session info, CLI params, CDM details, auth status, title/track metadata, DRM operations, vault queries)
  - Configurable key logging via the `debug_keys` option with smart redaction
  - Error logging for all critical operations
  - Removed the old text logging system
- **curl_cffi Retry Handler**: Enhanced session reliability
  - Added an automatic retry mechanism to the curl_cffi Session
  - Improved download reliability with configurable retries
- **Simkl API Configuration**: New API key support
  - Added the `simkl_client_id` configuration option
  - Simkl now requires a client_id from https://simkl.com/settings/developer/
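
For reference, a minimal sketch of the new top-level options in `unshackle.yaml` (the key names come from the entries above; the values and comments are placeholder assumptions):

```yaml
debug: true                        # enable JSON Lines (.jsonl) debug logging
debug_keys: false                  # opt-in content key logging (smart redaction applies)
simkl_client_id: "your-client-id"  # from https://simkl.com/settings/developer/
```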

### Changed

- **Binary Search Enhancement**: Improved binary discovery
  - Refactored to search for binaries in the root of the binary folder or in a subfolder named after the binary
  - Better organization of binary dependencies
- **Type Annotations**: Modernized to PEP 604 syntax
  - Updated session.py type annotations to use modern Python syntax
  - Improved code readability and type checking

### Fixed

- **Config Directory Support**: Cross-platform user config directory support
  - Fixed config loading to properly support user config directories across all platforms
  - Fixes GitHub issue #23
- **HYBRID Mode Validation**: Pre-download validation for hybrid processing
  - Added validation to check that both HDR10 and DV tracks are available before download
  - Prevents wasted downloads when hybrid processing would fail
- **TMDB/Simkl API Keys**: Graceful handling of missing API keys
  - Improved error handling when TMDB or Simkl API keys are not configured
  - Better user messaging for API configuration requirements
- **Forced Subtitles Behavior**: Correct forced subtitle filtering
  - Fixed forced subtitles being incorrectly included without the `--forced-subs` flag
  - Forced subs are now only included when explicitly requested
- **Font Attachment Constructor**: Fixed ASS/SSA font attachment
  - Use keyword arguments for the Attachment constructor in font attachment
  - Fixes the "Invalid URL: No scheme supplied" error
  - Fixes GitHub issue #24
- **Binary Subdirectory Checking**: Enhanced binary location discovery (by @TPD94, PR #19)
  - Updated binaries.py to check subdirectories in binaries folders named after the binary
  - Improved binary detection and loading
- **HLS Manifest Processing**: Minor HLS parser fix (by @TPD94, PR #19)
- **lxml and pyplayready**: Updated dependencies (by @Sp5rky)
  - Updated the lxml constraint and pyplayready import path for compatibility

### Refactored

- **Import Cleanup**: Removed unused imports
  - Removed an unused mypy import from binaries.py
  - Fixed import ordering in the API download_manager and handlers

### Contributors

This release includes contributions from:

- @Sp5rky - REST API server implementation, dependency updates
- @stabbedbybrick - curl_cffi retry handler (PR #31)
- @TPD94 - Binary search enhancements, manifest parser fixes (PR #19)
- @scene (Andy) - Core features, configuration system, bug fixes

## [1.4.8] - 2025-10-08

### Added

- **Exact Language Matching**: New `--exact-lang` flag for precise language matching
  - Enables strict language code matching without fallbacks
- **No-Mux Flag**: New `--no-mux` flag to skip muxing tracks into container files
  - Useful for keeping individual track files separate
- **DecryptLabs API Integration for HTTP Vault**: Enhanced vault support
  - Added DecryptLabs API support to the HTTP vault for improved key retrieval
- **AC4 Audio Codec Support**: Enhanced audio format handling
  - Added AC4 codec support in the Audio class with updated mime/profile handling
- **pysubs2 Subtitle Conversion**: Extended subtitle format support
  - Added pysubs2 subtitle conversion with extended format support
  - Configurable conversion method in the configuration (see the example below)
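
For example, the new method can be selected through the `subtitle` block documented in CONFIG.md:

```yaml
subtitle:
  conversion_method: pysubs2 # instead of the default "auto"
```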

### Changed

- **Audio Track Sorting**: Optimized audio track selection logic
  - Improved audio track sorting by grouping descriptive tracks and sorting by bitrate
  - Better identification of ATMOS and DD+ as highest quality for file naming
- **pyplayready Update**: Upgraded to version 0.6.3
  - Updated import paths to resolve compatibility issues
  - Fixed lxml constraints for better dependency management
- **pysubs2 Conversion Method**: Moved from auto to manual configuration
  - pysubs2 is no longer auto-selected during its testing phase

### Fixed

- **Remote CDM**: Fixed curl_cffi compatibility
  - Added curl_cffi to instance checks in RemoteCDM
- **Temporary File Handling**: Improved encoding handling
  - Specified UTF-8 encoding when opening temporary files

### Reverted

- **tinycss SyntaxWarning Suppression**: Removed ineffective warning filter
  - Reverted a warnings filter that didn't work as expected for suppressing tinycss warnings

## [1.4.7] - 2025-09-25

### Added

- **curl_cffi Session Support**: Enhanced anti-bot protection with browser impersonation
  - Added a new session utility with curl_cffi support for bypassing anti-bot measures
  - Browser impersonation support for Chrome, Firefox, and Safari user agents
  - Full backward compatibility with requests.Session maintained
  - Suppressed HTTPS proxy warnings for an improved user experience
- **Download Retry Functionality**: Configurable retry mechanism for failed downloads
  - Added a retry count option to the download function for improved reliability
- **Subtitle Requirements Options**: Enhanced subtitle download control
  - Added options for required subtitles in the download command
  - Better control over subtitle track selection and requirements
- **Quality Selection Enhancement**: Improved quality selection options
  - Added a best-available-quality option in the download command for optimal track selection
- **DecryptLabs API Integration**: Enhanced remote CDM configuration
  - Added decrypt_labs_api_key to Config initialization for better API integration

### Changed

- **Manifest Parser Updates**: Enhanced compatibility across all parsers
  - Updated the DASH, HLS, ISM, and M3U8 parsers to accept curl_cffi sessions
  - Improved cookie handling compatibility between requests and curl_cffi
- **Logging Improvements**: Reduced log verbosity for a better user experience
  - Changed the duplicate-track log level to debug to reduce console noise
  - Dynamic CDM selection messages moved to debug-only output

### Fixed

- **Remote CDM Reuse**: Fixed KeyError in dynamic CDM selection
  - Prevents a KeyError when reusing remote CDMs in the dynamic selection process
  - Creates a copy of the CDM dictionary before modification to prevent configuration mutation
  - Allows the same CDM to be selected multiple times within a session without errors

## [1.4.6] - 2025-09-13

### Added

- **Quality-Based CDM Selection**: Dynamic CDM selection based on video resolution
  - Automatically selects the appropriate CDM (L3/L1) based on video track quality
  - Supports quality thresholds in configuration (>=, >, <=, <, exact match)
  - Pre-selects the optimal CDM based on the highest quality across all video tracks
  - Maintains backward compatibility with existing CDM configurations
- **Automatic Audio Language Metadata**: Intelligent embedded audio language detection
  - Automatically sets audio language metadata when no separate audio tracks exist
  - Smart video track selection based on title language, with fallbacks
  - Enhanced FFmpeg repackaging with audio stream metadata injection
- **Lazy DRM Loading**: Deferred DRM loading for multi-track key retrieval optimization
  - Added deferred DRM loading to the M3U8 parser to mark tracks for later processing
  - Just-in-time DRM loading during the download process for better performance

### Changed

- **Enhanced CDM Management**: Improved CDM switching logic for multi-quality downloads
  - CDM selection is now based on the highest quality track to avoid inefficient switching
  - Quality-based selection only within the same DRM type (Widevine-to-Widevine, PlayReady-to-PlayReady)
  - A single CDM is used per session for better performance and reliability

### Fixed

- **Vault Caching Issues**: Fixed vault count display and NoneType iteration errors
  - Fixed the "'NoneType' object is not iterable" error in DecryptLabsRemoteCDM
  - Fixed the vault count display showing 0/3 instead of the actual successful vault count
- **Service Name Transmission**: Resolved DecryptLabsRemoteCDM service name issues
  - Fixed DecryptLabsRemoteCDM sending 'generic' instead of proper service names
  - Added case-insensitive vault lookups for SQLite/MySQL vaults
  - Added local vault integration to DecryptLabsRemoteCDM
- **Import Organization**: Improved import ordering and code formatting
  - Reordered imports in decrypt_labs_remote_cdm.py for better organization
  - Cleaned up trailing whitespace in vault files

### Configuration

- **New CDM Configuration Format**: Extended `cdm:` section supports quality-based selection

  ```yaml
  cdm:
    SERVICE_NAME:
      "<=1080": l3_cdm_name
      ">1080": l1_cdm_name
      default: l3_cdm_name
  ```

## [1.4.5] - 2025-09-09

### Added

- **Enhanced CDM Key Caching**: Improved key caching and session management for L1/L2 devices
  - Optimized `get_cached_keys_if_exists` functionality for better performance with L1/L2 devices
  - Enhanced cached key retrieval logic with improved session handling
- **Widevine Common Certificate Fallback**: Added fallback to the Widevine common certificate for L1 devices
  - Improved compatibility for L1 devices when service certificates are unavailable
- **Enhanced Vault Loading**: Improved vault loading and key copying logic
  - Better error handling and key management in vault operations
- **PSSH Display Optimization**: Truncated PSSH string display in non-debug mode for cleaner output
- **CDM Error Messaging**: Added error messages for missing service certificates in CDM sessions

### Changed

- **Dynamic Version Headers**: Updated User-Agent headers to use dynamic version strings
  - DecryptLabsRemoteCDM now uses a dynamic version import instead of a hardcoded version
- **Intelligent CDM Caching**: Implemented an intelligent caching system for CDM license requests
  - Enhanced caching logic reduces redundant license requests and improves performance
- **Enhanced Tag Handling**: Improved tag handling for TV shows and movies from Simkl data
  - Better metadata processing and formatting for improved media tagging

### Fixed

- **CDM Session Management**: Clean up session data when retrieving cached keys
  - Removes decrypt_labs_session_id and challenge from the session when cached keys exist but there are missing KIDs
  - Ensures a clean state for subsequent requests and prevents session conflicts
- **Tag Formatting**: Fixed formatting issues in tag processing
- **Import Order**: Fixed import order issues in the tags module

## [1.4.4] - 2025-09-02

### Added

- **Enhanced DecryptLabs CDM Support**: Comprehensive remote CDM functionality
  - Full support for Widevine, PlayReady, and ChromeCDM through DecryptLabsRemoteCDM
  - Enhanced session management and caching support for remote WV/PR operations
  - Support for cached keys and improved license handling
  - New CDM configurations for Chrome and PlayReady devices with an updated User-Agent and service certificate
- **Advanced Configuration Options**: New device and language preferences
  - Added configuration options for the device certificate status list
  - Enhanced language preference settings

### Changed

- **DRM Decryption Enhancements**: Streamlined decryption process
  - Simplified the decrypt method by removing an unused parameter and streamlining the logic
  - Improved DecryptLabs CDM configurations with better device support

### Fixed

- **Matroska Tag Compliance**: Enhanced media container compatibility
  - Fixed Matroska tag compliance with the official specification
- **Application Branding**: Cleaned up version display
  - Removed the old devine version reference from the banner to avoid developer confusion
  - Updated branding while maintaining original GNU license compliance
- **IP Information Handling**: Improved geolocation services
  - Enhanced get_ip_info functionality with better failover handling
  - Added support for 429 error handling and multiple API provider fallback
  - Implemented cached IP info retrieval with a fallback tester to avoid rate limiting
- **Dependencies**: Streamlined package requirements
  - Removed the unnecessary `data` extra requirement from langcodes

### Removed

- Deprecated version references in the application banner, for clarity

## [1.4.3] - 2025-08-20

### Added

- Cached IP info helper for region detection
  - New `get_cached_ip_info()` with a 24-hour cache and provider rotation (ipinfo/ipapi) with 429 handling.
  - Reduces external calls and stabilizes non-proxy region lookups for caching/logging.

### Changed

- DRM decryption selection is fully configuration-driven (see the example after this list)
  - Widevine and PlayReady now select the decrypter based solely on `decryption` in YAML (including per-service mapping).
  - Shaka Packager remains the default decrypter when not specified.
  - `dl.py` logs the chosen tool based on the resolved configuration.
- Geofencing and proxy verification improvements
  - Safer geofence checks with error handling and clearer logs.
  - Always verify the proxy exit region via a live IP lookup; fall back to proxy parsing on failure.
- Example config updated to default to Shaka
  - `unshackle.yaml`/example now sets `decryption.default: shaka` (service overrides still supported).
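
A minimal example of that default, matching the `decryption` mapping documented in CONFIG.md:

```yaml
decryption:
  default: shaka
```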

### Removed

- Deprecated parameter `use_mp4decrypt`
  - Removed from `Widevine.decrypt()` and `PlayReady.decrypt()` and all call sites.
  - Internal naming switched from mp4decrypt-specific flags to generic `decrypter` selection.

## [1.4.2] - 2025-08-14

### Added

- **Session Management for API Requests**: Enhanced API reliability with retry logic
  - Implemented session management for the tags functionality with automatic retry mechanisms
  - Improved API request stability and error handling
- **Series Year Configuration**: New `series_year` option for title naming control
  - Added a configurable `series_year` option to control year inclusion in series titles
  - Enhanced YAML configuration with series year handling options
- **Audio Language Override**: New audio language selection option
  - Added an `audio_language` option to override the default language selection for audio tracks
  - Provides more granular control over audio track selection
- **Vault Key Reception Control**: Enhanced vault security options (see the example after this list)
  - Added a `no_push` option to Vault and its subclasses to control key reception
  - Improved key management security and flexibility
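
A sketch of a read-only vault entry using the new flag, following the SQLite vault example from CONFIG.md (the name and path are placeholders):

```yaml
- type: SQLite
  name: "Local Backup Vault" # arbitrary vault name
  path: "key_vault.db"       # placeholder path
  no_push: true              # queried for keys, but never written to
```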

### Changed

- **HLS Segment Processing**: Enhanced segment retrieval and merging capabilities
  - Enhanced segment retrieval to allow all file types for better compatibility
  - Improved segment merging with recursive file search and fallback to binary concatenation
  - Fixed issues with VTT files from HLS not being found correctly due to format changes
  - Added cleanup of empty segment directories after processing
- **Documentation**: Updated README.md with the latest information

### Fixed

- **Audio Track Selection**: Improved per-language logic for audio tracks
  - Adjusted the `per_language` logic to ensure correct audio track selection
  - Fixed an issue where all tracks for a selected language were being downloaded instead of just the intended ones

## [1.4.1] - 2025-08-08

### Added

- **Title Caching System**: Intelligent title caching to reduce redundant API calls
  - Configurable title caching with a 30-minute default cache duration
  - 24-hour fallback cache on API failures for improved reliability
  - Region-aware caching to handle geo-restricted content properly
  - SHA256 hashing for cache keys to handle complex title IDs
  - Added a `--no-cache` CLI flag to bypass caching when needed
  - Added a `--reset-cache` CLI flag to clear existing cache data
  - New cache configuration variables in the config system
  - Documented caching options in the example configuration file
  - Significantly improves performance when debugging or modifying CLI parameters
- **Enhanced Tagging Configuration**: New options for customizing tag behavior
  - Added a `tag_group_name` config option to control group name inclusion in tags
  - Added a `tag_imdb_tmdb` config option to control IMDB/TMDB details in tags
  - Added Simkl API endpoint support as a fallback when no TMDB API key is provided
  - Enhanced the tag_file function to prioritize the provided TMDB ID when the `--tmdb` flag is used
  - Improved TMDB ID handling with better prioritization logic

### Changed

- **Language Selection Enhancement**: Improved default language handling
  - Updated the language option default to 'orig' when no `-l` flag is set
  - Avoids a hardcoded 'en' default and respects the original content language
- **Tagging Logic Improvements**: Simplified and enhanced tagging functionality
  - Simplified the Simkl search logic with a soft fail when no results are found
  - Enhanced the tag_file function with better TMDB ID prioritization
  - Improved error handling in tagging operations

### Fixed

- **Subtitle Processing**: Enhanced subtitle filtering for edge cases
  - Fixed a ValueError in subtitle filtering for multiple colons in time references
  - Improved handling of subtitles containing complex time formatting
  - Better error handling for malformed subtitle timestamps

### Removed

- **Docker Support**: Removed Docker configuration from the repository
  - Removed the Dockerfile and .dockerignore files
  - Cleaned up the Docker-related documentation in README.md
  - Focuses on direct installation methods

## [1.4.0] - 2025-08-05

### Added

- **HLG Transfer Characteristics Preservation**: Enhanced video muxing to preserve HLG color metadata
  - Added automatic detection of HLG video tracks during the muxing process
  - Implemented the `--color-transfer-characteristics 0:18` argument for mkvmerge when processing HLG content
  - Prevents incorrect conversion from HLG (18) to BT.2020 (14) transfer characteristics
  - Ensures proper HLG playback support on compatible hardware without manual editing
- **Original Language Support**: Enhanced language selection with 'orig' keyword support
  - Added support for the 'orig' language selector for both video and audio tracks
  - Automatically detects and uses the title's original language when 'orig' is specified
  - Improved language processing logic with better duplicate handling
  - Enhanced help text to document original language selection usage
- **Forced Subtitle Support**: Added an option to include forced subtitle tracks
  - New functionality to download and include forced subtitle tracks alongside regular subtitles
- **WebVTT Subtitle Filtering**: Enhanced subtitle processing capabilities
  - Added filtering for unwanted cues in WebVTT subtitles
  - Improved subtitle quality by removing unnecessary metadata

### Changed

- **DRM Track Decryption**: Improved DRM decryption track selection logic
  - Enhanced `get_drm_for_cdm()` method usage for better DRM-CDM matching
  - Added warning messages when no matching DRM is found for tracks
  - Improved error handling and logging for DRM decryption failures
- **Series Tree Representation**: Enhanced episode tree display formatting
  - Updated the series tree to show a season breakdown with episode counts
  - Improved visual representation with the "S{season}({count})" format
  - Better organization of series information in console output
- **Hybrid Processing UI**: Enhanced extraction and conversion processes
  - Added dynamic spinners to follow the rest of the codebase's design
  - Improved visual feedback during hybrid HDR processing operations
- **Track Selection Logic**: Enhanced multi-track selection capabilities
  - Fixed track selection to support combining the -V, -A, -S flags properly
  - Improved flexibility in selecting multiple track types simultaneously
- **Service Subtitle Support**: Added configuration for services without subtitle support
  - Services can now indicate that they don't support subtitle downloads
  - Prevents unnecessary subtitle download attempts for unsupported services
- **Update Checker**: Enhanced update checking logic and cache handling
  - Improved rate limiting and caching mechanisms for update checks
  - Better performance and fewer API calls to GitHub

### Fixed

- **PlayReady KID Extraction**: Enhanced KID extraction from PSSH data
  - Added base64 support and XML parsing for better KID detection
  - Fixed an issue where only one KID was being extracted for certain services
  - Improved multi-KID support for PlayReady-protected content
- **Dolby Vision Detection**: Improved DV codec detection across all formats
  - Fixed detection of the dvhe.05.06 codec, which was not being recognized correctly
  - Enhanced detection logic in the Episode and Movie title classes
  - Better support for various Dolby Vision codec variants

## [1.3.0] - 2025-08-03

### Added

- **mp4decrypt Support**: Alternative DRM decryption method using mp4decrypt from Bento4
  - Added `mp4decrypt` binary detection and support in the binaries module
  - New `decryption` configuration option in unshackle.yaml for service-specific decryption methods
  - Enhanced PlayReady and Widevine DRM classes with mp4decrypt decryption support
  - Service-specific decryption mapping allows choosing between `shaka` and `mp4decrypt` per service
  - Improved error handling and progress reporting for mp4decrypt operations
- **Scene Naming Configuration**: New `scene_naming` option for controlling file naming conventions
  - Added scene naming logic to the movie, episode, and song title classes
  - Configurable through unshackle.yaml to enable/disable scene naming standards
- **Terminal Cleanup and Signal Handling**: Enhanced console management
  - Implemented proper terminal cleanup on application exit
  - Added signal handling for graceful shutdown in ComfyConsole
- **Configuration Template**: New `unshackle-example.yaml` template file
  - Replaced the main `unshackle.yaml` with an example template to prevent git conflicts
  - Users can now modify their local config without affecting repository updates
- **Enhanced Credential Management**: Improved CDM and vault configuration
  - Expanded credential management documentation in the configuration
  - Enhanced CDM configuration examples and guidelines
- **Video Transfer Standards**: Added an `Unspecified_Image` option to the Transfer enum
  - Implements ITU-T H.Sup19 standard value 2 for image characteristics
  - Supports still image coding systems and unknown transfer characteristics
- **Update Check Rate Limiting**: Enhanced update checking system
  - Added configurable update check intervals to prevent excessive API calls
  - Improved rate limiting for GitHub API requests

### Changed

- **DRM Decryption Architecture**: Enhanced decryption system with dual method support
  - Updated `dl.py` to handle service-specific decryption method selection
  - Refactored the `Config` class to manage decryption method mapping per service
  - Enhanced DRM decrypt methods with a `use_mp4decrypt` parameter for method selection
- **Error Handling**: Improved exception handling in the Hybrid class
  - Replaced log.exit calls with ValueError exceptions for better error propagation
  - Enhanced error handling consistency across hybrid processing

### Fixed

- **Proxy Configuration**: Fixed proxy server mapping in configuration
  - Renamed 'servers' to 'server_map' in the proxy configuration to resolve Nord/Surfshark naming conflicts
  - Updated the configuration structure for better compatibility with proxy providers
- **HTTP Vault**: Improved URL handling and key retrieval logic
  - Fixed URL processing issues in HTTP-based key vaults
  - Enhanced key retrieval reliability and error handling

## [1.2.0] - 2025-07-30

### Added

- **Update Checker**: Automatic GitHub release version checking on startup
  - Configurable update notifications via the `update_checks` setting in unshackle.yaml
  - Non-blocking HTTP requests with a 5-second timeout for performance
  - Smart semantic version comparison supporting all version formats (x.y.z, x.y, x)
  - Graceful error handling for network issues and API failures
  - User-friendly update notifications with a current → latest version display
  - Direct links to the GitHub releases page for easy updates
- **HDR10+ Support**: Enhanced HDR10+ metadata processing for hybrid tracks
  - HDR10+ tool binary support (`hdr10plus_tool`) added to the binaries module
  - HDR10+ to Dolby Vision conversion capabilities in hybrid processing
  - Enhanced metadata extraction for HDR10+ content
- **Duration Fix Handling**: Added duration correction for video and hybrid tracks
- **Temporary Directory Management**: Automatic creation of temp directories for attachment downloads

### Changed

- Enhanced the configuration system with a new `update_checks` boolean option (defaults to true)
- Updated the sample unshackle.yaml with update checker configuration documentation
- Improved console styling consistency using `bright_black` for dimmed text
- **Environment Dependency Check**: Complete overhaul with detailed categorization and a status summary
  - Organized dependencies by category (Core, HDR, Download, Subtitle, Player, Network)
  - Enhanced status reporting with a compact summary display
  - Improved tool requirement tracking and missing dependency alerts
- **Hybrid Track Processing**: Significant improvements to HDR10+ and Dolby Vision handling
  - Enhanced metadata extraction and processing workflows
  - Better integration with HDR processing tools

### Removed

- **Docker Workflow**: Removed the Docker build-and-publish GitHub Actions workflow in favor of manual builds

## [1.1.0] - 2025-07-29

### Added

- **HDR10+DV Hybrid Processing**: New `-r HYBRID` command for processing HDR10 and Dolby Vision tracks
  - Support for hybrid HDR processing and injection using dovi_tool
  - New hybrid track processing module for seamless HDR10/DV conversion
  - Automatic detection and handling of HDR10 and DV metadata
  - Support for HDR10 and DV tracks in hybrid mode for the EXAMPLE service
  - Binary availability check for dovi_tool in hybrid mode operations
  - Enhanced track processing capabilities for HDR content

### Fixed

- Import order issues and a missing json import in hybrid processing
- UV installation process and error handling improvements
- Binary search functionality updated to use `binaries.find`

### Changed

- Updated the package version from 1.0.2 to 1.1.0
- Enhanced dl.py command processing for hybrid mode support
- Improved core titles (episode/movie) processing for HDR content
- Extended the tracks module with hybrid processing capabilities

145 CONFIG.md
@@ -141,6 +141,11 @@ The following directories are available and may be overridden,

- `logs` - Logs.
- `wvds` - Widevine Devices.
- `prds` - PlayReady Devices.
- `dcsl` - Device Certificate Status List.

Notes:

- `services` accepts either a single directory or a list of directories to search for service modules.

For example,

@@ -165,6 +170,14 @@ For example to set the default primary language to download to German,

lang: de
```

You can also set multiple preferred languages using a list, e.g.,

```yaml
lang:
  - en
  - fr
```

to set how many tracks to download concurrently to 4 and download threads to 16,

```yaml
@@ -213,6 +226,37 @@ downloader:

The `default` entry is optional. If omitted, `requests` will be used for services not listed.

## decryption (str | dict)

Choose what software to use to decrypt DRM-protected content throughout unshackle where needed.
You may provide a single decryption method globally, or a mapping of service tags to
decryption methods.

Options:

- `shaka` (default) - Shaka Packager - <https://github.com/shaka-project/shaka-packager>
- `mp4decrypt` - mp4decrypt from Bento4 - <https://github.com/axiomatic-systems/Bento4>

Note that Shaka Packager is the traditional method and works with most services. mp4decrypt
is an alternative that may work better with certain services that have specific encryption formats.

Example mapping:

```yaml
decryption:
  ATVP: mp4decrypt
  AMZN: shaka
  default: shaka
```

The `default` entry is optional. If omitted, `shaka` will be used for services not listed.

Simple configuration (single method for all services):

```yaml
decryption: mp4decrypt
```

## filenames (dict)

Override the default filenames used across unshackle.

@@ -271,6 +315,11 @@ Note: SQLite and MySQL vaults have to connect directly to the Host/IP. It cannot
Beware that some Hosting Providers do not let you access the MySQL server outside their intranet and may not be
accessible outside their hosting platform.

Additional behavior:

- `no_push` (bool): Optional per-vault flag. When `true`, the vault will not receive pushed keys (writes) but
  will still be queried and can provide keys for lookups. Useful for read-only/backup vaults.

### Using an API Vault

API vaults use a specific HTTP request format, therefore API or HTTP Key Vault APIs from other projects or services may
@@ -283,6 +332,7 @@ not work in unshackle. The API format can be seen in the [API Vault Code](unshac

```yaml
  # uri: "127.0.0.1:80/key-vault"
  # uri: "https://api.example.com/key-vault"
  token: "random secret key" # authorization token
  # no_push: true # optional; make this API vault read-only (lookups only)
```

### Using a MySQL Vault

@@ -298,6 +348,7 @@ A MySQL Vault can be on a local or remote network, but I recommend SQLite for lo

```yaml
  database: vault # database used for unshackle
  username: jane11
  password: Doe123
  # no_push: false # optional; defaults to false
```

I recommend giving only a trustable user (or yourself) CREATE permission and then use unshackle to cache at least one CEK
@@ -321,6 +372,7 @@ case something happens to your MySQL Vault.

```yaml
- type: SQLite
  name: "My Local Vault" # arbitrary vault name
  path: "C:/Users/Jane11/Documents/unshackle/data/key_vault.db"
  # no_push: true # optional; commonly true for local backup vaults
```

**Note**: You do not need to create the file at the specified path.

@@ -363,7 +415,7 @@ n_m3u8dl_re:

Set your NordVPN Service credentials with `username` and `password` keys to automate the use of NordVPN as a Proxy
system where required.

- You can also specify specific servers to use per-region with the `servers` key.
+ You can also specify specific servers to use per-region with the `server_map` key.
Sometimes a specific server works better for a service than others, so hard-coding one for a day or two helps.

For example,

@@ -372,8 +424,8 @@ For example,

```yaml
nordvpn:
  username: zxqsR7C5CyGwmGb6KSvk8qsZ # example of the login format
  password: wXVHmht22hhRKUEQ32PQVjCZ
- servers:
-   - us: 12 # force US server #12 for US proxies
+ server_map:
+   us: 12 # force US server #12 for US proxies
```

The username and password should NOT be your normal NordVPN Account Credentials.

@@ -412,7 +464,7 @@ second proxy of the US list.

Set your NordVPN Service credentials with `username` and `password` keys to automate the use of NordVPN as a Proxy
system where required.

- You can also specify specific servers to use per-region with the `servers` key.
+ You can also specify specific servers to use per-region with the `server_map` key.
Sometimes a specific server works better for a service than others, so hard-coding one for a day or two helps.

For example,

@@ -420,8 +472,8 @@

```yaml
username: zxqsR7C5CyGwmGb6KSvk8qsZ # example of the login format
password: wXVHmht22hhRKUEQ32PQVjCZ
- servers:
-   - us: 12 # force US server #12 for US proxies
+ server_map:
+   us: 12 # force US server #12 for US proxies
```

The username and password should NOT be your normal NordVPN Account Credentials.

@@ -432,6 +484,20 @@ You can even set a specific server number this way, e.g., `--proxy=gb2366`.

Note that `gb` is used instead of `uk` to be more consistent across regional systems.

### surfsharkvpn (dict)

Enable the Surfshark VPN proxy service using Surfshark Service credentials (not your login password).
You may pin specific server IDs per region using `server_map`.

```yaml
username: your_surfshark_service_username # https://my.surfshark.com/vpn/manual-setup/main/openvpn
password: your_surfshark_service_password # service credentials, not account password
server_map:
  us: 3844 # force US server #3844
  gb: 2697 # force GB server #2697
  au: 4621 # force AU server #4621
```

### hola (dict)

Enable the Hola VPN proxy service. This is a simple provider that doesn't require configuration.

@@ -466,15 +532,27 @@ For example,

[pywidevine]: https://github.com/rlaphoenix/pywidevine

## scene_naming (bool)

Set scene-style naming for titles. When `true`, scene naming patterns are used (e.g., `Prime.Suspect.S07E01...`); when
`false`, a more human-readable style is used (e.g., `Prime Suspect S07E01 ...`). Default: `true`.

## series_year (bool)

Whether to include the series year in series names for episodes and folders. Default: `true`.
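
For example, to switch to the human-readable naming style while keeping the year:

```yaml
scene_naming: false
series_year: true
```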

## serve (dict)

Configuration data for pywidevine's serve functionality run through unshackle.
This effectively allows you to run `unshackle serve` to start serving pywidevine Serve-compliant CDMs right from your
local widevine device files.

- `api_secret` - Secret key for REST API authentication. When set, enables the REST API server alongside the CDM serve functionality. This key is required for authenticating API requests.

For example,

```yaml
api_secret: "your-secret-key-here"
users:
  secret_key_for_jane: # 32bit hex recommended, case-sensitive
    devices: # list of allowed devices for this user
@@ -530,6 +608,27 @@ set_terminal_bg: true

Group or Username to postfix to the end of all download filenames following a dash.
For example, `tag: "J0HN"` will have `-J0HN` at the end of all download filenames.

## tag_group_name (bool)

Enable/disable tagging downloads with your group name when `tag` is set. Default: `true`.

## tag_imdb_tmdb (bool)

Enable/disable tagging downloaded files with IMDB/TMDB/TVDB identifiers (when available). Default: `true`.
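
Putting the tagging options together (the group name is a placeholder):

```yaml
tag: "J0HN" # postfixed to filenames as -J0HN
tag_group_name: true
tag_imdb_tmdb: true
```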

## title_cache_enabled (bool)

Enable/disable caching of title metadata to reduce redundant API calls. Default: `true`.

## title_cache_time (int)

Cache duration in seconds for title metadata. Default: `1800` (30 minutes).

## title_cache_max_retention (int)

Maximum retention time in seconds for serving slightly stale cached title metadata when API calls fail.
Default: `86400` (24 hours). Effective retention is `min(title_cache_time + grace, title_cache_max_retention)`.
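
For example, a one-hour cache with a 12-hour stale fallback (values are illustrative):

```yaml
title_cache_enabled: true
title_cache_time: 3600           # 1 hour
title_cache_max_retention: 43200 # 12 hours
```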

## tmdb_api_key (str)

API key for The Movie Database (TMDB). This is used for tagging downloaded files with TMDB,

@@ -549,3 +648,37 @@ tmdb_api_key: cf66bf18956kca5311ada3bebb84eb9a # Not a real key
```

**Note**: Keep your API key secure and do not share it publicly. This key is used by the core/utils/tags.py module to fetch metadata from TMDB for proper file tagging.

## subtitle (dict)

Control subtitle conversion and SDH (hearing-impaired) stripping behavior.

- `conversion_method`: How to convert subtitles between formats. Default: `auto`.
  - `auto`: Use subby for WebVTT/SAMI, standard conversion for others.
  - `subby`: Always use subby with CommonIssuesFixer.
  - `subtitleedit`: Prefer SubtitleEdit when available; otherwise fall back to standard conversion.
  - `pycaption`: Use only the pycaption library (no SubtitleEdit, no subby).
  - `pysubs2`: Use the pysubs2 library (supports SRT, SSA, ASS, WebVTT, TTML, SAMI, MicroDVD, MPL2, TMP formats).

- `sdh_method`: How to strip SDH cues. Default: `auto`.
  - `auto`: Try subby for SRT first, then SubtitleEdit, then filter-subs.
  - `subby`: Use subby's SDHStripper (SRT only).
  - `subtitleedit`: Use SubtitleEdit's RemoveTextForHI when available.
  - `filter-subs`: Use the subtitle-filter library.

Example:

```yaml
subtitle:
  conversion_method: auto
  sdh_method: auto
```

## update_checks (bool)

Check for updates from the GitHub repository on startup. Default: `true`.

## update_check_interval (int)

How often to check for updates, in hours. Default: `24`.
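
For example, to check twice a day:

```yaml
update_checks: true
update_check_interval: 12 # hours
```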

78 Dockerfile
@@ -1,78 +0,0 @@
FROM python:3.12-slim

# Set environment variables to reduce image size
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    UV_CACHE_DIR=/tmp/uv-cache

# Add container metadata
LABEL org.opencontainers.image.description="Docker image for Unshackle with all required dependencies for downloading media content"

# Install base dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    wget \
    gnupg \
    git \
    curl \
    build-essential \
    cmake \
    pkg-config \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Set up repos for mkvtoolnix and bullseye for ccextractor
RUN wget -O /etc/apt/keyrings/gpg-pub-moritzbunkus.gpg https://mkvtoolnix.download/gpg-pub-moritzbunkus.gpg \
    && echo "deb [signed-by=/etc/apt/keyrings/gpg-pub-moritzbunkus.gpg] https://mkvtoolnix.download/debian/ bookworm main" >> /etc/apt/sources.list \
    && echo "deb-src [signed-by=/etc/apt/keyrings/gpg-pub-moritzbunkus.gpg] https://mkvtoolnix.download/debian/ bookworm main" >> /etc/apt/sources.list \
    && echo "deb http://ftp.debian.org/debian bullseye main" >> /etc/apt/sources.list

# Install all dependencies from apt
RUN apt-get update && apt-get install -y \
    ffmpeg \
    ccextractor \
    mkvtoolnix \
    aria2 \
    libmediainfo-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Install Shaka Packager
RUN wget https://github.com/shaka-project/shaka-packager/releases/download/v2.6.1/packager-linux-x64 \
    && chmod +x packager-linux-x64 \
    && mv packager-linux-x64 /usr/local/bin/packager

# Install N_m3u8DL-RE
RUN wget https://github.com/nilaoda/N_m3u8DL-RE/releases/download/v0.3.0-beta/N_m3u8DL-RE_v0.3.0-beta_linux-x64_20241203.tar.gz \
    && tar -xzf N_m3u8DL-RE_v0.3.0-beta_linux-x64_20241203.tar.gz \
    && mv N_m3u8DL-RE /usr/local/bin/ \
    && chmod +x /usr/local/bin/N_m3u8DL-RE \
    && rm N_m3u8DL-RE_v0.3.0-beta_linux-x64_20241203.tar.gz

# Create binaries directory and add symlinks for all required executables
RUN mkdir -p /app/binaries && \
    ln -sf /usr/bin/ffprobe /app/binaries/ffprobe && \
    ln -sf /usr/bin/ffmpeg /app/binaries/ffmpeg && \
    ln -sf /usr/bin/mkvmerge /app/binaries/mkvmerge && \
    ln -sf /usr/local/bin/N_m3u8DL-RE /app/binaries/N_m3u8DL-RE && \
    ln -sf /usr/local/bin/packager /app/binaries/packager && \
    ln -sf /usr/local/bin/packager /usr/local/bin/shaka-packager && \
    ln -sf /usr/local/bin/packager /usr/local/bin/packager-linux-x64

# Install uv
RUN pip install --no-cache-dir uv

# Set working directory
WORKDIR /app

# Copy dependency files and README (required by pyproject.toml)
COPY pyproject.toml uv.lock README.md ./

# Copy source code
COPY unshackle/ ./unshackle/

# Install dependencies with uv (including the project itself)
RUN uv sync --frozen --no-dev

# Set entrypoint to allow passing commands directly to unshackle
ENTRYPOINT ["uv", "run", "unshackle"]
CMD ["-h"]
49 README.md
@@ -2,8 +2,15 @@
<img width="16" height="16" alt="no_encryption" src="https://github.com/user-attachments/assets/6ff88473-0dd2-4bbc-b1ea-c683d5d7a134" /> unshackle
<br/>
<sup><em>Movie, TV, and Music Archival Software</em></sup>
<br/>
<a href="https://discord.gg/mHYyPaCbFK">
  <img src="https://img.shields.io/discord/1395571732001325127?label=&logo=discord&logoColor=ffffff&color=7289DA&labelColor=7289DA" alt="Discord">
</a>
</p>

> [!WARNING]
> **Development Branch**: This is the `dev` branch containing bleeding-edge features and experimental changes. Use for testing only. For stable releases, use the [`main`](https://github.com/unshackle-dl/unshackle/tree/main) branch.

## What is unshackle?

unshackle is a fork of [Devine](https://github.com/devine-dl/devine/), a powerful archival tool for downloading movies, TV shows, and music from streaming services. Built with a focus on modularity and extensibility, it provides a robust framework for content acquisition with support for DRM-protected content.
@@ -14,6 +21,7 @@ unshackle is a fork of [Devine](https://github.com/devine-dl/devine/), a powerfu

- 🎥 **Multi-Media Support** - Movies, TV episodes, and music
- 🛠️ **Built-in Parsers** - DASH/HLS and ISM manifest support
- 🔒 **DRM Support** - Widevine and PlayReady integration
- 🌈 **HDR10+DV Hybrid** - Hybrid Dolby Vision injection via [dovi_tool](https://github.com/quietvoid/dovi_tool)
- 💾 **Flexible Storage** - Local and remote key vaults
- 👥 **Multi-Profile Auth** - Support for cookies and credentials
- 🤖 **Smart Naming** - Automatic P2P-style filename structure
@@ -41,46 +49,6 @@ uv tool install git+https://github.com/unshackle-dl/unshackle.git

uvx unshackle --help # or just `unshackle` once PATH updated
```

### Docker Installation

Run unshackle using our pre-built Docker image from GitHub Container Registry:

```bash
# Run with default help command
docker run --rm ghcr.io/unshackle-dl/unshackle:latest

# Check environment dependencies
docker run --rm ghcr.io/unshackle-dl/unshackle:latest env check

# Download content (mount directories for persistent data)
docker run --rm \
  -v "$(pwd)/downloads:/downloads" \
  -v "$(pwd)/unshackle/cookies:/app/unshackle/cookies" \
  -v "$(pwd)/unshackle/services:/app/unshackle/services" \
  -v "$(pwd)/unshackle/WVDs:/app/unshackle/WVDs" \
  -v "$(pwd)/unshackle/PRDs:/app/unshackle/PRDs" \
  -v "$(pwd)/temp:/app/temp" \
  -v "$(pwd)/unshackle/unshackle.yaml:/app/unshackle.yaml" \
  ghcr.io/unshackle-dl/unshackle:latest dl SERVICE_NAME CONTENT_ID

# Run interactively for configuration
docker run --rm -it \
  -v "$(pwd)/unshackle/cookies:/app/unshackle/cookies" \
  -v "$(pwd)/unshackle/services:/app/unshackle/services" \
  -v "$(pwd)/unshackle.yaml:/app/unshackle.yaml" \
  ghcr.io/unshackle-dl/unshackle:latest cfg
```

**Alternative: Build locally**

```bash
# Clone and build your own image
git clone https://github.com/unshackle-dl/unshackle.git
cd unshackle
docker build -t unshackle .
docker run --rm unshackle env check
```

> [!NOTE]
> After installation, you may need to add the installation path to your PATH environment variable if prompted.
@@ -88,7 +56,6 @@ docker run --rm unshackle env check

## Planned Features

- 🌈 **HDR10+DV Hybrid Support** - Allow support for hybrid HDR10+ and Dolby Vision.
- 🖥️ **Web UI Access & Control** - Manage and control unshackle from a modern web interface.
- 🔄 **Sonarr/Radarr Interactivity** - Direct integration for automated personal downloads.
- ⚙️ **Better ISM Support** - Improve on ISM support for multiple services.
52 install.bat
@@ -1,47 +1,61 @@
@echo off
echo Installing unshackle dependencies...
setlocal EnableExtensions EnableDelayedExpansion

echo.
echo === Unshackle setup (Windows) ===
echo.

REM Check if UV is already installed
uv --version >nul 2>&1
where uv >nul 2>&1
if %errorlevel% equ 0 (
    echo UV is already installed.
    echo [OK] uv is already installed.
    goto install_deps
)

echo UV not found. Installing UV...
echo.
echo [..] uv not found. Installing...

REM Install UV using the official installer
powershell -Command "irm https://astral.sh/uv/install.ps1 | iex"
powershell -NoProfile -ExecutionPolicy Bypass -Command "irm https://astral.sh/uv/install.ps1 | iex"
if %errorlevel% neq 0 (
    echo Failed to install UV. Please install UV manually from https://docs.astral.sh/uv/getting-started/installation/
    echo [ERR] Failed to install uv.
    echo PowerShell may be blocking scripts. Try:
    echo     Set-ExecutionPolicy RemoteSigned -Scope CurrentUser
    echo or install manually: https://docs.astral.sh/uv/getting-started/installation/
    pause
    exit /b 1
)

REM Add UV to PATH for current session
set "PATH=%USERPROFILE%\.cargo\bin;%PATH%"
set "UV_BIN="
for %%D in ("%USERPROFILE%\.local\bin" "%LOCALAPPDATA%\Programs\uv\bin" "%USERPROFILE%\.cargo\bin") do (
    if exist "%%~fD\uv.exe" set "UV_BIN=%%~fD"
)

echo UV installed successfully.
echo.
if not defined UV_BIN (
    echo [WARN] Could not locate uv.exe. You may need to reopen your terminal.
) else (
    set "PATH=%UV_BIN%;%PATH%"
)

:: Verify
uv --version >nul 2>&1
if %errorlevel% neq 0 (
    echo [ERR] uv still not reachable in this shell. Open a new terminal and re-run this script.
    pause
    exit /b 1
)
echo [OK] uv installed and reachable.

:install_deps
echo Installing project dependencies in editable mode with dev dependencies...
echo.

REM Install the project in editable mode with dev dependencies
uv sync
if %errorlevel% neq 0 (
    echo Failed to install dependencies. Please check the error messages above.
    echo [ERR] Dependency install failed. See errors above.
    pause
    exit /b 1
)

echo.
echo Installation completed successfully!
echo.
echo You can now run unshackle using:
echo Try:
echo     uv run unshackle --help
echo.
pause
endlocal
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

[project]
name = "unshackle"
version = "1.0.1"
version = "2.0.0"
description = "Modular Movie, TV, and Music Archival Software."
authors = [{ name = "unshackle team" }]
requires-python = ">=3.10,<3.13"
@@ -32,8 +32,8 @@ dependencies = [
    "construct>=2.8.8,<3",
    "crccheck>=1.3.0,<2",
    "jsonpickle>=3.0.4,<4",
    "langcodes[data]>=3.4.0,<4",
    "lxml>=5.2.1,<6",
    "langcodes>=3.4.0,<4",
    "lxml>=5.2.1,<7",
    "pproxy>=2.7.9,<3",
    "protobuf>=4.25.3,<5",
    "pycaption>=2.2.6,<3",
@@ -54,9 +54,12 @@ dependencies = [
    "urllib3>=2.2.1,<3",
    "chardet>=5.2.0,<6",
    "curl-cffi>=0.7.0b4,<0.8",
    "pyplayready>=0.6.0,<0.7",
    "pyplayready>=0.6.3,<0.7",
    "httpx>=0.28.1,<0.29",
    "cryptography>=45.0.0",
    "subby",
    "aiohttp-swagger3>=0.9.0,<1",
    "pysubs2>=1.7.0,<2",
]

[project.urls]
@@ -112,3 +115,4 @@ no_implicit_optional = true

[tool.uv.sources]
unshackle = { workspace = true }
subby = { git = "https://github.com/vevv/subby.git", rev = "5a925c367ffb3f5e53fd114ae222d3be1fdff35d" }
0 unshackle/binaries/placehere.txt Normal file
@@ -65,7 +65,7 @@ def cfg(ctx: click.Context, key: str, value: str, unset: bool, list_: bool) -> N

    if not is_write and not is_delete:
        data = data.mlget(key_items, default=KeyError)
        if data == KeyError:
        if data is KeyError:
            raise click.ClickException(f"Key '{key}' does not exist in the config.")
        yaml.dump(data, sys.stdout)
    else:
File diff suppressed because it is too large
@@ -10,11 +10,11 @@ from rich.padding import Padding
from rich.table import Table
from rich.tree import Tree

from unshackle.core import binaries
from unshackle.core.config import POSSIBLE_CONFIG_PATHS, config, config_path
from unshackle.core.console import console
from unshackle.core.constants import context_settings
from unshackle.core.services import Services
from unshackle.core.utils.osenvironment import get_os_arch


@click.group(short_help="Manage and configure the project environment.", context_settings=context_settings)
@@ -25,45 +25,134 @@ def env() -> None:
@env.command()
def check() -> None:
    """Checks environment for the required dependencies."""
    table = Table(title="Dependencies", expand=True)
    table.add_column("Name", no_wrap=True)
    table.add_column("Installed", justify="center")
    table.add_column("Path", no_wrap=False, overflow="fold")

    # builds shaka-packager based on os, arch
    packager_dep = get_os_arch("packager")

    # Helper function to find binary with multiple possible names
    def find_binary(*names):
        for name in names:
            if shutil.which(name):
                return name
        return names[0]  # Return first name as fallback for display

    dependencies = [
        {"name": "CCExtractor", "binary": "ccextractor"},
        {"name": "FFMpeg", "binary": "ffmpeg"},
        {"name": "MKVToolNix", "binary": "mkvmerge"},
        {"name": "Shaka-Packager", "binary": packager_dep},
        {"name": "N_m3u8DL-RE", "binary": find_binary("N_m3u8DL-RE", "n-m3u8dl-re")},
        {"name": "Aria2(c)", "binary": "aria2c"},
    # Define all dependencies
    all_deps = [
        # Core Media Tools
        {"name": "FFmpeg", "binary": binaries.FFMPEG, "required": True, "desc": "Media processing", "cat": "Core"},
        {"name": "FFprobe", "binary": binaries.FFProbe, "required": True, "desc": "Media analysis", "cat": "Core"},
        {"name": "MKVToolNix", "binary": binaries.MKVToolNix, "required": True, "desc": "MKV muxing", "cat": "Core"},
        {
            "name": "mkvpropedit",
            "binary": binaries.Mkvpropedit,
            "required": True,
            "desc": "MKV metadata",
            "cat": "Core",
        },
        {
            "name": "shaka-packager",
            "binary": binaries.ShakaPackager,
            "required": True,
            "desc": "DRM decryption",
            "cat": "DRM",
        },
        {
            "name": "mp4decrypt",
            "binary": binaries.Mp4decrypt,
            "required": False,
            "desc": "DRM decryption",
            "cat": "DRM",
        },
        # HDR Processing
        {"name": "dovi_tool", "binary": binaries.DoviTool, "required": False, "desc": "Dolby Vision", "cat": "HDR"},
        {
            "name": "HDR10Plus_tool",
            "binary": binaries.HDR10PlusTool,
            "required": False,
            "desc": "HDR10+ metadata",
            "cat": "HDR",
        },
        # Downloaders
        {"name": "aria2c", "binary": binaries.Aria2, "required": False, "desc": "Multi-thread DL", "cat": "Download"},
        {
            "name": "N_m3u8DL-RE",
            "binary": binaries.N_m3u8DL_RE,
            "required": False,
            "desc": "HLS/DASH/ISM",
            "cat": "Download",
        },
        # Subtitle Tools
        {
            "name": "SubtitleEdit",
            "binary": binaries.SubtitleEdit,
            "required": False,
            "desc": "Sub conversion",
            "cat": "Subtitle",
        },
        {
            "name": "CCExtractor",
            "binary": binaries.CCExtractor,
            "required": False,
            "desc": "CC extraction",
            "cat": "Subtitle",
        },
        # Media Players
        {"name": "FFplay", "binary": binaries.FFPlay, "required": False, "desc": "Simple player", "cat": "Player"},
        {"name": "MPV", "binary": binaries.MPV, "required": False, "desc": "Advanced player", "cat": "Player"},
        # Network Tools
        {
            "name": "HolaProxy",
            "binary": binaries.HolaProxy,
            "required": False,
            "desc": "Proxy service",
            "cat": "Network",
        },
        {"name": "Caddy", "binary": binaries.Caddy, "required": False, "desc": "Web server", "cat": "Network"},
    ]

    for dep in dependencies:
        path = shutil.which(dep["binary"])
    # Track overall status
    all_required_installed = True
    total_installed = 0
    total_required = 0
    missing_required = []

    # Create a single table
    table = Table(
        title="Environment Dependencies", title_style="bold", show_header=True, header_style="bold", expand=False
    )
    table.add_column("Category", style="bold cyan", width=10)
    table.add_column("Tool", width=16)
    table.add_column("Status", justify="center", width=10)
    table.add_column("Req", justify="center", width=4)
    table.add_column("Purpose", style="bright_black", width=20)

    last_cat = None
    for dep in all_deps:
        path = dep["binary"]

        # Category column (only show when it changes)
        category = dep["cat"] if dep["cat"] != last_cat else ""
        last_cat = dep["cat"]

        # Status
        if path:
            installed = "[green]:heavy_check_mark:[/green]"
            path_output = path.lower()
            status = "[green]✓[/green]"
            total_installed += 1
        else:
            installed = "[red]:x:[/red]"
            path_output = "Not Found"
            status = "[red]✗[/red]"
            if dep["required"]:
                all_required_installed = False
                missing_required.append(dep["name"])

        # Add to the table
        table.add_row(dep["name"], installed, path_output)
        if dep["required"]:
            total_required += 1

    # Display the result
    console.print(Padding(table, (1, 5)))
        # Required column (compact)
        req = "[red]Y[/red]" if dep["required"] else "[bright_black]-[/bright_black]"

        # Add row
        table.add_row(category, dep["name"], status, req, dep["desc"])

    console.print(Padding(table, (1, 2)))

    # Compact summary
    summary_parts = [f"[bold]Total:[/bold] {total_installed}/{len(all_deps)}"]

    if all_required_installed:
        summary_parts.append("[green]All required tools installed ✓[/green]")
    else:
        summary_parts.append(f"[red]Missing required: {', '.join(missing_required)}[/red]")

    console.print(Padding(" ".join(summary_parts), (1, 2)))

@env.command()
@@ -79,7 +168,7 @@ def info() -> None:
        tree.add(f"[repr.number]{i}.[/] [text2]{path.resolve()}[/]")
    console.print(Padding(tree, (0, 5)))

    table = Table(title="Directories", expand=True)
    table = Table(title="Directories", title_style="bold", expand=True)
    table.add_column("Name", no_wrap=True)
    table.add_column("Path", no_wrap=False, overflow="fold")

@@ -92,12 +181,21 @@ def info() -> None:
    for name in sorted(dir(config.directories)):
        if name.startswith("__") or name == "app_dirs":
            continue
        path = getattr(config.directories, name).resolve()
        for var, var_path in path_vars.items():
            if path.is_relative_to(var_path):
                path = rf"%{var}%\{path.relative_to(var_path)}"
                break
        table.add_row(name.title(), str(path))
        attr_value = getattr(config.directories, name)

        # Handle both single Path objects and lists of Path objects
        if isinstance(attr_value, list):
            # For lists, show each path on a separate line
            paths_str = "\n".join(str(path.resolve()) for path in attr_value)
            table.add_row(name.title(), paths_str)
        else:
            # For single Path objects, use the original logic
            path = attr_value.resolve()
            for var, var_path in path_vars.items():
                if path.is_relative_to(var_path):
                    path = rf"%{var}%\{path.relative_to(var_path)}"
                    break
            table.add_row(name.title(), str(path))

    console.print(Padding(table, (1, 5)))
@@ -12,83 +12,113 @@ from unshackle.core.vault import Vault
from unshackle.core.vaults import Vaults


def _load_vaults(vault_names: list[str]) -> Vaults:
    """Load and validate vaults by name."""
    vaults = Vaults()
    for vault_name in vault_names:
        vault_config = next((x for x in config.key_vaults if x["name"] == vault_name), None)
        if not vault_config:
            raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")

        vault_type = vault_config["type"]
        vault_args = vault_config.copy()
        del vault_args["type"]

        if not vaults.load(vault_type, **vault_args):
            raise click.ClickException(f"Failed to load vault ({vault_name}).")

    return vaults


def _process_service_keys(from_vault: Vault, service: str, log: logging.Logger) -> dict[str, str]:
    """Get and validate keys from a vault for a specific service."""
    content_keys = list(from_vault.get_keys(service))

    bad_keys = {kid: key for kid, key in content_keys if not key or key.count("0") == len(key)}
    for kid, key in bad_keys.items():
        log.warning(f"Skipping NULL key: {kid}:{key}")

    return {kid: key for kid, key in content_keys if kid not in bad_keys}


def _copy_service_data(to_vault: Vault, from_vault: Vault, service: str, log: logging.Logger) -> int:
    """Copy data for a single service between vaults."""
    content_keys = _process_service_keys(from_vault, service, log)
    total_count = len(content_keys)

    if total_count == 0:
        log.info(f"{service}: No keys found in {from_vault}")
        return 0

    try:
        added = to_vault.add_keys(service, content_keys)
    except PermissionError:
        log.warning(f"{service}: No permission to create table in {to_vault}, skipped")
        return 0

    existed = total_count - added

    if added > 0 and existed > 0:
        log.info(f"{service}: {added} added, {existed} skipped ({total_count} total)")
    elif added > 0:
        log.info(f"{service}: {added} added ({total_count} total)")
    else:
        log.info(f"{service}: {existed} skipped (all existed)")

    return added


@click.group(short_help="Manage and configure Key Vaults.", context_settings=context_settings)
def kv() -> None:
    """Manage and configure Key Vaults."""


@kv.command()
@click.argument("to_vault", type=str)
@click.argument("from_vaults", nargs=-1, type=click.UNPROCESSED)
@click.argument("to_vault_name", type=str)
@click.argument("from_vault_names", nargs=-1, type=click.UNPROCESSED)
@click.option("-s", "--service", type=str, default=None, help="Only copy data to and from a specific service.")
def copy(to_vault: str, from_vaults: list[str], service: Optional[str] = None) -> None:
def copy(to_vault_name: str, from_vault_names: list[str], service: Optional[str] = None) -> None:
    """
    Copy data from multiple Key Vaults into a single Key Vault.
    Rows with matching KIDs are skipped unless there's no KEY set.
    Existing data is not deleted or altered.

    The `to_vault` argument is the key vault you wish to copy data to.
    The `to_vault_name` argument is the key vault you wish to copy data to.
    It should be the name of a Key Vault defined in the config.

    The `from_vaults` argument is the key vault(s) you wish to take
    The `from_vault_names` argument is the key vault(s) you wish to take
    data from. You may supply multiple key vaults.
    """
    if not from_vaults:
    if not from_vault_names:
        raise click.ClickException("No Vaults were specified to copy data from.")

    log = logging.getLogger("kv")

    vaults = Vaults()
    for vault_name in [to_vault] + list(from_vaults):
        vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
        if not vault:
            raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
        vault_type = vault["type"]
        vault_args = vault.copy()
        del vault_args["type"]
        vaults.load(vault_type, **vault_args)
    all_vault_names = [to_vault_name] + list(from_vault_names)
    vaults = _load_vaults(all_vault_names)

    to_vault: Vault = vaults.vaults[0]
    from_vaults: list[Vault] = vaults.vaults[1:]
    to_vault = vaults.vaults[0]
    from_vaults = vaults.vaults[1:]

    vault_names = ", ".join([v.name for v in from_vaults])
    log.info(f"Copying data from {vault_names} → {to_vault.name}")

    log.info(f"Copying data from {', '.join([x.name for x in from_vaults])}, into {to_vault.name}")
    if service:
        service = Services.get_tag(service)
        log.info(f"Only copying data for service {service}")
        log.info(f"Filtering by service: {service}")

    total_added = 0
    for from_vault in from_vaults:
        if service:
            services = [service]
        else:
            services = from_vault.get_services()

        for service_ in services:
            log.info(f"Getting data from {from_vault} for {service_}")
            content_keys = list(from_vault.get_keys(service_))  # important as it's a generator we iterate twice

            bad_keys = {kid: key for kid, key in content_keys if not key or key.count("0") == len(key)}

            for kid, key in bad_keys.items():
                log.warning(f"Cannot add a NULL Content Key to a Vault, skipping: {kid}:{key}")

            content_keys = {kid: key for kid, key in content_keys if kid not in bad_keys}

            total_count = len(content_keys)
            log.info(f"Adding {total_count} Content Keys to {to_vault} for {service_}")

            try:
                added = to_vault.add_keys(service_, content_keys)
            except PermissionError:
                log.warning(f" - No permission to create table ({service_}) in {to_vault}, skipping...")
                continue
        services_to_copy = [service] if service else from_vault.get_services()

        for service_tag in services_to_copy:
            added = _copy_service_data(to_vault, from_vault, service_tag, log)
            total_added += added
            existed = total_count - added

            log.info(f"{to_vault} ({service_}): {added} newly added, {existed} already existed (skipped)")

    log.info(f"{to_vault}: {total_added} total newly added")
    if total_added > 0:
        log.info(f"Successfully added {total_added} new keys to {to_vault}")
    else:
        log.info("Copy completed - no new keys to add")


@kv.command()
@@ -105,9 +135,9 @@ def sync(ctx: click.Context, vaults: list[str], service: Optional[str] = None) -
    if not len(vaults) > 1:
        raise click.ClickException("You must provide more than one Vault to sync.")

    ctx.invoke(copy, to_vault=vaults[0], from_vaults=vaults[1:], service=service)
    ctx.invoke(copy, to_vault_name=vaults[0], from_vault_names=vaults[1:], service=service)
    for i in range(1, len(vaults)):
        ctx.invoke(copy, to_vault=vaults[i], from_vaults=[vaults[i - 1]], service=service)
        ctx.invoke(copy, to_vault_name=vaults[i], from_vault_names=[vaults[i - 1]], service=service)


@kv.command()
@@ -134,15 +164,7 @@ def add(file: Path, service: str, vaults: list[str]) -> None:
    log = logging.getLogger("kv")
    service = Services.get_tag(service)

    vaults_ = Vaults()
    for vault_name in vaults:
        vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
        if not vault:
            raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
        vault_type = vault["type"]
        vault_args = vault.copy()
        del vault_args["type"]
        vaults_.load(vault_type, **vault_args)
    vaults_ = _load_vaults(list(vaults))

    data = file.read_text(encoding="utf8")
    kid_keys: dict[str, str] = {}
@@ -172,15 +194,7 @@ def prepare(vaults: list[str]) -> None:
    """Create Service Tables on Vaults if not yet created."""
    log = logging.getLogger("kv")

    vaults_ = Vaults()
    for vault_name in vaults:
        vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
        if not vault:
            raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
        vault_type = vault["type"]
        vault_args = vault.copy()
        del vault_args["type"]
        vaults_.load(vault_type, **vault_args)
    vaults_ = _load_vaults(vaults)

    for vault in vaults_:
        if hasattr(vault, "has_table") and hasattr(vault, "create_table"):
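As a usage sketch of the refactored `kv` commands above (the vault names and service tag are hypothetical and must match entries in your config):

```bash
# Copy keys from two source vaults into "local"; rows with matching KIDs are skipped
unshackle kv copy local remote-a remote-b

# Restrict the copy to a single service tag
unshackle kv copy local remote-a -s AMZN

# Sync first merges everything into the first vault, then chains copies forward
# so each vault ends up with the combined key set
unshackle kv sync local remote-a remote-b
```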
@@ -5,10 +5,10 @@ from typing import Optional
import click
import requests
from Crypto.Random import get_random_bytes
from pyplayready import InvalidCertificateChain, OutdatedDevice
from pyplayready.cdm import Cdm
from pyplayready.crypto.ecc_key import ECCKey
from pyplayready.device import Device
from pyplayready.exceptions import InvalidCertificateChain, OutdatedDevice
from pyplayready.system.bcert import Certificate, CertificateChain
from pyplayready.system.pssh import PSSH


@@ -16,7 +16,7 @@ from unshackle.core import binaries
from unshackle.core.config import config
from unshackle.core.console import console
from unshackle.core.constants import context_settings
from unshackle.core.proxies import Basic, Hola, NordVPN
from unshackle.core.proxies import Basic, Hola, NordVPN, SurfsharkVPN
from unshackle.core.service import Service
from unshackle.core.services import Services
from unshackle.core.utils.click_types import ContextData
@@ -69,6 +69,8 @@ def search(ctx: click.Context, no_proxy: bool, profile: Optional[str] = None, pr
        proxy_providers.append(Basic(**config.proxy_providers["basic"]))
    if config.proxy_providers.get("nordvpn"):
        proxy_providers.append(NordVPN(**config.proxy_providers["nordvpn"]))
    if config.proxy_providers.get("surfsharkvpn"):
        proxy_providers.append(SurfsharkVPN(**config.proxy_providers["surfsharkvpn"]))
    if binaries.HolaProxy:
        proxy_providers.append(Hola())
    for proxy_provider in proxy_providers:
@@ -1,19 +1,26 @@
import logging
import subprocess

import click
from aiohttp import web

from unshackle.core import binaries
from unshackle.core.api import cors_middleware, setup_routes, setup_swagger
from unshackle.core.config import config
from unshackle.core.constants import context_settings


@click.command(short_help="Serve your Local Widevine Devices for Remote Access.", context_settings=context_settings)
@click.command(
    short_help="Serve your Local Widevine Devices and REST API for Remote Access.", context_settings=context_settings
)
@click.option("-h", "--host", type=str, default="0.0.0.0", help="Host to serve from.")
@click.option("-p", "--port", type=int, default=8786, help="Port to serve from.")
@click.option("--caddy", is_flag=True, default=False, help="Also serve with Caddy.")
def serve(host: str, port: int, caddy: bool) -> None:
@click.option("--api-only", is_flag=True, default=False, help="Serve only the REST API, not pywidevine CDM.")
@click.option("--no-key", is_flag=True, default=False, help="Disable API key authentication (allows all requests).")
def serve(host: str, port: int, caddy: bool, api_only: bool, no_key: bool) -> None:
    """
    Serve your Local Widevine Devices for Remote Access.
    Serve your Local Widevine Devices and REST API for Remote Access.

    \b
    Host as 127.0.0.1 may block remote access even if port-forwarded.
@@ -23,8 +30,46 @@ def serve(host: str, port: int, caddy: bool) -> None:
    You may serve with Caddy at the same time with --caddy. You can use Caddy
    as a reverse-proxy to serve with HTTPS. The config used will be the Caddyfile
    next to the unshackle config.

    \b
    The REST API provides programmatic access to unshackle functionality.
    Configure authentication in your config under serve.users and serve.api_secret.

    \b
    REMOTE SERVICES:
    The server exposes endpoints that allow remote unshackle clients to use
    your configured services without needing the service implementations.
    Remote clients can authenticate, get titles/tracks, and receive session data
    for downloading. Configure remote clients in unshackle.yaml:

    \b
    remote_services:
      - url: "http://your-server:8786"
        api_key: "your-api-key"
        name: "my-server"

    \b
    Available remote endpoints:
    - GET /api/remote/services - List available services
    - POST /api/remote/{service}/search - Search for content
    - POST /api/remote/{service}/titles - Get titles
    - POST /api/remote/{service}/tracks - Get tracks
    - POST /api/remote/{service}/chapters - Get chapters
    """
    from pywidevine import serve
    from pywidevine import serve as pywidevine_serve

    log = logging.getLogger("serve")

    # Validate API secret for REST API routes (unless --no-key is used)
    if not no_key:
        api_secret = config.serve.get("api_secret")
        if not api_secret:
            raise click.ClickException(
                "API secret key is not configured. Please add 'api_secret' to the 'serve' section in your config."
            )
    else:
        api_secret = None
        log.warning("Running with --no-key: Authentication is DISABLED for all API endpoints!")

    if caddy:
        if not binaries.Caddy:
@@ -39,7 +84,51 @@ def serve(host: str, port: int, caddy: bool) -> None:
        if not config.serve.get("devices"):
            config.serve["devices"] = []
        config.serve["devices"].extend(list(config.directories.wvds.glob("*.wvd")))
        serve.run(config.serve, host, port)

        if api_only:
            # API-only mode: serve just the REST API
            log.info("Starting REST API server (pywidevine CDM disabled)")
            if no_key:
                app = web.Application(middlewares=[cors_middleware])
                app["config"] = {"users": []}
            else:
                app = web.Application(middlewares=[cors_middleware, pywidevine_serve.authentication])
                app["config"] = {"users": [api_secret]}
            setup_routes(app)
            setup_swagger(app)
            log.info(f"REST API endpoints available at http://{host}:{port}/api/")
            log.info(f"Swagger UI available at http://{host}:{port}/api/docs/")
            log.info("(Press CTRL+C to quit)")
            web.run_app(app, host=host, port=port, print=None)
        else:
            # Integrated mode: serve both pywidevine + REST API
            log.info("Starting integrated server (pywidevine CDM + REST API)")

            # Create integrated app with both pywidevine and API routes
            if no_key:
                app = web.Application(middlewares=[cors_middleware])
                app["config"] = dict(config.serve)
                app["config"]["users"] = []
            else:
                app = web.Application(middlewares=[cors_middleware, pywidevine_serve.authentication])
                # Setup config - add API secret to users for authentication
                serve_config = dict(config.serve)
                if not serve_config.get("users"):
                    serve_config["users"] = []
                if api_secret not in serve_config["users"]:
                    serve_config["users"].append(api_secret)
                app["config"] = serve_config

            app.on_startup.append(pywidevine_serve._startup)
            app.on_cleanup.append(pywidevine_serve._cleanup)
            app.add_routes(pywidevine_serve.routes)
            setup_routes(app)
            setup_swagger(app)

            log.info(f"REST API endpoints available at http://{host}:{port}/api/")
            log.info(f"Swagger UI available at http://{host}:{port}/api/docs/")
            log.info("(Press CTRL+C to quit)")
            web.run_app(app, host=host, port=port, print=None)
    finally:
        if caddy_p:
            caddy_p.kill()
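A hedged sketch of calling the remote endpoints listed in the help text above with curl. The `X-Secret-Key` header is an assumption based on pywidevine's serve authentication middleware, and the POST payload shape is purely illustrative; verify both against your deployment:

```bash
# List services exposed by a remote unshackle server
curl -H "X-Secret-Key: your-api-key" http://your-server:8786/api/remote/services

# Request titles for a service (payload shape is illustrative, not confirmed by the source)
curl -X POST http://your-server:8786/api/remote/SERVICE_NAME/titles \
     -H "X-Secret-Key: your-api-key" -H "Content-Type: application/json" \
     -d '{"title_id": "CONTENT_ID"}'
```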
@@ -1 +1 @@
__version__ = "1.0.1"
__version__ = "2.0.0"
@@ -1,7 +1,5 @@
import atexit
import logging
from datetime import datetime
from pathlib import Path

import click
import urllib3
@@ -16,23 +14,17 @@ from unshackle.core.commands import Commands
from unshackle.core.config import config
from unshackle.core.console import ComfyRichHandler, console
from unshackle.core.constants import context_settings
from unshackle.core.utilities import rotate_log_file

LOGGING_PATH = None
from unshackle.core.update_checker import UpdateChecker
from unshackle.core.utilities import close_debug_logger, init_debug_logger


@click.command(cls=Commands, invoke_without_command=True, context_settings=context_settings)
@click.option("-v", "--version", is_flag=True, default=False, help="Print version information.")
@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs.")
@click.option(
    "--log",
    "log_path",
    type=Path,
    default=config.directories.logs / config.filenames.log,
    help="Log path (or filename). Path can contain the following f-string args: {name} {time}.",
)
def main(version: bool, debug: bool, log_path: Path) -> None:
@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs and JSON debug logging.")
def main(version: bool, debug: bool) -> None:
    """unshackle—Modular Movie, TV, and Music Archival Software."""
    debug_logging_enabled = debug or config.debug

    logging.basicConfig(
        level=logging.DEBUG if debug else logging.INFO,
        format="%(message)s",
@@ -48,11 +40,8 @@ def main(version: bool, debug: bool, log_path: Path) -> None:
        ],
    )

    if log_path:
        global LOGGING_PATH
        console.record = True
        new_log_path = rotate_log_file(log_path)
        LOGGING_PATH = new_log_path
    if debug_logging_enabled:
        init_debug_logger(enabled=True)

    urllib3.disable_warnings(InsecureRequestWarning)

@@ -69,7 +58,7 @@ def main(version: bool, debug: bool, log_path: Path) -> None:
            r" ▀▀▀ ▀▀ █▪ ▀▀▀▀ ▀▀▀ · ▀ ▀ ·▀▀▀ ·▀ ▀.▀▀▀ ▀▀▀ ",
            style="ascii.art",
        ),
        f"v[repr.number]{__version__}[/]",
        f"v [repr.number]{__version__}[/] - © 2025 - github.com/unshackle-dl/unshackle",
    ),
    (1, 11, 1, 10),
    expand=True,
@@ -80,12 +69,27 @@ def main(version: bool, debug: bool, log_path: Path) -> None:
    if version:
        return

    if config.update_checks:
        try:
            latest_version = UpdateChecker.check_for_updates_sync(__version__)
            if latest_version:
                console.print(
                    f"\n[yellow]⚠️ Update available![/yellow] "
                    f"Current: {__version__} → Latest: [green]{latest_version}[/green]",
                    justify="center",
                )
                console.print(
                    "Visit: https://github.com/unshackle-dl/unshackle/releases/latest\n",
                    justify="center",
                )
        except Exception:
            pass


@atexit.register
def save_log():
    if console.record and LOGGING_PATH:
        # TODO: Currently semi-bust. Everything that refreshes gets duplicated.
        console.save_text(LOGGING_PATH)
def cleanup():
    """Clean up resources on exit."""
    close_debug_logger()


if __name__ == "__main__":
3 unshackle/core/api/__init__.py Normal file
@@ -0,0 +1,3 @@
from unshackle.core.api.routes import cors_middleware, setup_routes, setup_swagger

__all__ = ["setup_routes", "setup_swagger", "cors_middleware"]
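These exports wire into an aiohttp application the same way the serve command above does; a minimal standalone sketch mirroring its --no-key branch (host and port values are illustrative):

```python
from aiohttp import web

from unshackle.core.api import cors_middleware, setup_routes, setup_swagger

app = web.Application(middlewares=[cors_middleware])
app["config"] = {"users": []}  # an empty users list disables key checks, as in --no-key mode
setup_routes(app)    # mount the REST API endpoints
setup_swagger(app)   # mount the Swagger UI at /api/docs/
web.run_app(app, host="127.0.0.1", port=8786)
```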
631 unshackle/core/api/download_manager.py Normal file
@@ -0,0 +1,631 @@
import asyncio
import json
import logging
import os
import sys
import tempfile
import threading
import uuid
from contextlib import suppress
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import Enum
from typing import Any, Callable, Dict, List, Optional

log = logging.getLogger("download_manager")


class JobStatus(Enum):
    QUEUED = "queued"
    DOWNLOADING = "downloading"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


@dataclass
class DownloadJob:
    """Represents a download job with all its parameters and status."""

    job_id: str
    status: JobStatus
    created_time: datetime
    service: str
    title_id: str
    parameters: Dict[str, Any]

    # Progress tracking
    started_time: Optional[datetime] = None
    completed_time: Optional[datetime] = None
    progress: float = 0.0

    # Results and error info
    output_files: List[str] = field(default_factory=list)
    error_message: Optional[str] = None
    error_details: Optional[str] = None

    # Cancellation support
    cancel_event: threading.Event = field(default_factory=threading.Event)

    def to_dict(self, include_full_details: bool = False) -> Dict[str, Any]:
        """Convert job to dictionary for JSON response."""
        result = {
            "job_id": self.job_id,
            "status": self.status.value,
            "created_time": self.created_time.isoformat(),
            "service": self.service,
            "title_id": self.title_id,
            "progress": self.progress,
        }

        if include_full_details:
            result.update(
                {
                    "parameters": self.parameters,
                    "started_time": self.started_time.isoformat() if self.started_time else None,
                    "completed_time": self.completed_time.isoformat() if self.completed_time else None,
                    "output_files": self.output_files,
                    "error_message": self.error_message,
                    "error_details": self.error_details,
                }
            )

        return result

def _perform_download(
    job_id: str,
    service: str,
    title_id: str,
    params: Dict[str, Any],
    cancel_event: Optional[threading.Event] = None,
    progress_callback: Optional[Callable[[Dict[str, Any]], None]] = None,
) -> List[str]:
    """Execute the synchronous download logic for a job."""

    def _check_cancel(stage: str):
        if cancel_event and cancel_event.is_set():
            raise Exception(f"Job was cancelled {stage}")

    from contextlib import redirect_stderr, redirect_stdout
    from io import StringIO

    _check_cancel("before execution started")

    # Import dl.py components lazily to avoid circular deps during module import
    import click
    import yaml

    from unshackle.commands.dl import dl
    from unshackle.core.config import config
    from unshackle.core.services import Services
    from unshackle.core.utils.click_types import ContextData
    from unshackle.core.utils.collections import merge_dict

    log.info(f"Starting sync download for job {job_id}")

    # Load service configuration
    service_config_path = Services.get_path(service) / config.filenames.config
    if service_config_path.exists():
        service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
    else:
        service_config = {}
    merge_dict(config.services.get(service), service_config)

    from unshackle.commands.dl import dl as dl_command

    ctx = click.Context(dl_command.cli)
    ctx.invoked_subcommand = service
    ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=[], profile=params.get("profile"))
    ctx.params = {
        "proxy": params.get("proxy"),
        "no_proxy": params.get("no_proxy", False),
        "profile": params.get("profile"),
        "tag": params.get("tag"),
        "tmdb_id": params.get("tmdb_id"),
        "tmdb_name": params.get("tmdb_name", False),
        "tmdb_year": params.get("tmdb_year", False),
    }

    dl_instance = dl(
        ctx=ctx,
        no_proxy=params.get("no_proxy", False),
        profile=params.get("profile"),
        proxy=params.get("proxy"),
        tag=params.get("tag"),
        tmdb_id=params.get("tmdb_id"),
        tmdb_name=params.get("tmdb_name", False),
        tmdb_year=params.get("tmdb_year", False),
    )

    service_module = Services.load(service)

    _check_cancel("before service instantiation")

    try:
        import inspect

        service_init_params = inspect.signature(service_module.__init__).parameters

        service_ctx = click.Context(click.Command(service))
        service_ctx.parent = ctx
        service_ctx.obj = ctx.obj

        service_kwargs = {}

        if "title" in service_init_params:
            service_kwargs["title"] = title_id

        for key, value in params.items():
            if key in service_init_params and key not in ["service", "title_id"]:
                service_kwargs[key] = value

        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                if param_info.default is inspect.Parameter.empty:
                    if param_name == "movie":
                        service_kwargs[param_name] = "/movies/" in title_id
                    elif param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    else:
                        log.warning(f"Unknown required parameter '{param_name}' for service {service}, using None")
                        service_kwargs[param_name] = None

        service_instance = service_module(service_ctx, **service_kwargs)

    except Exception as exc:  # noqa: BLE001 - propagate meaningful failure
        log.error(f"Failed to create service instance: {exc}")
        raise

    original_download_dir = config.directories.downloads

    _check_cancel("before download execution")

    stdout_capture = StringIO()
    stderr_capture = StringIO()

    # Simple progress tracking if callback provided
    if progress_callback:
        # Report initial progress
        progress_callback({"progress": 0.0, "status": "starting"})

        # Simple approach: report progress at key points
        original_result = dl_instance.result

        def result_with_progress(*args, **kwargs):
            try:
                # Report that download started
                progress_callback({"progress": 5.0, "status": "downloading"})

                # Call original method
                result = original_result(*args, **kwargs)

                # Report completion
                progress_callback({"progress": 100.0, "status": "completed"})
                return result
            except Exception as e:
                progress_callback({"progress": 0.0, "status": "failed", "error": str(e)})
                raise

        dl_instance.result = result_with_progress

    try:
        with redirect_stdout(stdout_capture), redirect_stderr(stderr_capture):
            dl_instance.result(
                service=service_instance,
                quality=params.get("quality", []),
                vcodec=params.get("vcodec"),
                acodec=params.get("acodec"),
                vbitrate=params.get("vbitrate"),
                abitrate=params.get("abitrate"),
                range_=params.get("range", []),
                channels=params.get("channels"),
                no_atmos=params.get("no_atmos", False),
                wanted=params.get("wanted", []),
                lang=params.get("lang", ["orig"]),
                v_lang=params.get("v_lang", []),
                a_lang=params.get("a_lang", []),
                s_lang=params.get("s_lang", ["all"]),
                require_subs=params.get("require_subs", []),
                forced_subs=params.get("forced_subs", False),
                sub_format=params.get("sub_format"),
                video_only=params.get("video_only", False),
                audio_only=params.get("audio_only", False),
                subs_only=params.get("subs_only", False),
                chapters_only=params.get("chapters_only", False),
                no_subs=params.get("no_subs", False),
                no_audio=params.get("no_audio", False),
                no_chapters=params.get("no_chapters", False),
                slow=params.get("slow", False),
                list_=False,
                list_titles=False,
                skip_dl=params.get("skip_dl", False),
                export=params.get("export"),
                cdm_only=params.get("cdm_only"),
                no_proxy=params.get("no_proxy", False),
                no_folder=params.get("no_folder", False),
                no_source=params.get("no_source", False),
                workers=params.get("workers"),
                downloads=params.get("downloads", 1),
                best_available=params.get("best_available", False),
            )

    except SystemExit as exc:
        if exc.code != 0:
            stdout_str = stdout_capture.getvalue()
            stderr_str = stderr_capture.getvalue()
            log.error(f"Download exited with code {exc.code}")
            log.error(f"Stdout: {stdout_str}")
            log.error(f"Stderr: {stderr_str}")
            raise Exception(f"Download failed with exit code {exc.code}")

    except Exception as exc:  # noqa: BLE001 - propagate to caller
        stdout_str = stdout_capture.getvalue()
        stderr_str = stderr_capture.getvalue()
        log.error(f"Download execution failed: {exc}")
        log.error(f"Stdout: {stdout_str}")
        log.error(f"Stderr: {stderr_str}")
        raise

    log.info(f"Download completed for job {job_id}, files in {original_download_dir}")

    return []

class DownloadQueueManager:
    """Manages download job queue with configurable concurrency limits."""

    def __init__(self, max_concurrent_downloads: int = 2, job_retention_hours: int = 24):
        self.max_concurrent_downloads = max_concurrent_downloads
        self.job_retention_hours = job_retention_hours

        self._jobs: Dict[str, DownloadJob] = {}
        self._job_queue: asyncio.Queue = asyncio.Queue()
        self._active_downloads: Dict[str, asyncio.Task] = {}
        self._download_processes: Dict[str, asyncio.subprocess.Process] = {}
        self._job_temp_files: Dict[str, Dict[str, str]] = {}
        self._workers_started = False
        self._shutdown_event = asyncio.Event()

        log.info(
            f"Initialized download queue manager: max_concurrent={max_concurrent_downloads}, retention_hours={job_retention_hours}"
        )

    def create_job(self, service: str, title_id: str, **parameters) -> DownloadJob:
        """Create a new download job and add it to the queue."""
        job_id = str(uuid.uuid4())
        job = DownloadJob(
            job_id=job_id,
            status=JobStatus.QUEUED,
            created_time=datetime.now(),
            service=service,
            title_id=title_id,
            parameters=parameters,
        )

        self._jobs[job_id] = job
        self._job_queue.put_nowait(job)

        log.info(f"Created download job {job_id} for {service}:{title_id}")
        return job

    def get_job(self, job_id: str) -> Optional[DownloadJob]:
        """Get job by ID."""
        return self._jobs.get(job_id)

    def list_jobs(self) -> List[DownloadJob]:
        """List all jobs."""
        return list(self._jobs.values())

    def cancel_job(self, job_id: str) -> bool:
        """Cancel a job if it's queued or downloading."""
        job = self._jobs.get(job_id)
        if not job:
            return False

        if job.status == JobStatus.QUEUED:
            job.status = JobStatus.CANCELLED
            job.cancel_event.set()  # Signal cancellation
            log.info(f"Cancelled queued job {job_id}")
            return True
        elif job.status == JobStatus.DOWNLOADING:
            # Set the cancellation event first - this will be checked by the download thread
            job.cancel_event.set()
            job.status = JobStatus.CANCELLED
            log.info(f"Signaled cancellation for downloading job {job_id}")

            # Cancel the active download task
            task = self._active_downloads.get(job_id)
            if task:
                task.cancel()
                log.info(f"Cancelled download task for job {job_id}")

            process = self._download_processes.get(job_id)
            if process:
                try:
                    process.terminate()
                    log.info(f"Terminated worker process for job {job_id}")
                except ProcessLookupError:
                    log.debug(f"Worker process for job {job_id} already exited")

            return True

        return False

    def cleanup_old_jobs(self) -> int:
        """Remove jobs older than retention period."""
        cutoff_time = datetime.now() - timedelta(hours=self.job_retention_hours)
        jobs_to_remove = []

        for job_id, job in self._jobs.items():
            if job.status in [JobStatus.COMPLETED, JobStatus.FAILED, JobStatus.CANCELLED]:
                if job.completed_time and job.completed_time < cutoff_time:
                    jobs_to_remove.append(job_id)
                elif not job.completed_time and job.created_time < cutoff_time:
                    jobs_to_remove.append(job_id)

        for job_id in jobs_to_remove:
            del self._jobs[job_id]

        if jobs_to_remove:
            log.info(f"Cleaned up {len(jobs_to_remove)} old jobs")

        return len(jobs_to_remove)

    async def start_workers(self):
        """Start worker tasks to process the download queue."""
        if self._workers_started:
            return

        self._workers_started = True

        # Start worker tasks
        for i in range(self.max_concurrent_downloads):
            asyncio.create_task(self._download_worker(f"worker-{i}"))

        # Start cleanup task
        asyncio.create_task(self._cleanup_worker())

        log.info(f"Started {self.max_concurrent_downloads} download workers")

    async def shutdown(self):
        """Shutdown the queue manager and cancel all active downloads."""
        log.info("Shutting down download queue manager")
        self._shutdown_event.set()

        # Cancel all active downloads
        for task in self._active_downloads.values():
            task.cancel()

        # Terminate worker processes
        for job_id, process in list(self._download_processes.items()):
            try:
                process.terminate()
            except ProcessLookupError:
                log.debug(f"Worker process for job {job_id} already exited during shutdown")

        for job_id, process in list(self._download_processes.items()):
            try:
                await asyncio.wait_for(process.wait(), timeout=5)
            except asyncio.TimeoutError:
                log.warning(f"Worker process for job {job_id} did not exit, killing")
                process.kill()
                await process.wait()
            finally:
                self._download_processes.pop(job_id, None)

        # Clean up any remaining temp files
        for paths in self._job_temp_files.values():
            for path in paths.values():
                try:
                    os.remove(path)
                except OSError:
                    pass
        self._job_temp_files.clear()

        # Wait for workers to finish
        if self._active_downloads:
            await asyncio.gather(*self._active_downloads.values(), return_exceptions=True)

    async def _download_worker(self, worker_name: str):
        """Worker task that processes jobs from the queue."""
        log.debug(f"Download worker {worker_name} started")

        while not self._shutdown_event.is_set():
            try:
                # Wait for a job or shutdown signal
                job = await asyncio.wait_for(self._job_queue.get(), timeout=1.0)

                if job.status == JobStatus.CANCELLED:
                    continue

                # Start processing the job
                job.status = JobStatus.DOWNLOADING
                job.started_time = datetime.now()

                log.info(f"Worker {worker_name} starting job {job.job_id}")

                # Create download task
                download_task = asyncio.create_task(self._execute_download(job))
                self._active_downloads[job.job_id] = download_task

                try:
                    await download_task
                except asyncio.CancelledError:
                    job.status = JobStatus.CANCELLED
                    log.info(f"Job {job.job_id} was cancelled")
                except Exception as e:
                    job.status = JobStatus.FAILED
                    job.error_message = str(e)
                    log.error(f"Job {job.job_id} failed: {e}")
                finally:
                    job.completed_time = datetime.now()
                    if job.job_id in self._active_downloads:
                        del self._active_downloads[job.job_id]

            except asyncio.TimeoutError:
                continue
            except Exception as e:
                log.error(f"Worker {worker_name} error: {e}")

    async def _execute_download(self, job: DownloadJob):
        """Execute the actual download for a job."""
        log.info(f"Executing download for job {job.job_id}")

        try:
            output_files = await self._run_download_async(job)
            job.status = JobStatus.COMPLETED
            job.output_files = output_files
            job.progress = 100.0
            log.info(f"Download completed for job {job.job_id}: {len(output_files)} files")
        except Exception as e:
            job.status = JobStatus.FAILED
            job.error_message = str(e)
            job.error_details = str(e)
            log.error(f"Download failed for job {job.job_id}: {e}")
            raise

    async def _run_download_async(self, job: DownloadJob) -> List[str]:
        """Invoke a worker subprocess to execute the download."""

        payload = {
            "job_id": job.job_id,
            "service": job.service,
            "title_id": job.title_id,
            "parameters": job.parameters,
        }

        payload_fd, payload_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_payload.json")
        os.close(payload_fd)
        result_fd, result_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_result.json")
        os.close(result_fd)
        progress_fd, progress_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_progress.json")
        os.close(progress_fd)

        with open(payload_path, "w", encoding="utf-8") as handle:
            json.dump(payload, handle)

        process = await asyncio.create_subprocess_exec(
            sys.executable,
            "-m",
            "unshackle.core.api.download_worker",
            payload_path,
            result_path,
            progress_path,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )

        self._download_processes[job.job_id] = process
        self._job_temp_files[job.job_id] = {"payload": payload_path, "result": result_path, "progress": progress_path}

        communicate_task = asyncio.create_task(process.communicate())

        stdout_bytes = b""
        stderr_bytes = b""

        try:
            while True:
                done, _ = await asyncio.wait({communicate_task}, timeout=0.5)
                if communicate_task in done:
                    stdout_bytes, stderr_bytes = communicate_task.result()
                    break

                # Check for progress updates
                try:
                    if os.path.exists(progress_path):
                        with open(progress_path, "r", encoding="utf-8") as handle:
                            progress_data = json.load(handle)
                            if "progress" in progress_data:
                                new_progress = float(progress_data["progress"])
                                if new_progress != job.progress:
                                    job.progress = new_progress
                                    log.info(f"Job {job.job_id} progress updated: {job.progress}%")
                except (FileNotFoundError, json.JSONDecodeError, ValueError) as e:
                    log.debug(f"Could not read progress for job {job.job_id}: {e}")

                if job.cancel_event.is_set() or job.status == JobStatus.CANCELLED:
                    log.info(f"Cancellation detected for job {job.job_id}, terminating worker process")
                    process.terminate()
                    try:
                        await asyncio.wait_for(communicate_task, timeout=5)
                    except asyncio.TimeoutError:
                        log.warning(f"Worker process for job {job.job_id} did not terminate, killing")
                        process.kill()
                        await asyncio.wait_for(communicate_task, timeout=5)
                    raise asyncio.CancelledError("Job was cancelled")

            returncode = process.returncode
            stdout = stdout_bytes.decode("utf-8", errors="ignore")
            stderr = stderr_bytes.decode("utf-8", errors="ignore")

            if stdout.strip():
                log.debug(f"Worker stdout for job {job.job_id}: {stdout.strip()}")
            if stderr.strip():
                log.warning(f"Worker stderr for job {job.job_id}: {stderr.strip()}")

            result_data: Optional[Dict[str, Any]] = None
            try:
                with open(result_path, "r", encoding="utf-8") as handle:
                    result_data = json.load(handle)
            except FileNotFoundError:
                log.error(f"Result file missing for job {job.job_id}")
            except json.JSONDecodeError as exc:
                log.error(f"Failed to parse worker result for job {job.job_id}: {exc}")

            if returncode != 0:
                message = result_data.get("message") if result_data else "unknown error"
                raise Exception(f"Worker exited with code {returncode}: {message}")

            if not result_data or result_data.get("status") != "success":
                message = result_data.get("message") if result_data else "worker did not report success"
                raise Exception(f"Worker failure: {message}")

            return result_data.get("output_files", [])

        finally:
            if not communicate_task.done():
                communicate_task.cancel()
|
||||
with suppress(asyncio.CancelledError):
|
||||
await communicate_task
|
||||
|
||||
self._download_processes.pop(job.job_id, None)
|
||||
|
||||
temp_paths = self._job_temp_files.pop(job.job_id, {})
|
||||
for path in temp_paths.values():
|
||||
try:
|
||||
os.remove(path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def _execute_download_sync(self, job: DownloadJob) -> List[str]:
|
||||
"""Execute download synchronously using existing dl.py logic."""
|
||||
return _perform_download(job.job_id, job.service, job.title_id, job.parameters.copy(), job.cancel_event)
|
||||
|
||||
async def _cleanup_worker(self):
|
||||
"""Worker that periodically cleans up old jobs."""
|
||||
while not self._shutdown_event.is_set():
|
||||
try:
|
||||
await asyncio.sleep(3600) # Run every hour
|
||||
self.cleanup_old_jobs()
|
||||
except Exception as e:
|
||||
log.error(f"Cleanup worker error: {e}")
|
||||
|
||||
|
||||
# Global instance
|
||||
download_manager: Optional[DownloadQueueManager] = None
|
||||
|
||||
|
||||
def get_download_manager() -> DownloadQueueManager:
|
||||
"""Get the global download manager instance."""
|
||||
global download_manager
|
||||
if download_manager is None:
|
||||
# Load configuration from unshackle config
|
||||
from unshackle.core.config import config
|
||||
|
||||
max_concurrent = getattr(config, "max_concurrent_downloads", 2)
|
||||
retention_hours = getattr(config, "download_job_retention_hours", 24)
|
||||
|
||||
download_manager = DownloadQueueManager(max_concurrent, retention_hours)
|
||||
|
||||
return download_manager
|
||||
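A minimal in-process sketch of driving the queue manager above. The service tag, title id, and `quality` keyword are invented placeholders; `create_job(service, title_id, **params)` is assumed from how the download handler further below calls it.

```python
# Sketch only: queue a job through the global manager. The manager functions
# are real; the service tag / title id / quality parameter are placeholders.
import asyncio

from unshackle.core.api.download_manager import get_download_manager


async def queue_example_download() -> str:
    manager = get_download_manager()
    await manager.start_workers()  # safe to call repeatedly; the handlers below do the same
    job = manager.create_job("EXAMPLE", "title-123", quality="1080p")
    return job.job_id


if __name__ == "__main__":
    print(asyncio.run(queue_example_download()))
```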
unshackle/core/api/download_worker.py (new file, 84 lines)
@@ -0,0 +1,84 @@
"""Standalone worker process entry point for executing download jobs."""

from __future__ import annotations

import json
import logging
import sys
import traceback
from pathlib import Path
from typing import Any, Dict

from .download_manager import _perform_download

log = logging.getLogger("download_worker")


def _read_payload(path: Path) -> Dict[str, Any]:
    with path.open("r", encoding="utf-8") as handle:
        return json.load(handle)


def _write_result(path: Path, payload: Dict[str, Any]) -> None:
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("w", encoding="utf-8") as handle:
        json.dump(payload, handle)


def main(argv: list[str]) -> int:
    if len(argv) not in [3, 4]:
        print(
            "Usage: python -m unshackle.core.api.download_worker <payload_path> <result_path> [progress_path]",
            file=sys.stderr,
        )
        return 2

    payload_path = Path(argv[1])
    result_path = Path(argv[2])
    progress_path = Path(argv[3]) if len(argv) > 3 else None

    result: Dict[str, Any] = {}
    exit_code = 0

    try:
        payload = _read_payload(payload_path)
        job_id = payload["job_id"]
        service = payload["service"]
        title_id = payload["title_id"]
        params = payload.get("parameters", {})

        log.info(f"Worker starting job {job_id} ({service}:{title_id})")

        def progress_callback(progress_data: Dict[str, Any]) -> None:
            """Write progress updates to file for main process to read."""
            if progress_path:
                try:
                    log.info(f"Writing progress update: {progress_data}")
                    _write_result(progress_path, progress_data)
                    log.info(f"Progress update written to {progress_path}")
                except Exception as e:
                    log.error(f"Failed to write progress update: {e}")

        output_files = _perform_download(
            job_id, service, title_id, params, cancel_event=None, progress_callback=progress_callback
        )

        result = {"status": "success", "output_files": output_files}

    except Exception as exc:  # noqa: BLE001 - capture for parent process
        exit_code = 1
        tb = traceback.format_exc()
        log.error(f"Worker failed with error: {exc}")
        result = {"status": "error", "message": str(exc), "traceback": tb}

    finally:
        try:
            _write_result(result_path, result)
        except Exception as exc:  # noqa: BLE001 - last resort logging
            log.error(f"Failed to write worker result file: {exc}")

    return exit_code


if __name__ == "__main__":
    sys.exit(main(sys.argv))
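The worker can also be driven by hand, which mirrors how the manager's subprocess call above is wired. A hedged sketch; the payload values are placeholders, and actually running it would start a real download via `_perform_download`:

```python
# Sketch: invoke the worker entry point manually with temp payload/result files.
import json
import subprocess
import sys
import tempfile
from pathlib import Path

workdir = Path(tempfile.mkdtemp(prefix="unshackle_worker_demo_"))
payload_path = workdir / "payload.json"
result_path = workdir / "result.json"
progress_path = workdir / "progress.json"

payload_path.write_text(
    json.dumps(
        {
            "job_id": "demo-1",      # placeholder values throughout
            "service": "EXAMPLE",
            "title_id": "title-123",
            "parameters": {},
        }
    ),
    encoding="utf-8",
)

proc = subprocess.run(
    [
        sys.executable,
        "-m",
        "unshackle.core.api.download_worker",
        str(payload_path),
        str(result_path),
        str(progress_path),
    ]
)
print(proc.returncode, json.loads(result_path.read_text(encoding="utf-8")))
```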
unshackle/core/api/handlers.py (new file, 652 lines)
@@ -0,0 +1,652 @@
import logging
from typing import Any, Dict, List, Optional

from aiohttp import web

from unshackle.core.constants import AUDIO_CODEC_MAP, DYNAMIC_RANGE_MAP, VIDEO_CODEC_MAP
from unshackle.core.proxies.basic import Basic
from unshackle.core.proxies.hola import Hola
from unshackle.core.proxies.nordvpn import NordVPN
from unshackle.core.proxies.surfsharkvpn import SurfsharkVPN
from unshackle.core.services import Services
from unshackle.core.titles import Episode, Movie, Title_T
from unshackle.core.tracks import Audio, Subtitle, Video

log = logging.getLogger("api")


def initialize_proxy_providers() -> List[Any]:
    """Initialize and return available proxy providers."""
    proxy_providers = []
    try:
        from unshackle.core import binaries

        # Load the main unshackle config to get proxy provider settings
        from unshackle.core.config import config as main_config

        log.debug(f"Main config proxy providers: {getattr(main_config, 'proxy_providers', {})}")
        log.debug(f"Available proxy provider configs: {list(getattr(main_config, 'proxy_providers', {}).keys())}")

        # Use main_config instead of the service-specific config for proxy providers
        proxy_config = getattr(main_config, "proxy_providers", {})

        if proxy_config.get("basic"):
            log.debug("Loading Basic proxy provider")
            proxy_providers.append(Basic(**proxy_config["basic"]))
        if proxy_config.get("nordvpn"):
            log.debug("Loading NordVPN proxy provider")
            proxy_providers.append(NordVPN(**proxy_config["nordvpn"]))
        if proxy_config.get("surfsharkvpn"):
            log.debug("Loading SurfsharkVPN proxy provider")
            proxy_providers.append(SurfsharkVPN(**proxy_config["surfsharkvpn"]))
        if hasattr(binaries, "HolaProxy") and binaries.HolaProxy:
            log.debug("Loading Hola proxy provider")
            proxy_providers.append(Hola())

        for proxy_provider in proxy_providers:
            log.info(f"Loaded {proxy_provider.__class__.__name__}: {proxy_provider}")

        if not proxy_providers:
            log.warning("No proxy providers were loaded. Check your proxy provider configuration in unshackle.yaml")

    except Exception as e:
        log.warning(f"Failed to initialize some proxy providers: {e}")

    return proxy_providers


def resolve_proxy(proxy: str, proxy_providers: List[Any]) -> str:
    """Resolve proxy parameter to actual proxy URI."""
    import re

    if not proxy:
        return proxy

    # Check if explicit proxy URI
    if re.match(r"^https?://", proxy):
        return proxy

    # Handle provider:country format (e.g., "nordvpn:us")
    requested_provider = None
    if re.match(r"^[a-z]+:.+$", proxy, re.IGNORECASE):
        requested_provider, proxy = proxy.split(":", maxsplit=1)

    # Handle country code format (e.g., "us", "uk")
    if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE):
        proxy = proxy.lower()

        if requested_provider:
            # Find specific provider (case-insensitive matching)
            proxy_provider = next(
                (x for x in proxy_providers if x.__class__.__name__.lower() == requested_provider.lower()),
                None,
            )
            if not proxy_provider:
                available_providers = [x.__class__.__name__ for x in proxy_providers]
                raise ValueError(
                    f"The proxy provider '{requested_provider}' was not recognized. Available providers: {available_providers}"
                )

            proxy_uri = proxy_provider.get_proxy(proxy)
            if not proxy_uri:
                raise ValueError(f"The proxy provider {requested_provider} had no proxy for {proxy}")

            log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy_uri}")
            return proxy_uri
        else:
            # Try all providers
            for proxy_provider in proxy_providers:
                proxy_uri = proxy_provider.get_proxy(proxy)
                if proxy_uri:
                    log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy_uri}")
                    return proxy_uri

            raise ValueError(f"No proxy provider had a proxy for {proxy}")

    # Return as-is if not recognized format
    log.info(f"Using explicit Proxy: {proxy}")
    return proxy
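`resolve_proxy` accepts three input shapes; a short sketch under the assumption that at least one provider is configured (the URIs and region codes below are examples):

```python
from unshackle.core.api.handlers import initialize_proxy_providers, resolve_proxy

providers = initialize_proxy_providers()

# 1. An explicit http(s) URI passes straight through:
print(resolve_proxy("http://user:pass@127.0.0.1:8080", providers))

# 2. "provider:country" targets one provider by class name (case-insensitive):
print(resolve_proxy("nordvpn:us", providers))

# 3. A bare country code tries every loaded provider in order:
print(resolve_proxy("us", providers))
```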
def validate_service(service_tag: str) -> Optional[str]:
    """Validate and normalize service tag."""
    try:
        normalized = Services.get_tag(service_tag)
        service_path = Services.get_path(normalized)
        if not service_path.exists():
            return None
        return normalized
    except Exception:
        return None


def serialize_title(title: Title_T) -> Dict[str, Any]:
    """Convert a title object to JSON-serializable dict."""
    if isinstance(title, Episode):
        episode_name = title.name if title.name else f"Episode {title.number:02d}"
        result = {
            "type": "episode",
            "name": episode_name,
            "series_title": str(title.title),
            "season": title.season,
            "number": title.number,
            "year": title.year,
            "id": str(title.id) if hasattr(title, "id") else None,
        }
    elif isinstance(title, Movie):
        result = {
            "type": "movie",
            "name": str(title.name) if hasattr(title, "name") else str(title),
            "year": title.year,
            "id": str(title.id) if hasattr(title, "id") else None,
        }
    else:
        result = {
            "type": "other",
            "name": str(title.name) if hasattr(title, "name") else str(title),
            "id": str(title.id) if hasattr(title, "id") else None,
        }

    return result


def serialize_video_track(track: Video) -> Dict[str, Any]:
    """Convert video track to JSON-serializable dict."""
    codec_name = track.codec.name if hasattr(track.codec, "name") else str(track.codec)
    range_name = track.range.name if hasattr(track.range, "name") else str(track.range)

    return {
        "id": str(track.id),
        "codec": codec_name,
        "codec_display": VIDEO_CODEC_MAP.get(codec_name, codec_name),
        "bitrate": int(track.bitrate / 1000) if track.bitrate else None,
        "width": track.width,
        "height": track.height,
        "resolution": f"{track.width}x{track.height}" if track.width and track.height else None,
        "fps": track.fps if track.fps else None,
        "range": range_name,
        "range_display": DYNAMIC_RANGE_MAP.get(range_name, range_name),
        "language": str(track.language) if track.language else None,
        "drm": str(track.drm) if hasattr(track, "drm") and track.drm else None,
    }


def serialize_audio_track(track: Audio) -> Dict[str, Any]:
    """Convert audio track to JSON-serializable dict."""
    codec_name = track.codec.name if hasattr(track.codec, "name") else str(track.codec)

    return {
        "id": str(track.id),
        "codec": codec_name,
        "codec_display": AUDIO_CODEC_MAP.get(codec_name, codec_name),
        "bitrate": int(track.bitrate / 1000) if track.bitrate else None,
        "channels": track.channels if track.channels else None,
        "language": str(track.language) if track.language else None,
        "atmos": track.atmos if hasattr(track, "atmos") else False,
        "descriptive": track.descriptive if hasattr(track, "descriptive") else False,
        "drm": str(track.drm) if hasattr(track, "drm") and track.drm else None,
    }


def serialize_subtitle_track(track: Subtitle) -> Dict[str, Any]:
    """Convert subtitle track to JSON-serializable dict."""
    return {
        "id": str(track.id),
        "codec": track.codec.name if hasattr(track.codec, "name") else str(track.codec),
        "language": str(track.language) if track.language else None,
        "forced": track.forced if hasattr(track, "forced") else False,
        "sdh": track.sdh if hasattr(track, "sdh") else False,
        "cc": track.cc if hasattr(track, "cc") else False,
    }
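For reference, the shape `serialize_title` produces for an Episode (all values invented):

```python
# Illustrative output of serialize_title() for an Episode:
example_episode = {
    "type": "episode",
    "name": "Episode 01",        # falls back to f"Episode {number:02d}" when unnamed
    "series_title": "Example Show",
    "season": 1,
    "number": 1,
    "year": 2024,
    "id": "ep-101",
}
```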
async def list_titles_handler(data: Dict[str, Any]) -> web.Response:
    """Handle list-titles request."""
    service_tag = data.get("service")
    title_id = data.get("title_id")
    profile = data.get("profile")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        import inspect

        import click
        import yaml

        from unshackle.commands.dl import dl
        from unshackle.core.config import config
        from unshackle.core.utils.click_types import ContextData
        from unshackle.core.utils.collections import merge_dict

        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title_id], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title_id}

        # Add additional parameters from request data
        for key, value in data.items():
            if key not in ["service", "title_id", "profile", "season", "episode", "wanted", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameter info and click command defaults
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract default values from the click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    # Add default value if parameter is not already provided
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters that don't have click defaults
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                # Check if parameter is required (no default value in signature)
                if param_info.default is inspect.Parameter.empty:
                    # Provide sensible defaults for common required parameters
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False
                    else:
                        # Log warning for unknown required parameters
                        log.warning(f"Unknown required parameter '{param_name}' for service {normalized_service}")

        # Filter out any parameters that the service doesn't accept
        filtered_kwargs = {}
        for key, value in service_kwargs.items():
            if key in service_init_params:
                filtered_kwargs[key] = value

        service_instance = service_module(service_ctx, **filtered_kwargs)

        cookies = dl.get_cookie_jar(normalized_service, profile)
        credential = dl.get_credentials(normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        titles = service_instance.get_titles()

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            title_list = [serialize_title(t) for t in titles]
        else:
            title_list = [serialize_title(titles)]

        return web.json_response({"titles": title_list})

    except Exception as e:
        log.exception("Error listing titles")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
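Because the handler takes the parsed JSON body rather than a raw request, it can be exercised directly. A hedged sketch; the service tag and title are placeholders, and a matching service must exist on disk:

```python
import asyncio

from unshackle.core.api.handlers import list_titles_handler


async def demo() -> None:
    response = await list_titles_handler(
        {"service": "EXAMPLE", "title_id": "title-123", "profile": "default"}
    )
    print(response.status, response.text)  # JSON body of the aiohttp response


asyncio.run(demo())
```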
async def list_tracks_handler(data: Dict[str, Any]) -> web.Response:
    """Handle list-tracks request."""
    service_tag = data.get("service")
    title_id = data.get("title_id")
    profile = data.get("profile")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        import inspect

        import click
        import yaml

        from unshackle.commands.dl import dl
        from unshackle.core.config import config
        from unshackle.core.utils.click_types import ContextData
        from unshackle.core.utils.collections import merge_dict

        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title_id], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title_id}

        # Add additional parameters from request data
        for key, value in data.items():
            if key not in ["service", "title_id", "profile", "season", "episode", "wanted", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameter info and click command defaults
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract default values from the click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    # Add default value if parameter is not already provided
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters that don't have click defaults
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                # Check if parameter is required (no default value in signature)
                if param_info.default is inspect.Parameter.empty:
                    # Provide sensible defaults for common required parameters
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False
                    else:
                        # Log warning for unknown required parameters
                        log.warning(f"Unknown required parameter '{param_name}' for service {normalized_service}")

        # Filter out any parameters that the service doesn't accept
        filtered_kwargs = {}
        for key, value in service_kwargs.items():
            if key in service_init_params:
                filtered_kwargs[key] = value

        service_instance = service_module(service_ctx, **filtered_kwargs)

        cookies = dl.get_cookie_jar(normalized_service, profile)
        credential = dl.get_credentials(normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        titles = service_instance.get_titles()

        wanted_param = data.get("wanted")
        season = data.get("season")
        episode = data.get("episode")

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            titles_list = list(titles)

            wanted = None
            if wanted_param:
                from unshackle.core.utils.click_types import SeasonRange

                try:
                    season_range = SeasonRange()
                    wanted = season_range.parse_tokens(wanted_param)
                    log.debug(f"Parsed wanted '{wanted_param}' into {len(wanted)} episodes: {wanted[:10]}...")
                except Exception as e:
                    return web.json_response(
                        {"status": "error", "message": f"Invalid wanted parameter: {e}"}, status=400
                    )
            elif season is not None and episode is not None:
                wanted = [f"{season}x{episode}"]

            if wanted:
                # Filter titles based on wanted episodes, similar to how dl.py does it
                matching_titles = []
                log.debug(f"Filtering {len(titles_list)} titles with {len(wanted)} wanted episodes")
                for title in titles_list:
                    if isinstance(title, Episode):
                        episode_key = f"{title.season}x{title.number}"
                        if episode_key in wanted:
                            log.debug(f"Episode {episode_key} matches wanted list")
                            matching_titles.append(title)
                        else:
                            log.debug(f"Episode {episode_key} not in wanted list")
                    else:
                        matching_titles.append(title)

                log.debug(f"Found {len(matching_titles)} matching titles")

                if not matching_titles:
                    return web.json_response(
                        {"status": "error", "message": "No episodes found matching wanted criteria"}, status=404
                    )

                # If multiple episodes match, return tracks for all episodes
                if len(matching_titles) > 1 and all(isinstance(t, Episode) for t in matching_titles):
                    episodes_data = []
                    failed_episodes = []

                    # Sort matching titles by season and episode number for consistent ordering
                    sorted_titles = sorted(matching_titles, key=lambda t: (t.season, t.number))

                    for title in sorted_titles:
                        try:
                            tracks = service_instance.get_tracks(title)
                            video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
                            audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)

                            episode_data = {
                                "title": serialize_title(title),
                                "video": [serialize_video_track(t) for t in video_tracks],
                                "audio": [serialize_audio_track(t) for t in audio_tracks],
                                "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
                            }
                            episodes_data.append(episode_data)
                            log.debug(f"Successfully got tracks for {title.season}x{title.number}")
                        except SystemExit:
                            # Service calls sys.exit() for unavailable episodes - catch and skip
                            failed_episodes.append(f"S{title.season}E{title.number:02d}")
                            log.debug(f"Episode {title.season}x{title.number} not available, skipping")
                            continue
                        except Exception as e:
                            # Handle other errors gracefully
                            failed_episodes.append(f"S{title.season}E{title.number:02d}")
                            log.debug(f"Error getting tracks for {title.season}x{title.number}: {e}")
                            continue

                    if episodes_data:
                        response = {"episodes": episodes_data}
                        if failed_episodes:
                            response["unavailable_episodes"] = failed_episodes
                        return web.json_response(response)
                    else:
                        return web.json_response(
                            {
                                "status": "error",
                                "message": f"No available episodes found. Unavailable: {', '.join(failed_episodes)}",
                            },
                            status=404,
                        )
                else:
                    # Single episode or movie
                    first_title = matching_titles[0]
            else:
                first_title = titles_list[0]
        else:
            first_title = titles

        tracks = service_instance.get_tracks(first_title)

        video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
        audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)

        response = {
            "title": serialize_title(first_title),
            "video": [serialize_video_track(t) for t in video_tracks],
            "audio": [serialize_audio_track(t) for t in audio_tracks],
            "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
        }

        return web.json_response(response)

    except Exception as e:
        log.exception("Error listing tracks")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
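The wanted filter above keys episodes by "{season}x{number}"; the same matching rule in isolation:

```python
# Self-contained sketch of the episode matching used above (tuples invented).
episodes = [(1, 1), (1, 2), (2, 1)]  # (season, number)
wanted = ["1x2", "2x1"]

matching = [(s, n) for s, n in episodes if f"{s}x{n}" in wanted]
assert matching == [(1, 2), (2, 1)]
```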
async def download_handler(data: Dict[str, Any]) -> web.Response:
    """Handle download request - create and queue a download job."""
    from unshackle.core.api.download_manager import get_download_manager

    service_tag = data.get("service")
    title_id = data.get("title_id")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        # Get download manager and start workers if needed
        manager = get_download_manager()
        await manager.start_workers()

        # Create download job with filtered parameters (exclude service and title_id as they're already passed)
        filtered_params = {k: v for k, v in data.items() if k not in ["service", "title_id"]}
        job = manager.create_job(normalized_service, title_id, **filtered_params)

        return web.json_response(
            {"job_id": job.job_id, "status": job.status.value, "created_time": job.created_time.isoformat()}, status=202
        )

    except Exception as e:
        log.exception("Error creating download job")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
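Client-side, queuing a download is a single POST. The route and port below are assumptions, since routing is registered elsewhere in the server:

```python
import asyncio

import aiohttp


async def start_download() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.post(
            "http://127.0.0.1:8786/api/download",  # hypothetical route and port
            json={"service": "EXAMPLE", "title_id": "title-123"},
        ) as resp:
            print(resp.status, await resp.json())  # expect 202 with a job_id


asyncio.run(start_download())
```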
async def list_download_jobs_handler(data: Dict[str, Any]) -> web.Response:
    """Handle list download jobs request."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        manager = get_download_manager()
        jobs = manager.list_jobs()

        job_list = [job.to_dict(include_full_details=False) for job in jobs]

        return web.json_response({"jobs": job_list})

    except Exception as e:
        log.exception("Error listing download jobs")
        return web.json_response({"status": "error", "message": str(e)}, status=500)


async def get_download_job_handler(job_id: str) -> web.Response:
    """Handle get specific download job request."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        manager = get_download_manager()
        job = manager.get_job(job_id)

        if not job:
            return web.json_response({"status": "error", "message": "Job not found"}, status=404)

        return web.json_response(job.to_dict(include_full_details=True))

    except Exception as e:
        log.exception(f"Error getting download job {job_id}")
        return web.json_response({"status": "error", "message": str(e)}, status=500)


async def cancel_download_job_handler(job_id: str) -> web.Response:
    """Handle cancel download job request."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        manager = get_download_manager()

        if not manager.get_job(job_id):
            return web.json_response({"status": "error", "message": "Job not found"}, status=404)

        success = manager.cancel_job(job_id)

        if success:
            return web.json_response({"status": "success", "message": "Job cancelled"})
        else:
            return web.json_response({"status": "error", "message": "Job cannot be cancelled"}, status=400)

    except Exception as e:
        log.exception(f"Error cancelling download job {job_id}")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
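A sketch of polling a job to completion using the handler directly; the terminal status strings here are assumptions about the JobStatus values:

```python
import asyncio
import json

from unshackle.core.api.handlers import get_download_job_handler


async def wait_for_job(job_id: str) -> dict:
    while True:
        response = await get_download_job_handler(job_id)
        job = json.loads(response.text)
        status = str(job.get("status", "")).lower()
        if status in ("completed", "failed", "cancelled"):  # assumed terminal states
            return job
        await asyncio.sleep(2)
```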
unshackle/core/api/remote_handlers.py (new file, 941 lines)
@@ -0,0 +1,941 @@
"""API handlers for remote service functionality."""

import http.cookiejar
import inspect
import logging
import tempfile
from pathlib import Path
from typing import Any, Dict, Optional

import click
import yaml
from aiohttp import web

from unshackle.commands.dl import dl
from unshackle.core.api.handlers import (initialize_proxy_providers, resolve_proxy, serialize_audio_track,
                                         serialize_subtitle_track, serialize_title, serialize_video_track,
                                         validate_service)
from unshackle.core.api.session_serializer import serialize_session
from unshackle.core.config import config
from unshackle.core.credential import Credential
from unshackle.core.search_result import SearchResult
from unshackle.core.services import Services
from unshackle.core.titles import Episode
from unshackle.core.utils.click_types import ContextData
from unshackle.core.utils.collections import merge_dict

log = logging.getLogger("api.remote")


def load_cookies_from_content(cookies_content: Optional[str]) -> Optional[http.cookiejar.MozillaCookieJar]:
    """
    Load cookies from raw cookie file content.

    Args:
        cookies_content: Raw content of a Netscape/Mozilla format cookie file

    Returns:
        MozillaCookieJar object or None
    """
    if not cookies_content:
        return None

    # Write to a temporary file
    with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f:
        f.write(cookies_content)
        temp_path = f.name

    try:
        # Load using the standard cookie jar
        cookie_jar = http.cookiejar.MozillaCookieJar(temp_path)
        cookie_jar.load(ignore_discard=True, ignore_expires=True)
        return cookie_jar
    finally:
        # Clean up the temp file
        Path(temp_path).unlink(missing_ok=True)


def create_credential_from_dict(cred_data: Optional[Dict[str, str]]) -> Optional[Credential]:
    """
    Create a Credential object from a dictionary.

    Args:
        cred_data: Dictionary with 'username' and 'password' keys

    Returns:
        Credential object or None
    """
    if not cred_data or "username" not in cred_data or "password" not in cred_data:
        return None

    return Credential(username=cred_data["username"], password=cred_data["password"])


def get_auth_from_request(data: Dict[str, Any], service_tag: str, profile: Optional[str] = None):
    """
    Get authentication (cookies and credentials) from request data, falling back to the server config.

    Args:
        data: Request data
        service_tag: Service tag
        profile: Profile name

    Returns:
        Tuple of (cookies, credential)
    """
    # Try to get auth from the client request first
    cookies_content = data.get("cookies")
    credential_data = data.get("credential")

    if cookies_content:
        cookies = load_cookies_from_content(cookies_content)
    else:
        # Fall back to server-side cookies if not provided by the client
        cookies = dl.get_cookie_jar(service_tag, profile)

    if credential_data:
        credential = create_credential_from_dict(credential_data)
    else:
        # Fall back to server-side credentials if not provided by the client
        credential = dl.get_credentials(service_tag, profile)

    return cookies, credential
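A sketch of the client-supplied auth fields these helpers consume; the cookie line and credentials are invented, but the Netscape header line is required by MozillaCookieJar:

```python
from unshackle.core.api.remote_handlers import get_auth_from_request

request_data = {
    "cookies": (
        "# Netscape HTTP Cookie File\n"
        ".example.com\tTRUE\t/\tFALSE\t2147483647\tsession\tabc123\n"
    ),
    "credential": {"username": "user@example.com", "password": "hunter2"},
}

# With both fields present, no server-side profile lookup is needed.
cookies, credential = get_auth_from_request(request_data, "EXAMPLE", profile=None)
```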
async def remote_list_services(request: web.Request) -> web.Response:
    """
    List all available services on this remote server.
    ---
    summary: List remote services
    description: Get all available services that can be accessed remotely
    responses:
      '200':
        description: List of available services
        content:
          application/json:
            schema:
              type: object
              properties:
                status:
                  type: string
                  example: success
                services:
                  type: array
                  items:
                    type: object
                    properties:
                      tag:
                        type: string
                      aliases:
                        type: array
                        items:
                          type: string
                      geofence:
                        type: array
                        items:
                          type: string
                      help:
                        type: string
      '500':
        description: Server error
    """
    try:
        service_tags = Services.get_tags()
        services_info = []

        for tag in service_tags:
            service_data = {
                "tag": tag,
                "aliases": [],
                "geofence": [],
                "help": None,
            }

            try:
                service_module = Services.load(tag)

                if hasattr(service_module, "ALIASES"):
                    service_data["aliases"] = list(service_module.ALIASES)

                if hasattr(service_module, "GEOFENCE"):
                    service_data["geofence"] = list(service_module.GEOFENCE)

                if service_module.__doc__:
                    service_data["help"] = service_module.__doc__.strip()

            except Exception as e:
                log.warning(f"Could not load details for service {tag}: {e}")

            services_info.append(service_data)

        return web.json_response({"status": "success", "services": services_info})

    except Exception as e:
        log.exception("Error listing remote services")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
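An illustrative response body for the listing above (values invented):

```python
example_response = {
    "status": "success",
    "services": [
        {
            "tag": "EXAMPLE",
            "aliases": ["EX"],
            "geofence": ["us"],
            "help": "Example service docstring",
        }
    ],
}
```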
async def remote_search(request: web.Request) -> web.Response:
    """
    Search for content on a remote service.
    ---
    summary: Search remote service
    description: Search for content using a remote service
    parameters:
      - name: service
        in: path
        required: true
        schema:
          type: string
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - query
            properties:
              query:
                type: string
                description: Search query
              profile:
                type: string
                description: Profile to use for credentials
    responses:
      '200':
        description: Search results
      '400':
        description: Invalid request
      '500':
        description: Server error
    """
    service_tag = request.match_info.get("service")

    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    query = data.get("query")
    if not query:
        return web.json_response({"status": "error", "message": "Missing required parameter: query"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        profile = data.get("profile")

        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        # Get service initialization parameters
        service_init_params = inspect.signature(service_module.__init__).parameters
        service_kwargs = {}

        # Extract defaults from click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Add query parameter
        if "query" in service_init_params:
            service_kwargs["query"] = query

        # Filter to only valid parameters
        filtered_kwargs = {k: v for k, v in service_kwargs.items() if k in service_init_params}

        service_instance = service_module(service_ctx, **filtered_kwargs)

        # Authenticate with client-provided or server-side auth
        cookies, credential = get_auth_from_request(data, normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        # Perform search
        search_results = []
        if hasattr(service_instance, "search"):
            for result in service_instance.search():
                if isinstance(result, SearchResult):
                    search_results.append(
                        {
                            "id": str(result.id_),
                            "title": result.title,
                            "description": result.description,
                            "label": result.label,
                            "url": result.url,
                        }
                    )

        # Serialize session data
        session_data = serialize_session(service_instance.session)

        return web.json_response({"status": "success", "results": search_results, "session": session_data})

    except Exception as e:
        log.exception("Error performing remote search")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
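A hedged client sketch for the search endpoint; the route is an assumption (registration happens elsewhere), but the request body matches the handler above:

```python
import asyncio

import aiohttp


async def remote_search_demo() -> None:
    async with aiohttp.ClientSession() as http:
        async with http.post(
            "http://127.0.0.1:8786/api/remote/EXAMPLE/search",  # hypothetical route
            json={"query": "example show", "no_proxy": True},
        ) as resp:
            body = await resp.json()
            # "session" carries serialized cookies/headers for follow-up calls
            print(body.get("results"), body.get("session"))


asyncio.run(remote_search_demo())
```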
async def remote_get_titles(request: web.Request) -> web.Response:
    """
    Get titles from a remote service.
    ---
    summary: Get titles from remote service
    description: Get available titles for content from a remote service
    parameters:
      - name: service
        in: path
        required: true
        schema:
          type: string
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - title
            properties:
              title:
                type: string
                description: Title identifier, URL, or any format accepted by the service
              profile:
                type: string
                description: Profile to use for credentials
              proxy:
                type: string
                description: Proxy region code (e.g., "ca", "us") or full proxy URL - uses server's proxy configuration
              no_proxy:
                type: boolean
                description: Disable proxy usage
              cookies:
                type: string
                description: Raw Netscape/Mozilla format cookie file content (optional - uses server cookies if not provided)
              credential:
                type: object
                description: Credentials object with username and password (optional - uses server credentials if not provided)
                properties:
                  username:
                    type: string
                  password:
                    type: string
    responses:
      '200':
        description: Titles and session data
      '400':
        description: Invalid request
      '500':
        description: Server error
    """
    service_tag = request.match_info.get("service")

    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    # Accept 'title', 'title_id', or 'url' for flexibility
    title = data.get("title") or data.get("title_id") or data.get("url")
    if not title:
        return web.json_response(
            {
                "status": "error",
                "message": "Missing required parameter: title (can be URL, ID, or any format accepted by the service)",
            },
            status=400,
        )

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        profile = data.get("profile")

        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title}

        # Add additional parameters from request data
        for key, value in data.items():
            if key not in ["title", "title_id", "url", "profile", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameter info and click command defaults
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract default values from the click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                if param_info.default is inspect.Parameter.empty:
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False

        # Filter to only valid parameters
        filtered_kwargs = {k: v for k, v in service_kwargs.items() if k in service_init_params}

        service_instance = service_module(service_ctx, **filtered_kwargs)

        # Authenticate with client-provided or server-side auth
        cookies, credential = get_auth_from_request(data, normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        # Get titles
        titles = service_instance.get_titles()

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            title_list = [serialize_title(t) for t in titles]
        else:
            title_list = [serialize_title(titles)]

        # Serialize session data
        session_data = serialize_session(service_instance.session)

        return web.json_response({"status": "success", "titles": title_list, "session": session_data})

    except Exception as e:
        log.exception("Error getting remote titles")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
async def remote_get_tracks(request: web.Request) -> web.Response:
    """
    Get tracks from a remote service.
    ---
    summary: Get tracks from remote service
    description: Get available tracks for a title from a remote service
    parameters:
      - name: service
        in: path
        required: true
        schema:
          type: string
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - title
            properties:
              title:
                type: string
                description: Title identifier, URL, or any format accepted by the service
              wanted:
                type: string
                description: Specific episodes/seasons
              profile:
                type: string
                description: Profile to use for credentials
              proxy:
                type: string
                description: Proxy region code (e.g., "ca", "us") or full proxy URL - uses server's proxy configuration
              no_proxy:
                type: boolean
                description: Disable proxy usage
              cookies:
                type: string
                description: Raw Netscape/Mozilla format cookie file content (optional - uses server cookies if not provided)
              credential:
                type: object
                description: Credentials object with username and password (optional - uses server credentials if not provided)
                properties:
                  username:
                    type: string
                  password:
                    type: string
    responses:
      '200':
        description: Tracks and session data
      '400':
        description: Invalid request
      '500':
        description: Server error
    """
    service_tag = request.match_info.get("service")

    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    # Accept 'title', 'title_id', or 'url' for flexibility
    title = data.get("title") or data.get("title_id") or data.get("url")
    if not title:
        return web.json_response(
            {
                "status": "error",
                "message": "Missing required parameter: title (can be URL, ID, or any format accepted by the service)",
            },
            status=400,
        )

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        profile = data.get("profile")

        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title}

        # Add additional parameters
        for key, value in data.items():
            if key not in ["title", "title_id", "url", "profile", "wanted", "season", "episode", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameters
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract defaults from click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                if param_info.default is inspect.Parameter.empty:
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False

        # Filter to valid parameters
        filtered_kwargs = {k: v for k, v in service_kwargs.items() if k in service_init_params}

        service_instance = service_module(service_ctx, **filtered_kwargs)

        # Authenticate with client-provided or server-side auth
        cookies, credential = get_auth_from_request(data, normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        # Get titles
        titles = service_instance.get_titles()

        wanted_param = data.get("wanted")
        season = data.get("season")
        episode = data.get("episode")

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            titles_list = list(titles)

            wanted = None
            if wanted_param:
                from unshackle.core.utils.click_types import SeasonRange

                try:
                    season_range = SeasonRange()
                    wanted = season_range.parse_tokens(wanted_param)
                except Exception as e:
                    return web.json_response(
                        {"status": "error", "message": f"Invalid wanted parameter: {e}"}, status=400
                    )
            elif season is not None and episode is not None:
                wanted = [f"{season}x{episode}"]

            if wanted:
                matching_titles = []
                for title in titles_list:
                    if isinstance(title, Episode):
                        episode_key = f"{title.season}x{title.number}"
                        if episode_key in wanted:
                            matching_titles.append(title)
                    else:
                        matching_titles.append(title)

                if not matching_titles:
                    return web.json_response(
                        {"status": "error", "message": "No episodes found matching wanted criteria"}, status=404
                    )

                # Handle multiple episodes
                if len(matching_titles) > 1 and all(isinstance(t, Episode) for t in matching_titles):
                    episodes_data = []
                    failed_episodes = []

                    sorted_titles = sorted(matching_titles, key=lambda t: (t.season, t.number))

                    for title in sorted_titles:
                        try:
                            tracks = service_instance.get_tracks(title)
                            video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
                            audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)

                            episode_data = {
                                "title": serialize_title(title),
                                "video": [serialize_video_track(t) for t in video_tracks],
                                "audio": [serialize_audio_track(t) for t in audio_tracks],
                                "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
                            }
                            episodes_data.append(episode_data)
                        except (SystemExit, Exception):
                            failed_episodes.append(f"S{title.season}E{title.number:02d}")
                            continue

                    if episodes_data:
                        session_data = serialize_session(service_instance.session)
                        response = {"status": "success", "episodes": episodes_data, "session": session_data}
                        if failed_episodes:
                            response["unavailable_episodes"] = failed_episodes
                        return web.json_response(response)
                    else:
                        return web.json_response(
                            {
                                "status": "error",
                                "message": f"No available episodes. Unavailable: {', '.join(failed_episodes)}",
                            },
                            status=404,
                        )
                else:
                    first_title = matching_titles[0]
|
||||
else:
|
||||
first_title = titles_list[0]
|
||||
else:
|
||||
first_title = titles
|
||||
|
||||
# Get tracks for single title
|
||||
tracks = service_instance.get_tracks(first_title)
|
||||
|
||||
video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
|
||||
audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)
|
||||
|
||||
# Serialize session data
|
||||
session_data = serialize_session(service_instance.session)
|
||||
|
||||
response_data = {
|
||||
"status": "success",
|
||||
"title": serialize_title(first_title),
|
||||
"video": [serialize_video_track(t) for t in video_tracks],
|
||||
"audio": [serialize_audio_track(t) for t in audio_tracks],
|
||||
"subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
|
||||
"session": session_data,
|
||||
}
|
||||
|
||||
return web.json_response(response_data)
|
||||
|
||||
except Exception as e:
|
||||
log.exception("Error getting remote tracks")
|
||||
return web.json_response({"status": "error", "message": str(e)}, status=500)
|
||||
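For reference, a request to this endpoint (registered further down as POST /api/remote/{service}/tracks) might look like the sketch below. The host, service tag, title, and API key header are placeholders, not values taken from this diff.

import requests

# Illustrative client call; not part of the diff itself.
resp = requests.post(
    "http://localhost:8080/api/remote/EXAMPLE/tracks",  # placeholder host and service tag
    json={
        "title": "https://example.com/watch/12345",  # URL, ID, or any service-accepted format
        "season": 1,    # optional; the handler combines these into "1x1" episode matching
        "episode": 1,
        "proxy": "us",  # optional proxy region code
    },
    headers={"X-API-Key": "..."},  # assumed auth header; it is allowed by the CORS middleware below
)
print(resp.json()["status"])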


async def remote_get_chapters(request: web.Request) -> web.Response:
    """
    Get chapters from a remote service.
    ---
    summary: Get chapters from remote service
    description: Get available chapters for a title from a remote service
    parameters:
      - name: service
        in: path
        required: true
        schema:
          type: string
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - title
            properties:
              title:
                type: string
                description: Title identifier, URL, or any format accepted by the service
              profile:
                type: string
                description: Profile to use for credentials
              proxy:
                type: string
                description: Proxy region code (e.g., "ca", "us") or full proxy URL - uses server's proxy configuration
              no_proxy:
                type: boolean
                description: Disable proxy usage
              cookies:
                type: string
                description: Raw Netscape/Mozilla format cookie file content (optional - uses server cookies if not provided)
              credential:
                type: object
                description: Credentials object with username and password (optional - uses server credentials if not provided)
                properties:
                  username:
                    type: string
                  password:
                    type: string
    responses:
      '200':
        description: Chapters and session data
      '400':
        description: Invalid request
      '500':
        description: Server error
    """
    service_tag = request.match_info.get("service")

    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    # Accept 'title', 'title_id', or 'url' for flexibility
    title = data.get("title") or data.get("title_id") or data.get("url")
    if not title:
        return web.json_response(
            {
                "status": "error",
                "message": "Missing required parameter: title (can be URL, ID, or any format accepted by the service)",
            },
            status=400,
        )

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        profile = data.get("profile")

        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title}

        # Add additional parameters
        for key, value in data.items():
            if key not in ["title", "title_id", "url", "profile", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameters
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract defaults
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                if param_info.default is inspect.Parameter.empty:
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False

        # Filter to valid parameters
        filtered_kwargs = {k: v for k, v in service_kwargs.items() if k in service_init_params}

        service_instance = service_module(service_ctx, **filtered_kwargs)

        # Authenticate with client-provided or server-side auth
        cookies, credential = get_auth_from_request(data, normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        # Get titles
        titles = service_instance.get_titles()

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            first_title = list(titles)[0]
        else:
            first_title = titles

        # Get chapters if service supports it
        chapters_data = []
        if hasattr(service_instance, "get_chapters"):
            chapters = service_instance.get_chapters(first_title)
            if chapters:
                for chapter in chapters:
                    chapters_data.append(
                        {
                            "timestamp": chapter.timestamp,
                            "name": chapter.name if hasattr(chapter, "name") else None,
                        }
                    )

        # Serialize session data
        session_data = serialize_session(service_instance.session)

        return web.json_response({"status": "success", "chapters": chapters_data, "session": session_data})

    except Exception as e:
        log.exception("Error getting remote chapters")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
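On success the handler returns a body shaped like the sketch below. The timestamp format depends on the service's Chapter objects, so treat these values as illustrative.

# Illustrative response body for POST /api/remote/{service}/chapters; not part of the diff.
{
    "status": "success",
    "chapters": [
        {"timestamp": "00:00:00.000", "name": "Intro"},  # placeholder values
        {"timestamp": "00:05:30.000", "name": None},
    ],
    "session": {"cookies": {}, "headers": {}, "proxies": {}},
}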
unshackle/core/api/routes.py · 403 lines · new file
@@ -0,0 +1,403 @@
import logging

from aiohttp import web
from aiohttp_swagger3 import SwaggerDocs, SwaggerInfo, SwaggerUiSettings

from unshackle.core import __version__
from unshackle.core.api.handlers import (cancel_download_job_handler, download_handler, get_download_job_handler,
                                         list_download_jobs_handler, list_titles_handler, list_tracks_handler)
from unshackle.core.api.remote_handlers import (remote_get_chapters, remote_get_titles, remote_get_tracks,
                                                remote_list_services, remote_search)
from unshackle.core.services import Services
from unshackle.core.update_checker import UpdateChecker


@web.middleware
async def cors_middleware(request: web.Request, handler):
    """Add CORS headers to all responses."""
    # Handle preflight requests
    if request.method == "OPTIONS":
        response = web.Response()
    else:
        response = await handler(request)

    # Add CORS headers
    response.headers["Access-Control-Allow-Origin"] = "*"
    response.headers["Access-Control-Allow-Methods"] = "GET, POST, PUT, DELETE, OPTIONS"
    response.headers["Access-Control-Allow-Headers"] = "Content-Type, X-API-Key, Authorization"
    response.headers["Access-Control-Max-Age"] = "3600"

    return response

log = logging.getLogger("api")


async def health(request: web.Request) -> web.Response:
    """
    Health check endpoint.
    ---
    summary: Health check
    description: Get server health status, version info, and update availability
    responses:
      '200':
        description: Health status
        content:
          application/json:
            schema:
              type: object
              properties:
                status:
                  type: string
                  example: ok
                version:
                  type: string
                  example: "2.0.0"
                update_check:
                  type: object
                  properties:
                    update_available:
                      type: boolean
                      nullable: true
                    current_version:
                      type: string
                    latest_version:
                      type: string
                      nullable: true
    """
    try:
        latest_version = await UpdateChecker.check_for_updates(__version__)
        update_info = {
            "update_available": latest_version is not None,
            "current_version": __version__,
            "latest_version": latest_version,
        }
    except Exception as e:
        log.warning(f"Failed to check for updates: {e}")
        update_info = {"update_available": None, "current_version": __version__, "latest_version": None}

    return web.json_response({"status": "ok", "version": __version__, "update_check": update_info})
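A successful response mirrors the schema above; a minimal sketch of what a client receives (version numbers are illustrative):

# Illustrative payload only; actual values depend on the running server.
{
    "status": "ok",
    "version": "2.0.0",
    "update_check": {
        "update_available": False,
        "current_version": "2.0.0",
        "latest_version": None,
    },
}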
async def services(request: web.Request) -> web.Response:
    """
    List available services.
    ---
    summary: List services
    description: Get all available streaming services with their details
    responses:
      '200':
        description: List of services
        content:
          application/json:
            schema:
              type: object
              properties:
                services:
                  type: array
                  items:
                    type: object
                    properties:
                      tag:
                        type: string
                      aliases:
                        type: array
                        items:
                          type: string
                      geofence:
                        type: array
                        items:
                          type: string
                      title_regex:
                        type: string
                        nullable: true
                      help:
                        type: string
                        nullable: true
      '500':
        description: Server error
    """
    try:
        service_tags = Services.get_tags()
        services_info = []

        for tag in service_tags:
            service_data = {"tag": tag, "aliases": [], "geofence": [], "title_regex": None, "help": None}

            try:
                service_module = Services.load(tag)

                if hasattr(service_module, "ALIASES"):
                    service_data["aliases"] = list(service_module.ALIASES)

                if hasattr(service_module, "GEOFENCE"):
                    service_data["geofence"] = list(service_module.GEOFENCE)

                if hasattr(service_module, "TITLE_RE"):
                    service_data["title_regex"] = service_module.TITLE_RE

                if service_module.__doc__:
                    service_data["help"] = service_module.__doc__.strip()

            except Exception as e:
                log.warning(f"Could not load details for service {tag}: {e}")

            services_info.append(service_data)

        return web.json_response({"services": services_info})
    except Exception as e:
        log.exception("Error listing services")
        return web.json_response({"status": "error", "message": str(e)}, status=500)


async def list_titles(request: web.Request) -> web.Response:
    """
    List titles for a service and title ID.
    ---
    summary: List titles
    description: Get available titles for a service and title ID
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - service
              - title_id
            properties:
              service:
                type: string
                description: Service tag
              title_id:
                type: string
                description: Title identifier
    responses:
      '200':
        description: List of titles
      '400':
        description: Invalid request
    """
    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    return await list_titles_handler(data)


async def list_tracks(request: web.Request) -> web.Response:
    """
    List tracks for a title, separated by type.
    ---
    summary: List tracks
    description: Get available video, audio, and subtitle tracks for a title
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - service
              - title_id
            properties:
              service:
                type: string
                description: Service tag
              title_id:
                type: string
                description: Title identifier
              wanted:
                type: string
                description: Specific episode/season (optional)
              proxy:
                type: string
                description: Proxy configuration (optional)
    responses:
      '200':
        description: Track information
      '400':
        description: Invalid request
    """
    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    return await list_tracks_handler(data)


async def download(request: web.Request) -> web.Response:
    """
    Download content based on provided parameters.
    ---
    summary: Download content
    description: Download video content based on specified parameters
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - service
              - title_id
            properties:
              service:
                type: string
                description: Service tag
              title_id:
                type: string
                description: Title identifier
    responses:
      '200':
        description: Download started
      '400':
        description: Invalid request
    """
    try:
        data = await request.json()
    except Exception:
        return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)

    return await download_handler(data)


async def download_jobs(request: web.Request) -> web.Response:
    """
    List all download jobs.
    ---
    summary: List download jobs
    description: Get list of all download jobs with their status
    responses:
      '200':
        description: List of download jobs
        content:
          application/json:
            schema:
              type: object
              properties:
                jobs:
                  type: array
                  items:
                    type: object
                    properties:
                      job_id:
                        type: string
                      status:
                        type: string
                      created_time:
                        type: string
                      service:
                        type: string
                      title_id:
                        type: string
                      progress:
                        type: number
      '500':
        description: Server error
    """
    return await list_download_jobs_handler({})


async def download_job_detail(request: web.Request) -> web.Response:
    """
    Get download job details.
    ---
    summary: Get download job
    description: Get detailed information about a specific download job
    parameters:
      - name: job_id
        in: path
        required: true
        schema:
          type: string
    responses:
      '200':
        description: Download job details
      '404':
        description: Job not found
      '500':
        description: Server error
    """
    job_id = request.match_info["job_id"]
    return await get_download_job_handler(job_id)


async def cancel_download_job(request: web.Request) -> web.Response:
    """
    Cancel download job.
    ---
    summary: Cancel download job
    description: Cancel a queued or running download job
    parameters:
      - name: job_id
        in: path
        required: true
        schema:
          type: string
    responses:
      '200':
        description: Job cancelled successfully
      '400':
        description: Job cannot be cancelled
      '404':
        description: Job not found
      '500':
        description: Server error
    """
    job_id = request.match_info["job_id"]
    return await cancel_download_job_handler(job_id)

def setup_routes(app: web.Application) -> None:
    """Setup all API routes."""
    app.router.add_get("/api/health", health)
    app.router.add_get("/api/services", services)
    app.router.add_post("/api/list-titles", list_titles)
    app.router.add_post("/api/list-tracks", list_tracks)
    app.router.add_post("/api/download", download)
    app.router.add_get("/api/download/jobs", download_jobs)
    app.router.add_get("/api/download/jobs/{job_id}", download_job_detail)
    app.router.add_delete("/api/download/jobs/{job_id}", cancel_download_job)

    # Remote service endpoints
    app.router.add_get("/api/remote/services", remote_list_services)
    app.router.add_post("/api/remote/{service}/search", remote_search)
    app.router.add_post("/api/remote/{service}/titles", remote_get_titles)
    app.router.add_post("/api/remote/{service}/tracks", remote_get_tracks)
    app.router.add_post("/api/remote/{service}/chapters", remote_get_chapters)


def setup_swagger(app: web.Application) -> None:
    """Setup Swagger UI documentation."""
    swagger = SwaggerDocs(
        app,
        swagger_ui_settings=SwaggerUiSettings(path="/api/docs/"),
        info=SwaggerInfo(
            title="Unshackle REST API",
            version=__version__,
            description="REST API for Unshackle - Modular Movie, TV, and Music Archival Software",
        ),
    )

    # Add routes with OpenAPI documentation
    swagger.add_routes(
        [
            web.get("/api/health", health),
            web.get("/api/services", services),
            web.post("/api/list-titles", list_titles),
            web.post("/api/list-tracks", list_tracks),
            web.post("/api/download", download),
            web.get("/api/download/jobs", download_jobs),
            web.get("/api/download/jobs/{job_id}", download_job_detail),
            web.delete("/api/download/jobs/{job_id}", cancel_download_job),
            # Remote service routes
            web.get("/api/remote/services", remote_list_services),
            web.post("/api/remote/{service}/search", remote_search),
            web.post("/api/remote/{service}/titles", remote_get_titles),
            web.post("/api/remote/{service}/tracks", remote_get_tracks),
            web.post("/api/remote/{service}/chapters", remote_get_chapters),
        ]
    )
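The server bootstrap that consumes these helpers is not part of this diff; a minimal sketch (entry point and port are assumptions) would be:

from aiohttp import web

# Hypothetical wiring; the real entry point lives elsewhere in the project.
app = web.Application(middlewares=[cors_middleware])
setup_swagger(app)  # registers the documented routes and serves Swagger UI at /api/docs/
web.run_app(app, port=8080)  # placeholder port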
unshackle/core/api/session_serializer.py · 236 lines · new file
@@ -0,0 +1,236 @@
"""Session serialization helpers for remote services."""

from http.cookiejar import CookieJar
from typing import Any, Dict, Optional

import requests

from unshackle.core.credential import Credential


def serialize_session(session: requests.Session) -> Dict[str, Any]:
    """
    Serialize a requests.Session into a JSON-serializable dictionary.

    Extracts cookies, headers, and other session data that can be
    transferred to a remote client for downloading.

    Args:
        session: The requests.Session to serialize

    Returns:
        Dictionary containing serialized session data
    """
    session_data = {
        "cookies": {},
        "headers": {},
        "proxies": session.proxies.copy() if session.proxies else {},
    }

    # Serialize cookies
    if session.cookies:
        for cookie in session.cookies:
            session_data["cookies"][cookie.name] = {
                "value": cookie.value,
                "domain": cookie.domain,
                "path": cookie.path,
                "secure": cookie.secure,
                "expires": cookie.expires,
            }

    # Serialize headers (exclude proxy-authorization for security)
    if session.headers:
        for key, value in session.headers.items():
            # Skip proxy-related headers as they're server-specific
            if key.lower() not in ["proxy-authorization"]:
                session_data["headers"][key] = value

    return session_data
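Together with deserialize_session (defined just below), this round-trips cleanly; a quick sketch with placeholder values:

import requests

# Server side: capture an authenticated session as JSON-friendly data.
server_session = requests.Session()
server_session.headers["Authorization"] = "Bearer example-token"  # placeholder
payload = serialize_session(server_session)

# Client side: rebuild an equivalent session and reuse it for downloading.
client_session = deserialize_session(payload)
assert client_session.headers["Authorization"] == "Bearer example-token"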


def deserialize_session(
    session_data: Dict[str, Any], target_session: Optional[requests.Session] = None
) -> requests.Session:
    """
    Deserialize session data into a requests.Session.

    Applies cookies, headers, and other session data from a remote server
    to a local session for downloading.

    Args:
        session_data: Dictionary containing serialized session data
        target_session: Optional existing session to update (creates new if None)

    Returns:
        requests.Session with applied session data
    """
    if target_session is None:
        target_session = requests.Session()

    # Apply cookies
    if "cookies" in session_data:
        for cookie_name, cookie_data in session_data["cookies"].items():
            target_session.cookies.set(
                name=cookie_name,
                value=cookie_data["value"],
                domain=cookie_data.get("domain"),
                path=cookie_data.get("path", "/"),
                secure=cookie_data.get("secure", False),
                expires=cookie_data.get("expires"),
            )

    # Apply headers
    if "headers" in session_data:
        target_session.headers.update(session_data["headers"])

    # Note: We don't apply proxies from remote as the local client
    # should use its own proxy configuration

    return target_session


def extract_session_tokens(session: requests.Session) -> Dict[str, Any]:
    """
    Extract authentication tokens and similar data from a session.

    Looks for common authentication patterns like Bearer tokens,
    API keys in headers, etc.

    Args:
        session: The requests.Session to extract tokens from

    Returns:
        Dictionary containing extracted tokens
    """
    tokens = {}

    # Check for Authorization header
    if "Authorization" in session.headers:
        tokens["authorization"] = session.headers["Authorization"]

    # Check for common API key headers
    for key in ["X-API-Key", "Api-Key", "X-Auth-Token"]:
        if key in session.headers:
            tokens[key.lower().replace("-", "_")] = session.headers[key]

    return tokens


def apply_session_tokens(tokens: Dict[str, Any], target_session: requests.Session) -> None:
    """
    Apply authentication tokens to a session.

    Args:
        tokens: Dictionary containing tokens to apply
        target_session: Session to apply tokens to
    """
    # Apply Authorization header
    if "authorization" in tokens:
        target_session.headers["Authorization"] = tokens["authorization"]

    # Apply other token headers
    token_header_map = {
        "x_api_key": "X-API-Key",
        "api_key": "Api-Key",
        "x_auth_token": "X-Auth-Token",
    }

    for token_key, header_name in token_header_map.items():
        if token_key in tokens:
            target_session.headers[header_name] = tokens[token_key]


def serialize_cookies(cookie_jar: Optional[CookieJar]) -> Dict[str, Any]:
    """
    Serialize a CookieJar into a JSON-serializable dictionary.

    Args:
        cookie_jar: The CookieJar to serialize

    Returns:
        Dictionary containing serialized cookies
    """
    if not cookie_jar:
        return {}

    cookies = {}
    for cookie in cookie_jar:
        cookies[cookie.name] = {
            "value": cookie.value,
            "domain": cookie.domain,
            "path": cookie.path,
            "secure": cookie.secure,
            "expires": cookie.expires,
        }

    return cookies


def deserialize_cookies(cookies_data: Dict[str, Any]) -> CookieJar:
    """
    Deserialize cookies into a CookieJar.

    Args:
        cookies_data: Dictionary containing serialized cookies

    Returns:
        CookieJar with cookies
    """
    import http.cookiejar

    cookie_jar = http.cookiejar.CookieJar()

    for cookie_name, cookie_data in cookies_data.items():
        cookie = http.cookiejar.Cookie(
            version=0,
            name=cookie_name,
            value=cookie_data["value"],
            port=None,
            port_specified=False,
            domain=cookie_data.get("domain", ""),
            domain_specified=bool(cookie_data.get("domain")),
            domain_initial_dot=cookie_data.get("domain", "").startswith("."),
            path=cookie_data.get("path", "/"),
            path_specified=True,
            secure=cookie_data.get("secure", False),
            expires=cookie_data.get("expires"),
            discard=False,
            comment=None,
            comment_url=None,
            rest={},
        )
        cookie_jar.set_cookie(cookie)

    return cookie_jar


def serialize_credential(credential: Optional[Credential]) -> Optional[Dict[str, str]]:
    """
    Serialize a Credential into a JSON-serializable dictionary.

    Args:
        credential: The Credential to serialize

    Returns:
        Dictionary containing username and password, or None
    """
    if not credential:
        return None

    return {"username": credential.username, "password": credential.password}


def deserialize_credential(credential_data: Optional[Dict[str, str]]) -> Optional[Credential]:
    """
    Deserialize credential data into a Credential object.

    Args:
        credential_data: Dictionary containing username and password

    Returns:
        Credential object or None
    """
    if not credential_data:
        return None

    return Credential(username=credential_data["username"], password=credential_data["password"])
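The cookie helpers round-trip the same way; a sketch with a single placeholder cookie:

# Illustrative only; all values are placeholders.
jar = deserialize_cookies(
    {"sid": {"value": "abc123", "domain": "example.com", "path": "/", "secure": True, "expires": None}}
)
assert serialize_cookies(jar)["sid"]["value"] == "abc123"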
@@ -8,7 +8,25 @@ __shaka_platform = {"win32": "win", "darwin": "osx"}.get(sys.platform, sys.platf

def find(*names: str) -> Optional[Path]:
    """Find the path of the first found binary name."""
    current_dir = Path(__file__).resolve().parent.parent
    local_binaries_dir = current_dir / "binaries"

    ext = ".exe" if sys.platform == "win32" else ""

    for name in names:
        if local_binaries_dir.exists():
            candidate_paths = [
                local_binaries_dir / f"{name}{ext}",
                local_binaries_dir / name / f"{name}{ext}"
            ]

            for path in candidate_paths:
                if path.is_file():
                    # On Unix-like systems, check if file is executable
                    if sys.platform == "win32" or (path.stat().st_mode & 0o111):
                        return path

        # Fall back to system PATH
        path = shutil.which(name)
        if path:
            return Path(path)
@@ -32,6 +50,11 @@ HolaProxy = find("hola-proxy")
MPV = find("mpv")
Caddy = find("caddy")
N_m3u8DL_RE = find("N_m3u8DL-RE", "n-m3u8dl-re")
MKVToolNix = find("mkvmerge")
Mkvpropedit = find("mkvpropedit")
DoviTool = find("dovi_tool")
HDR10PlusTool = find("hdr10plus_tool", "HDR10Plus_tool")
Mp4decrypt = find("mp4decrypt")


__all__ = (
@@ -46,5 +69,10 @@ __all__ = (
    "MPV",
    "Caddy",
    "N_m3u8DL_RE",
    "MKVToolNix",
    "Mkvpropedit",
    "DoviTool",
    "HDR10PlusTool",
    "Mp4decrypt",
    "find",
)
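A small usage sketch for the lookup above; find() checks the package-local binaries directory (flat file or per-tool subfolder) before falling back to the system PATH:

from unshackle.core.binaries import find

# Returns a pathlib.Path to the first matching executable, or None.
mpv = find("mpv")
if mpv is None:
    raise RuntimeError("mpv not found in the local binaries directory or on PATH")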
@@ -1,3 +1,4 @@
from .custom_remote_cdm import CustomRemoteCDM
from .decrypt_labs_remote_cdm import DecryptLabsRemoteCDM

__all__ = ["DecryptLabsRemoteCDM"]
__all__ = ["DecryptLabsRemoteCDM", "CustomRemoteCDM"]
unshackle/core/cdm/custom_remote_cdm.py · 1085 lines · new file
File diff suppressed because it is too large.

unshackle/core/cdm/decrypt_labs_remote_cdm.py
@@ -1,143 +1,747 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import secrets
|
||||
from typing import Optional, Type, Union
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
from uuid import UUID
|
||||
|
||||
import requests
|
||||
from pywidevine import PSSH, Device, DeviceTypes, Key, RemoteCdm
|
||||
from pywidevine.license_protocol_pb2 import SignedDrmCertificate, SignedMessage
|
||||
from pywidevine.cdm import Cdm as WidevineCdm
|
||||
from pywidevine.device import DeviceTypes
|
||||
from requests import Session
|
||||
|
||||
# Copyright 2024 by DevYukine.
|
||||
from unshackle.core import __version__
|
||||
from unshackle.core.vaults import Vaults
|
||||
|
||||
|
||||
class DecryptLabsRemoteCDM(RemoteCdm):
|
||||
class MockCertificateChain:
|
||||
"""Mock certificate chain for PlayReady compatibility."""
|
||||
|
||||
def __init__(self, name: str):
|
||||
self._name = name
|
||||
|
||||
def get_name(self) -> str:
|
||||
return self._name
|
||||
|
||||
|
||||
class Key:
|
||||
"""Key object compatible with pywidevine."""
|
||||
|
||||
def __init__(self, kid: str, key: str, type_: str = "CONTENT"):
|
||||
if isinstance(kid, str):
|
||||
clean_kid = kid.replace("-", "")
|
||||
if len(clean_kid) == 32:
|
||||
self.kid = UUID(hex=clean_kid)
|
||||
else:
|
||||
self.kid = UUID(hex=clean_kid.ljust(32, "0"))
|
||||
else:
|
||||
self.kid = kid
|
||||
|
||||
if isinstance(key, str):
|
||||
self.key = bytes.fromhex(key)
|
||||
else:
|
||||
self.key = key
|
||||
|
||||
self.type = type_
|
||||
|
||||
|
||||
class DecryptLabsRemoteCDMExceptions:
|
||||
"""Exception classes for compatibility with pywidevine CDM."""
|
||||
|
||||
class InvalidSession(Exception):
|
||||
"""Raised when session ID is invalid."""
|
||||
|
||||
class TooManySessions(Exception):
|
||||
"""Raised when session limit is reached."""
|
||||
|
||||
class InvalidInitData(Exception):
|
||||
"""Raised when PSSH/init data is invalid."""
|
||||
|
||||
class InvalidLicenseType(Exception):
|
||||
"""Raised when license type is invalid."""
|
||||
|
||||
class InvalidLicenseMessage(Exception):
|
||||
"""Raised when license message is invalid."""
|
||||
|
||||
class InvalidContext(Exception):
|
||||
"""Raised when session has no context data."""
|
||||
|
||||
class SignatureMismatch(Exception):
|
||||
"""Raised when signature verification fails."""
|
||||
|
||||
|
||||
class DecryptLabsRemoteCDM:
|
||||
"""
|
||||
Decrypt Labs Remote CDM implementation with intelligent caching system.
|
||||
|
||||
This class provides a drop-in replacement for pywidevine's local CDM using
|
||||
Decrypt Labs' KeyXtractor API service, enhanced with smart caching logic
|
||||
that minimizes unnecessary license requests.
|
||||
|
||||
Key Features:
|
||||
- Compatible with both Widevine and PlayReady DRM schemes
|
||||
- Intelligent caching that compares required vs. available keys
|
||||
- Optimized caching for L1/L2 devices (leverages API auto-optimization)
|
||||
- Automatic key combination for mixed cache/license scenarios
|
||||
- Seamless fallback to license requests when keys are missing
|
||||
|
||||
Intelligent Caching System:
|
||||
1. DRM classes (PlayReady/Widevine) provide required KIDs via set_required_kids()
|
||||
2. get_license_challenge() first checks for cached keys
|
||||
3. For L1/L2 devices, always attempts cached keys first (API optimized)
|
||||
4. If cached keys satisfy requirements, returns empty challenge (no license needed)
|
||||
5. If keys are missing, makes targeted license request for remaining keys
|
||||
6. parse_license() combines cached and license keys intelligently
|
||||
"""
|
||||
|
||||
service_certificate_challenge = b"\x08\x04"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device_type: Union[DeviceTypes, str],
|
||||
system_id: int,
|
||||
security_level: int,
|
||||
host: str,
|
||||
secret: str,
|
||||
device_name: str,
|
||||
service_name: str,
|
||||
host: str = "https://keyxtractor.decryptlabs.com",
|
||||
device_name: str = "ChromeCDM",
|
||||
service_name: Optional[str] = None,
|
||||
vaults: Optional[Vaults] = None,
|
||||
device_type: Optional[str] = None,
|
||||
system_id: Optional[int] = None,
|
||||
security_level: Optional[int] = None,
|
||||
**kwargs,
|
||||
):
|
||||
self.response_counter = 0
|
||||
self.pssh = None
|
||||
self.api_session_ids = {}
|
||||
self.license_request = None
|
||||
self.service_name = service_name
|
||||
self.keys = {}
|
||||
try:
|
||||
super().__init__(device_type, system_id, security_level, host, secret, device_name)
|
||||
except Exception:
|
||||
pass
|
||||
self.req_session = requests.Session()
|
||||
self.req_session.headers.update({"decrypt-labs-api-key": secret})
|
||||
"""
|
||||
Initialize Decrypt Labs Remote CDM for Widevine and PlayReady schemes.
|
||||
|
||||
@classmethod
|
||||
def from_device(cls, device: Device) -> Type["DecryptLabsRemoteCDM"]:
|
||||
raise NotImplementedError("You cannot load a DecryptLabsRemoteCDM from a local Device file.")
|
||||
Args:
|
||||
secret: Decrypt Labs API key (matches config format)
|
||||
host: Decrypt Labs API host URL (matches config format)
|
||||
device_name: DRM scheme (ChromeCDM, L1, L2 for Widevine; SL2, SL3 for PlayReady)
|
||||
service_name: Service name for key caching and vault operations
|
||||
vaults: Vaults instance for local key caching
|
||||
device_type: Device type (CHROME, ANDROID, PLAYREADY) - for compatibility
|
||||
system_id: System ID - for compatibility
|
||||
security_level: Security level - for compatibility
|
||||
"""
|
||||
_ = kwargs
|
||||
|
||||
def open(self) -> bytes:
|
||||
# We stub this method to return a random session ID for now, later we save the api session id and resolve by our random generated one.
|
||||
return bytes.fromhex(secrets.token_hex(16))
|
||||
self.secret = secret
|
||||
self.host = host.rstrip("/")
|
||||
self.device_name = device_name
|
||||
self.service_name = service_name or ""
|
||||
self.vaults = vaults
|
||||
self.uch = self.host != "https://keyxtractor.decryptlabs.com"
|
||||
|
||||
def close(self, session_id: bytes) -> None:
|
||||
# We stub this method to do nothing.
|
||||
pass
|
||||
self._device_type_str = device_type
|
||||
if device_type:
|
||||
self.device_type = self._get_device_type_enum(device_type)
|
||||
|
||||
def set_service_certificate(self, session_id: bytes, certificate: Optional[Union[bytes, str]]) -> str:
|
||||
if isinstance(certificate, bytes):
|
||||
certificate = base64.b64encode(certificate).decode()
|
||||
self._is_playready = (device_type and device_type.upper() == "PLAYREADY") or (device_name in ["SL2", "SL3"])
|
||||
|
||||
# certificate needs to be base64 to be sent off to the API.
|
||||
# it needs to intentionally be kept as base64 encoded SignedMessage.
|
||||
if self._is_playready:
|
||||
self.system_id = system_id or 0
|
||||
self.security_level = security_level or (2000 if device_name == "SL2" else 3000)
|
||||
else:
|
||||
self.system_id = system_id or 26830
|
||||
self.security_level = security_level or 3
|
||||
|
||||
self.req_session.signed_device_certificate = certificate
|
||||
self.req_session.privacy_mode = True
|
||||
|
||||
return "success"
|
||||
|
||||
def get_service_certificate(self, session_id: bytes) -> Optional[SignedDrmCertificate]:
|
||||
raise NotImplementedError("This method is not implemented in this CDM")
|
||||
|
||||
def get_license_challenge(
|
||||
self, session_id: bytes, pssh: PSSH, license_type: str = "STREAMING", privacy_mode: bool = True
|
||||
) -> bytes:
|
||||
self.pssh = pssh
|
||||
|
||||
res = self.session(
|
||||
self.host + "/get-request",
|
||||
self._sessions: Dict[bytes, Dict[str, Any]] = {}
|
||||
self._pssh_b64 = None
|
||||
self._required_kids: Optional[List[str]] = None
|
||||
self._http_session = Session()
|
||||
self._http_session.headers.update(
|
||||
{
|
||||
"init_data": self.pssh.dumps(),
|
||||
"service_certificate": self.req_session.signed_device_certificate,
|
||||
"scheme": "widevine",
|
||||
"service": self.service_name,
|
||||
},
|
||||
"decrypt-labs-api-key": self.secret,
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": f"unshackle-decrypt-labs-cdm/{__version__}",
|
||||
}
|
||||
)
|
||||
|
||||
self.license_request = res["challenge"]
|
||||
self.api_session_ids[session_id] = res["session_id"]
|
||||
|
||||
return base64.b64decode(self.license_request)
|
||||
|
||||
def parse_license(self, session_id: bytes, license_message: Union[SignedMessage, bytes, str]) -> None:
|
||||
session_id_api = self.api_session_ids[session_id]
|
||||
if session_id not in self.keys:
|
||||
self.keys[session_id] = []
|
||||
session_keys = self.keys[session_id]
|
||||
|
||||
if isinstance(license_message, dict) and "keys" in license_message:
|
||||
session_keys.extend(
|
||||
[
|
||||
Key(kid=Key.kid_to_uuid(x["kid"]), type_=x.get("type", "CONTENT"), key=bytes.fromhex(x["key"]))
|
||||
for x in license_message["keys"]
|
||||
]
|
||||
)
|
||||
|
||||
def _get_device_type_enum(self, device_type: str):
|
||||
"""Convert device type string to enum for compatibility."""
|
||||
device_type_upper = device_type.upper()
|
||||
if device_type_upper == "ANDROID":
|
||||
return DeviceTypes.ANDROID
|
||||
elif device_type_upper == "CHROME":
|
||||
return DeviceTypes.CHROME
|
||||
else:
|
||||
res = self.session(
|
||||
self.host + "/decrypt-response",
|
||||
{
|
||||
"session_id": session_id_api,
|
||||
"init_data": self.pssh.dumps(),
|
||||
"license_request": self.license_request,
|
||||
"license_response": license_message,
|
||||
"scheme": "widevine",
|
||||
},
|
||||
)
|
||||
return DeviceTypes.CHROME
|
||||
|
||||
original_keys = res["keys"].replace("\n", " ")
|
||||
keys_separated = original_keys.split("--key ")
|
||||
formatted_keys = []
|
||||
for k in keys_separated:
|
||||
if ":" in k:
|
||||
key = k.strip()
|
||||
formatted_keys.append(key)
|
||||
for keys in formatted_keys:
|
||||
session_keys.append(
|
||||
(
|
||||
Key(
|
||||
kid=UUID(bytes=bytes.fromhex(keys.split(":")[0])),
|
||||
type_="CONTENT",
|
||||
key=bytes.fromhex(keys.split(":")[1]),
|
||||
)
|
||||
@property
|
||||
def is_playready(self) -> bool:
|
||||
"""Check if this CDM is in PlayReady mode."""
|
||||
return self._is_playready
|
||||
|
||||
@property
|
||||
def certificate_chain(self) -> MockCertificateChain:
|
||||
"""Mock certificate chain for PlayReady compatibility."""
|
||||
return MockCertificateChain(f"{self.device_name}_Remote")
|
||||
|
||||
def set_pssh_b64(self, pssh_b64: str) -> None:
|
||||
"""Store base64-encoded PSSH data for PlayReady compatibility."""
|
||||
self._pssh_b64 = pssh_b64
|
||||
|
||||
def set_required_kids(self, kids: List[Union[str, UUID]]) -> None:
|
||||
"""
|
||||
Set the required Key IDs for intelligent caching decisions.
|
||||
|
||||
This method enables the CDM to make smart decisions about when to request
|
||||
additional keys via license challenges. When cached keys are available,
|
||||
the CDM will compare them against the required KIDs to determine if a
|
||||
license request is still needed for missing keys.
|
||||
|
||||
Args:
|
||||
kids: List of required Key IDs as UUIDs or hex strings
|
||||
|
||||
Note:
|
||||
Should be called by DRM classes (PlayReady/Widevine) before making
|
||||
license challenge requests to enable optimal caching behavior.
|
||||
"""
|
||||
self._required_kids = []
|
||||
for kid in kids:
|
||||
if isinstance(kid, UUID):
|
||||
self._required_kids.append(str(kid).replace("-", "").lower())
|
||||
else:
|
||||
self._required_kids.append(str(kid).replace("-", "").lower())
|
||||
|
||||
def _generate_session_id(self) -> bytes:
|
||||
"""Generate a unique session ID."""
|
||||
return secrets.token_bytes(16)
|
||||
|
||||
def _get_init_data_from_pssh(self, pssh: Any) -> str:
|
||||
"""Extract init data from various PSSH formats."""
|
||||
if self.is_playready and self._pssh_b64:
|
||||
return self._pssh_b64
|
||||
|
||||
if hasattr(pssh, "dumps"):
|
||||
dumps_result = pssh.dumps()
|
||||
|
||||
if isinstance(dumps_result, str):
|
||||
try:
|
||||
base64.b64decode(dumps_result)
|
||||
return dumps_result
|
||||
except Exception:
|
||||
return base64.b64encode(dumps_result.encode("utf-8")).decode("utf-8")
|
||||
else:
|
||||
return base64.b64encode(dumps_result).decode("utf-8")
|
||||
elif hasattr(pssh, "raw"):
|
||||
raw_data = pssh.raw
|
||||
if isinstance(raw_data, str):
|
||||
raw_data = raw_data.encode("utf-8")
|
||||
return base64.b64encode(raw_data).decode("utf-8")
|
||||
elif hasattr(pssh, "__class__") and "WrmHeader" in pssh.__class__.__name__:
|
||||
if self.is_playready:
|
||||
raise ValueError("PlayReady WRM header received but no PSSH B64 was set via set_pssh_b64()")
|
||||
|
||||
if hasattr(pssh, "raw_bytes"):
|
||||
return base64.b64encode(pssh.raw_bytes).decode("utf-8")
|
||||
elif hasattr(pssh, "bytes"):
|
||||
return base64.b64encode(pssh.bytes).decode("utf-8")
|
||||
else:
|
||||
raise ValueError(f"Cannot extract PSSH data from WRM header type: {type(pssh)}")
|
||||
else:
|
||||
raise ValueError(f"Unsupported PSSH type: {type(pssh)}")
|
||||
|
||||
def open(self) -> bytes:
|
||||
"""
|
||||
Open a new CDM session.
|
||||
|
||||
Returns:
|
||||
Session identifier as bytes
|
||||
"""
|
||||
session_id = self._generate_session_id()
|
||||
self._sessions[session_id] = {
|
||||
"service_certificate": None,
|
||||
"keys": [],
|
||||
"pssh": None,
|
||||
"challenge": None,
|
||||
"decrypt_labs_session_id": None,
|
||||
"tried_cache": False,
|
||||
"cached_keys": None,
|
||||
}
|
||||
return session_id
|
||||
|
||||
def close(self, session_id: bytes) -> None:
|
||||
"""
|
||||
Close a CDM session and perform comprehensive cleanup.
|
||||
|
||||
Args:
|
||||
session_id: Session identifier
|
||||
|
||||
Raises:
|
||||
ValueError: If session ID is invalid
|
||||
"""
|
||||
if session_id not in self._sessions:
|
||||
raise DecryptLabsRemoteCDMExceptions.InvalidSession(f"Invalid session ID: {session_id.hex()}")
|
||||
|
||||
session = self._sessions[session_id]
|
||||
session.clear()
|
||||
del self._sessions[session_id]
|
||||
|
||||
def get_service_certificate(self, session_id: bytes) -> Optional[bytes]:
|
||||
"""
|
||||
Get the service certificate for a session.
|
||||
|
||||
Args:
|
||||
session_id: Session identifier
|
||||
|
||||
Returns:
|
||||
Service certificate if set, None otherwise
|
||||
|
||||
Raises:
|
||||
ValueError: If session ID is invalid
|
||||
"""
|
||||
if session_id not in self._sessions:
|
||||
raise DecryptLabsRemoteCDMExceptions.InvalidSession(f"Invalid session ID: {session_id.hex()}")
|
||||
|
||||
return self._sessions[session_id]["service_certificate"]
|
||||
|
||||
def set_service_certificate(self, session_id: bytes, certificate: Optional[Union[bytes, str]]) -> str:
|
||||
"""
|
||||
Set the service certificate for a session.
|
||||
|
||||
Args:
|
||||
session_id: Session identifier
|
||||
certificate: Service certificate (bytes or base64 string)
|
||||
|
||||
Returns:
|
||||
Certificate status message
|
||||
|
||||
Raises:
|
||||
ValueError: If session ID is invalid
|
||||
"""
|
||||
if session_id not in self._sessions:
|
||||
raise DecryptLabsRemoteCDMExceptions.InvalidSession(f"Invalid session ID: {session_id.hex()}")
|
||||
|
||||
if certificate is None:
|
||||
if not self._is_playready and self.device_name == "L1":
|
||||
certificate = WidevineCdm.common_privacy_cert
|
||||
self._sessions[session_id]["service_certificate"] = base64.b64decode(certificate)
|
||||
return "Using default Widevine common privacy certificate for L1"
|
||||
else:
|
||||
self._sessions[session_id]["service_certificate"] = None
|
||||
return "No certificate set (not required for this device type)"
|
||||
|
||||
if isinstance(certificate, str):
|
||||
certificate = base64.b64decode(certificate)
|
||||
|
||||
self._sessions[session_id]["service_certificate"] = certificate
|
||||
return "Successfully set Service Certificate"
|
||||
|
||||
def has_cached_keys(self, session_id: bytes) -> bool:
|
||||
"""
|
||||
Check if cached keys are available for the session.
|
||||
|
||||
Args:
|
||||
session_id: Session identifier
|
||||
|
||||
Returns:
|
||||
True if cached keys are available
|
||||
|
||||
Raises:
|
||||
ValueError: If session ID is invalid
|
||||
"""
|
||||
if session_id not in self._sessions:
|
||||
raise DecryptLabsRemoteCDMExceptions.InvalidSession(f"Invalid session ID: {session_id.hex()}")
|
||||
|
||||
session = self._sessions[session_id]
|
||||
session_keys = session.get("keys", [])
|
||||
return len(session_keys) > 0
|
||||
|
||||
def get_license_challenge(
|
||||
self, session_id: bytes, pssh_or_wrm: Any, license_type: str = "STREAMING", privacy_mode: bool = True
|
||||
) -> bytes:
|
||||
"""
|
||||
Generate a license challenge using Decrypt Labs API with intelligent caching.
|
||||
|
||||
This method implements smart caching logic that:
|
||||
1. First checks local vaults for required keys
|
||||
2. Attempts to retrieve cached keys from the API
|
||||
3. If required KIDs are set, compares available keys (vault + cached) against requirements
|
||||
4. Only makes a license request if keys are missing
|
||||
5. Returns empty challenge if all required keys are available
|
||||
|
||||
The intelligent caching works as follows:
|
||||
- Local vaults: Always checked first if available
|
||||
- For L1/L2 devices: Always prioritizes cached keys (API automatically optimizes)
|
||||
- For other devices: Uses cache retry logic based on session state
|
||||
- With required KIDs set: Only requests license for missing keys
|
||||
- Without required KIDs: Returns any available cached keys
|
||||
- For PlayReady: Combines vault, cached, and license keys seamlessly
|
||||
|
||||
Args:
|
||||
session_id: Session identifier
|
||||
pssh_or_wrm: PSSH object or WRM header (for PlayReady compatibility)
|
||||
license_type: Type of license (STREAMING, OFFLINE, AUTOMATIC) - for compatibility only
|
||||
privacy_mode: Whether to use privacy mode - for compatibility only
|
||||
|
||||
Returns:
|
||||
License challenge as bytes, or empty bytes if available keys satisfy requirements
|
||||
|
||||
Raises:
|
||||
InvalidSession: If session ID is invalid
|
||||
requests.RequestException: If API request fails
|
||||
|
||||
Note:
|
||||
Call set_required_kids() before this method for optimal caching behavior.
|
||||
L1/L2 devices automatically use cached keys when available per API design.
|
||||
Local vault keys are always checked first when vaults are available.
|
||||
"""
|
||||
_ = license_type, privacy_mode
|
||||
|
||||
if session_id not in self._sessions:
|
||||
raise DecryptLabsRemoteCDMExceptions.InvalidSession(f"Invalid session ID: {session_id.hex()}")
|
||||
|
||||
session = self._sessions[session_id]
|
||||
|
||||
session["pssh"] = pssh_or_wrm
|
||||
init_data = self._get_init_data_from_pssh(pssh_or_wrm)
|
||||
already_tried_cache = session.get("tried_cache", False)
|
||||
|
||||
if self.vaults and self._required_kids:
|
||||
vault_keys = []
|
||||
for kid_str in self._required_kids:
|
||||
try:
|
||||
clean_kid = kid_str.replace("-", "")
|
||||
if len(clean_kid) == 32:
|
||||
kid_uuid = UUID(hex=clean_kid)
|
||||
else:
|
||||
kid_uuid = UUID(hex=clean_kid.ljust(32, "0"))
|
||||
key, _ = self.vaults.get_key(kid_uuid)
|
||||
if key and key.count("0") != len(key):
|
||||
vault_keys.append({"kid": kid_str, "key": key, "type": "CONTENT"})
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
|
||||
if vault_keys:
|
||||
vault_kids = set(k["kid"] for k in vault_keys)
|
||||
required_kids = set(self._required_kids)
|
||||
|
||||
if required_kids.issubset(vault_kids):
|
||||
session["keys"] = vault_keys
|
||||
return b""
|
||||
else:
|
||||
session["vault_keys"] = vault_keys
|
||||
|
||||
if self.device_name in ["L1", "L2"]:
|
||||
get_cached_keys = True
|
||||
else:
|
||||
get_cached_keys = not already_tried_cache
|
||||
|
||||
request_data = {
|
||||
"scheme": self.device_name,
|
||||
"init_data": init_data,
|
||||
"get_cached_keys_if_exists": get_cached_keys,
|
||||
}
|
||||
|
||||
if self.service_name:
|
||||
request_data["service"] = self.service_name
|
||||
|
||||
if session["service_certificate"]:
|
||||
request_data["service_certificate"] = base64.b64encode(session["service_certificate"]).decode("utf-8")
|
||||
|
||||
        response = self._http_session.post(f"{self.host}/get-request", json=request_data, timeout=30)

        if response.status_code != 200:
            raise requests.RequestException(f"API request failed: {response.status_code} {response.text}")

        data = response.json()

        if data.get("message") != "success":
            error_msg = data.get("message", "Unknown error")
            if "details" in data:
                error_msg += f" - Details: {data['details']}"
            if "error" in data:
                error_msg += f" - Error: {data['error']}"

            if "service_certificate is required" in str(data) and not session["service_certificate"]:
                error_msg += " (No service certificate was provided to the CDM session)"

            raise requests.RequestException(f"API error: {error_msg}")

        message_type = data.get("message_type")

        if message_type == "cached-keys" or "cached_keys" in data:
            """
            Handle cached keys response from API.

            When the API returns cached keys, we need to determine if they satisfy
            our requirements or if we need to make an additional license request
            for missing keys.
            """
            cached_keys = data.get("cached_keys", [])
            parsed_keys = self._parse_cached_keys(cached_keys)

            all_available_keys = list(parsed_keys)
            if "vault_keys" in session:
                all_available_keys.extend(session["vault_keys"])

            session["keys"] = all_available_keys
            session["tried_cache"] = True

            if self._required_kids:
                available_kids = set()
                for key in all_available_keys:
                    if isinstance(key, dict) and "kid" in key:
                        available_kids.add(key["kid"].replace("-", "").lower())

                required_kids = set(self._required_kids)
                missing_kids = required_kids - available_kids

                if missing_kids:
                    session["cached_keys"] = parsed_keys

                    if self.device_name in ["L1", "L2"]:
                        license_request_data = {
                            "scheme": self.device_name,
                            "init_data": init_data,
                            "get_cached_keys_if_exists": False,
                        }
                        if self.service_name:
                            license_request_data["service"] = self.service_name
                        if session["service_certificate"]:
                            license_request_data["service_certificate"] = base64.b64encode(
                                session["service_certificate"]
                            ).decode("utf-8")
                    else:
                        license_request_data = request_data.copy()
                        license_request_data["get_cached_keys_if_exists"] = False

                    session["decrypt_labs_session_id"] = None
                    session["challenge"] = None
                    session["tried_cache"] = False

                    response = self._http_session.post(
                        f"{self.host}/get-request", json=license_request_data, timeout=30
                    )
                    if response.status_code == 200:
                        data = response.json()
                        if data.get("message") == "success" and "challenge" in data:
                            challenge = base64.b64decode(data["challenge"])
                            session["challenge"] = challenge
                            session["decrypt_labs_session_id"] = data["session_id"]
                            return challenge

                        return b""
                    else:
                        return b""
                else:
                    return b""

        if message_type == "license-request" or "challenge" in data:
            challenge = base64.b64decode(data["challenge"])
            session["challenge"] = challenge
            session["decrypt_labs_session_id"] = data["session_id"]
            return challenge

        error_msg = f"Unexpected API response format. message_type={message_type}, available_fields={list(data.keys())}"
        if data.get("message"):
            error_msg = f"API response: {data['message']} - {error_msg}"
        if "details" in data:
            error_msg += f" - Details: {data['details']}"
        if "error" in data:
            error_msg += f" - Error: {data['error']}"

        if already_tried_cache and data.get("message") == "success":
            return b""

        raise requests.RequestException(error_msg)
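A rough usage sketch of the challenge flow above (hedged: `cdm`, `pssh` and `send_to_license_server` are illustrative placeholders, not names from this file):

    # Drive the Decrypt Labs CDM end to end; an empty challenge means cached keys
    # already satisfied every required KID, so no license round-trip is needed.
    session_id = cdm.open()
    challenge = cdm.get_license_challenge(session_id, pssh)
    if challenge:
        license_response = send_to_license_server(challenge)  # service-specific, hypothetical
        cdm.parse_license(session_id, license_response)
    keys = cdm.get_keys(session_id, "CONTENT")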
    def parse_license(self, session_id: bytes, license_message: Union[bytes, str]) -> None:
        """
        Parse license response using Decrypt Labs API with intelligent key combination.

        For PlayReady content with partial cached keys, this method intelligently
        combines the cached keys with newly obtained license keys, avoiding
        duplicates while ensuring all required keys are available.

        The key combination process:
        1. Extracts keys from the license response
        2. If cached keys exist (PlayReady), combines them with license keys
        3. Removes duplicate keys by comparing normalized KIDs
        4. Updates the session with the complete key set

        Args:
            session_id: Session identifier
            license_message: License response from license server

        Raises:
            ValueError: If session ID is invalid or no challenge available
            requests.RequestException: If API request fails
        """
        if session_id not in self._sessions:
            raise DecryptLabsRemoteCDMExceptions.InvalidSession(f"Invalid session ID: {session_id.hex()}")

        session = self._sessions[session_id]

        if session["keys"] and not (self.is_playready and "cached_keys" in session):
            return

        if not session.get("challenge") or not session.get("decrypt_labs_session_id"):
            raise ValueError("No challenge available - call get_license_challenge first")

        if isinstance(license_message, str):
            if self.is_playready and license_message.strip().startswith("<?xml"):
                license_message = license_message.encode("utf-8")
            else:
                try:
                    license_message = base64.b64decode(license_message)
                except Exception:
                    license_message = license_message.encode("utf-8")

        pssh = session["pssh"]
        init_data = self._get_init_data_from_pssh(pssh)

        license_request_b64 = base64.b64encode(session["challenge"]).decode("utf-8")
        license_response_b64 = base64.b64encode(license_message).decode("utf-8")

        request_data = {
            "scheme": self.device_name,
            "session_id": session["decrypt_labs_session_id"],
            "init_data": init_data,
            "license_request": license_request_b64,
            "license_response": license_response_b64,
        }

        response = self._http_session.post(f"{self.host}/decrypt-response", json=request_data, timeout=30)

        if response.status_code != 200:
            raise requests.RequestException(f"License decrypt failed: {response.status_code} {response.text}")

        data = response.json()

        if data.get("message") != "success":
            error_msg = data.get("message", "Unknown error")
            if "error" in data:
                error_msg += f" - Error: {data['error']}"
            if "details" in data:
                error_msg += f" - Details: {data['details']}"
            raise requests.RequestException(f"License decrypt error: {error_msg}")

        license_keys = self._parse_keys_response(data)

        all_keys = []

        if "vault_keys" in session:
            all_keys.extend(session["vault_keys"])

        if "cached_keys" in session:
            cached_keys = session.get("cached_keys", [])
            if cached_keys:
                for cached_key in cached_keys:
                    all_keys.append(cached_key)

        for license_key in license_keys:
            already_exists = False
            license_kid = None
            if isinstance(license_key, dict) and "kid" in license_key:
                license_kid = license_key["kid"].replace("-", "").lower()
            elif hasattr(license_key, "kid"):
                license_kid = str(license_key.kid).replace("-", "").lower()
            elif hasattr(license_key, "key_id"):
                license_kid = str(license_key.key_id).replace("-", "").lower()

            if license_kid:
                for existing_key in all_keys:
                    existing_kid = None
                    if isinstance(existing_key, dict) and "kid" in existing_key:
                        existing_kid = existing_key["kid"].replace("-", "").lower()
                    elif hasattr(existing_key, "kid"):
                        existing_kid = str(existing_key.kid).replace("-", "").lower()
                    elif hasattr(existing_key, "key_id"):
                        existing_kid = str(existing_key.key_id).replace("-", "").lower()

                    if existing_kid == license_kid:
                        already_exists = True
                        break

            if not already_exists:
                all_keys.append(license_key)

        session["keys"] = all_keys
        session.pop("cached_keys", None)
        session.pop("vault_keys", None)

        if self.vaults and session["keys"]:
            key_dict = {}
            for key in session["keys"]:
                if key["type"] == "CONTENT":
                    try:
                        clean_kid = key["kid"].replace("-", "")
                        if len(clean_kid) == 32:
                            kid_uuid = UUID(hex=clean_kid)
                        else:
                            kid_uuid = UUID(hex=clean_kid.ljust(32, "0"))
                        key_dict[kid_uuid] = key["key"]
                    except (ValueError, TypeError):
                        continue
            if key_dict:
                self.vaults.add_keys(key_dict)
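A small self-contained sketch of the KID-normalised dedup performed above (values are illustrative):

    def norm(kid: str) -> str:
        return kid.replace("-", "").lower()

    cached = [{"kid": "0123456789abcdef0123456789abcdef", "key": "aa" * 16, "type": "CONTENT"}]
    licensed = [{"kid": "01234567-89AB-CDEF-0123-456789ABCDEF", "key": "bb" * 16, "type": "CONTENT"}]
    merged = list(cached)
    for key in licensed:
        if norm(key["kid"]) not in {norm(k["kid"]) for k in merged}:
            merged.append(key)
    assert len(merged) == 1  # dashed and undashed spellings collapse to one key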
    def get_keys(self, session_id: bytes, type_: Optional[str] = None) -> List[Key]:
        """
        Get keys from the session.

        Args:
            session_id: Session identifier
            type_: Optional key type filter (CONTENT, SIGNING, etc.)

        Returns:
            List of Key objects

        Raises:
            InvalidSession: If session ID is invalid
        """
        if session_id not in self._sessions:
            raise DecryptLabsRemoteCDMExceptions.InvalidSession(f"Invalid session ID: {session_id.hex()}")

        key_dicts = self._sessions[session_id]["keys"]
        keys = [Key(kid=k["kid"], key=k["key"], type_=k["type"]) for k in key_dicts]

        if type_:
            keys = [key for key in keys if key.type == type_]

        return keys
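Typical call site (hedged sketch; `cdm` and `session_id` come from the surrounding session flow):

    # Restrict the result to content decryption keys only.
    for key in cdm.get_keys(session_id, type_="CONTENT"):
        print(f"{key.kid}:{key.key}")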
    def _parse_cached_keys(self, cached_keys_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Parse cached keys from API response.

        Args:
            cached_keys_data: List of cached key objects from API

        Returns:
            List of key dictionaries
        """
        keys = []

        try:
            if cached_keys_data and isinstance(cached_keys_data, list):
                for key_data in cached_keys_data:
                    if "kid" in key_data and "key" in key_data:
                        keys.append({"kid": key_data["kid"], "key": key_data["key"], "type": "CONTENT"})
        except Exception:
            pass
        return keys
    def _parse_keys_response(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Parse keys from decrypt response."""
        keys = []

        if "keys" in data and isinstance(data["keys"], str):
            keys_string = data["keys"]

            for line in keys_string.split("\n"):
                line = line.strip()
                if line.startswith("--key "):
                    key_part = line[6:]
                    if ":" in key_part:
                        kid, key = key_part.split(":", 1)
                        keys.append({"kid": kid.strip(), "key": key.strip(), "type": "CONTENT"})
        elif "keys" in data and isinstance(data["keys"], list):
            for key_data in data["keys"]:
                keys.append(
                    {"kid": key_data.get("kid"), "key": key_data.get("key"), "type": key_data.get("type", "CONTENT")}
                )

        return keys

    def get_keys(self, session_id: bytes, type_: Optional[Union[int, str]] = None) -> list[Key]:
        return self.keys[session_id]
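The string branch above accepts mp4decrypt-style `--key KID:KEY` lines. A self-contained sketch of that parse (illustrative hex values):

    blob = (
        "--key 00000000000000000000000000000001:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
        "--key 00000000000000000000000000000002:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
    )
    parsed = []
    for line in blob.split("\n"):
        line = line.strip()
        if line.startswith("--key ") and ":" in line[6:]:
            kid, key = line[6:].split(":", 1)
            parsed.append({"kid": kid.strip(), "key": key.strip(), "type": "CONTENT"})
    assert len(parsed) == 2 and parsed[0]["kid"].endswith("01")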
    def session(self, url, data, retries=3):
        res = self.req_session.post(url, json=data).json()

        if res.get("message") != "success":
            if "License Response Decryption Process Failed at the very beginning" in res.get("Error", ""):
                if retries > 0:
                    return self.session(url, data, retries=retries - 1)
                else:
                    raise ValueError(f"CDM API returned an error: {res['Error']}")
            else:
                raise ValueError(f"CDM API returned an error: {res['Error']}")

        return res


__all__ = ["DecryptLabsRemoteCDM"]
@@ -14,7 +14,7 @@ class Config:
        core_dir = Path(__file__).resolve().parent
        namespace_dir = core_dir.parent
        commands = namespace_dir / "commands"
        services = namespace_dir / "services"
        services = [namespace_dir / "services"]
        vaults = namespace_dir / "vaults"
        fonts = namespace_dir / "fonts"
        user_configs = core_dir.parent
@@ -31,6 +31,7 @@ class Config:
    class _Filenames:
        # default filenames, do not modify here, set via config
        log = "unshackle_{name}_{time}.log"  # Directories.logs
        debug_log = "unshackle_debug_{service}_{time}.jsonl"  # Directories.logs
        config = "config.yaml"  # Directories.services / tag
        root_config = "unshackle.yaml"  # Directories.user_configs
        chapters = "Chapters_{title}_{random}.txt"  # Directories.temp
@@ -45,13 +46,17 @@ class Config:
        self.curl_impersonate: dict = kwargs.get("curl_impersonate") or {}
        self.remote_cdm: list[dict] = kwargs.get("remote_cdm") or []
        self.credentials: dict = kwargs.get("credentials") or {}
        self.subtitle: dict = kwargs.get("subtitle") or {}

        self.directories = self._Directories()
        for name, path in (kwargs.get("directories") or {}).items():
            if name.lower() in ("app_dirs", "core_dir", "namespace_dir", "user_configs", "data"):
                # these must not be modified by the user
                continue
            setattr(self.directories, name, Path(path).expanduser())
            if name == "services" and isinstance(path, list):
                setattr(self.directories, name, [Path(p).expanduser() for p in path])
            else:
                setattr(self.directories, name, Path(path).expanduser())

        downloader_cfg = kwargs.get("downloader") or "requests"
        if isinstance(downloader_cfg, dict):
@@ -68,13 +73,37 @@ class Config:
        self.headers: dict = kwargs.get("headers") or {}
        self.key_vaults: list[dict[str, Any]] = kwargs.get("key_vaults", [])
        self.muxing: dict = kwargs.get("muxing") or {}
        self.nordvpn: dict = kwargs.get("nordvpn") or {}
        self.proxy_providers: dict = kwargs.get("proxy_providers") or {}
        self.serve: dict = kwargs.get("serve") or {}
        self.services: dict = kwargs.get("services") or {}
        decryption_cfg = kwargs.get("decryption") or {}
        if isinstance(decryption_cfg, dict):
            self.decryption_map = {k.upper(): v for k, v in decryption_cfg.items()}
            self.decryption = self.decryption_map.get("DEFAULT", "shaka")
        else:
            self.decryption_map = {}
            self.decryption = decryption_cfg or "shaka"

        self.set_terminal_bg: bool = kwargs.get("set_terminal_bg", False)
        self.tag: str = kwargs.get("tag") or ""
        self.tag_group_name: bool = kwargs.get("tag_group_name", True)
        self.tag_imdb_tmdb: bool = kwargs.get("tag_imdb_tmdb", True)
        self.tmdb_api_key: str = kwargs.get("tmdb_api_key") or ""
        self.simkl_client_id: str = kwargs.get("simkl_client_id") or ""
        self.decrypt_labs_api_key: str = kwargs.get("decrypt_labs_api_key") or ""
        self.update_checks: bool = kwargs.get("update_checks", True)
        self.update_check_interval: int = kwargs.get("update_check_interval", 24)
        self.scene_naming: bool = kwargs.get("scene_naming", True)
        self.series_year: bool = kwargs.get("series_year", True)

        self.title_cache_time: int = kwargs.get("title_cache_time", 1800)  # 30 minutes default
        self.title_cache_max_retention: int = kwargs.get("title_cache_max_retention", 86400)  # 24 hours default
        self.title_cache_enabled: bool = kwargs.get("title_cache_enabled", True)

        self.debug: bool = kwargs.get("debug", False)
        self.debug_keys: bool = kwargs.get("debug_keys", False)

        self.remote_services: list[dict] = kwargs.get("remote_services") or []

    @classmethod
    def from_yaml(cls, path: Path) -> Config:
@@ -91,8 +120,8 @@ POSSIBLE_CONFIG_PATHS = (
    Config._Directories.namespace_dir / Config._Filenames.root_config,
    # The Parent Folder to the unshackle Namespace Folder (e.g., %appdata%/Python/Python311/site-packages)
    Config._Directories.namespace_dir.parent / Config._Filenames.root_config,
    # The AppDirs User Config Folder (e.g., %localappdata%/unshackle)
    Config._Directories.user_configs / Config._Filenames.root_config,
    # The AppDirs User Config Folder (e.g., ~/.config/unshackle on Linux, %LOCALAPPDATA%\unshackle on Windows)
    Path(Config._Directories.app_dirs.user_config_dir) / Config._Filenames.root_config,
)
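A sketch of how the per-service decryption map built above could be resolved at decrypt time (hedged: only the map construction is shown in this file; `decrypter_for` is an illustrative helper):

    decryption_map = {"DEFAULT": "shaka", "DSNP": "mp4decrypt"}  # illustrative service tags

    def decrypter_for(service_tag: str) -> str:
        # Keys were upper-cased on construction, so normalise the query too.
        return decryption_map.get(service_tag.upper(), decryption_map.get("DEFAULT", "shaka"))

    assert decrypter_for("dsnp") == "mp4decrypt"
    assert decrypter_for("NF") == "shaka"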
@@ -6,8 +6,9 @@ DOWNLOAD_LICENCE_ONLY = Event()

DRM_SORT_MAP = ["ClearKey", "Widevine"]
LANGUAGE_MAX_DISTANCE = 5  # this is max to be considered "same", e.g., en, en-US, en-AU
LANGUAGE_EXACT_DISTANCE = 0  # exact match only, no variants
VIDEO_CODEC_MAP = {"AVC": "H.264", "HEVC": "H.265"}
DYNAMIC_RANGE_MAP = {"HDR10": "HDR", "HDR10+": "HDR", "Dolby Vision": "DV"}
DYNAMIC_RANGE_MAP = {"HDR10": "HDR", "HDR10+": "HDR10P", "Dolby Vision": "DV", "HDR10 / HDR10+": "HDR10P", "HDR10 / HDR10": "HDR"}
AUDIO_CODEC_MAP = {"E-AC-3": "DDP", "AC-3": "DD"}

context_settings = dict(

@@ -76,6 +76,11 @@ def download(url: str, save_path: Path, session: Session, **kwargs: Any) -> Gene

    try:
        content_length = int(stream.headers.get("Content-Length", "0"))

        # Skip Content-Length validation for compressed responses since
        # curl_impersonate automatically decompresses but Content-Length shows compressed size
        if stream.headers.get("Content-Encoding", "").lower() in ["gzip", "deflate", "br"]:
            content_length = 0
    except ValueError:
        content_length = 0

@@ -150,6 +150,7 @@ def download(

    track_type = track.__class__.__name__
    thread_count = str(config.n_m3u8dl_re.get("thread_count", max_workers))
    retry_count = str(config.n_m3u8dl_re.get("retry_count", max_workers))
    ad_keyword = config.n_m3u8dl_re.get("ad_keyword")

    arguments = [
@@ -160,6 +161,8 @@ def download(
        output_dir,
        "--thread-count",
        thread_count,
        "--download-retry-count",
        retry_count,
        "--no-log",
        "--write-meta-json",
        "false",

@@ -90,6 +90,11 @@ def download(
    if not segmented:
        try:
            content_length = int(stream.headers.get("Content-Length", "0"))

            # Skip Content-Length validation for compressed responses since
            # requests automatically decompresses but Content-Length shows compressed size
            if stream.headers.get("Content-Encoding", "").lower() in ["gzip", "deflate", "br"]:
                content_length = 0
        except ValueError:
            content_length = 0
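Why both hunks above zero out the length check (self-contained sketch with illustrative header values):

    # For compressed transfer encodings, Content-Length reports the on-the-wire
    # (compressed) size while the client writes decompressed bytes, so comparing
    # the two would always produce a false mismatch.
    headers = {"Content-Length": "1024", "Content-Encoding": "gzip"}
    content_length = int(headers.get("Content-Length", "0"))
    if headers.get("Content-Encoding", "").lower() in ["gzip", "deflate", "br"]:
        content_length = 0  # decompressed size unknown; skip validation
    assert content_length == 0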
@@ -8,6 +8,7 @@ from urllib.parse import urljoin

from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import unpad
from curl_cffi.requests import Session as CurlSession
from m3u8.model import Key
from requests import Session

@@ -69,8 +70,8 @@ class ClearKey:
        """
        if not isinstance(m3u_key, Key):
            raise ValueError(f"Provided M3U Key is in an unexpected type {m3u_key!r}")
        if not isinstance(session, (Session, type(None))):
            raise TypeError(f"Expected session to be a {Session}, not a {type(session)}")
        if not isinstance(session, (Session, CurlSession, type(None))):
            raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not a {type(session)}")

        if not m3u_key.method.startswith("AES"):
            raise ValueError(f"Provided M3U Key is not an AES Clear Key, {m3u_key.method}")
@@ -39,17 +39,23 @@ class PlayReady:
        if not isinstance(pssh, PSSH):
            raise TypeError(f"Expected pssh to be a {PSSH}, not {pssh!r}")

        kids: list[UUID] = []
        for header in pssh.wrm_headers:
            try:
                signed_ids, _, _, _ = header.read_attributes()
            except Exception:
                continue
            for signed_id in signed_ids:
        if pssh_b64:
            kids = self._extract_kids_from_pssh_b64(pssh_b64)
        else:
            kids = []

        # Extract KIDs using pyplayready's method (may miss some KIDs)
        if not kids:
            for header in pssh.wrm_headers:
                try:
                    kids.append(UUID(bytes_le=base64.b64decode(signed_id.value)))
                    signed_ids, _, _, _ = header.read_attributes()
                except Exception:
                    continue
                for signed_id in signed_ids:
                    try:
                        kids.append(UUID(bytes_le=base64.b64decode(signed_id.value)))
                    except Exception:
                        continue

        if kid:
            if isinstance(kid, str):
@@ -72,6 +78,66 @@ class PlayReady:
        if pssh_b64:
            self.data.setdefault("pssh_b64", pssh_b64)

    def _extract_kids_from_pssh_b64(self, pssh_b64: str) -> list[UUID]:
        """Extract all KIDs from base64-encoded PSSH data."""
        try:
            import xml.etree.ElementTree as ET

            # Decode the PSSH
            pssh_bytes = base64.b64decode(pssh_b64)

            # Try to find XML in the PSSH data
            # PlayReady PSSH usually has XML embedded in it
            pssh_str = pssh_bytes.decode("utf-16le", errors="ignore")

            # Find WRMHEADER
            xml_start = pssh_str.find("<WRMHEADER")
            if xml_start == -1:
                # Try UTF-8
                pssh_str = pssh_bytes.decode("utf-8", errors="ignore")
                xml_start = pssh_str.find("<WRMHEADER")

            if xml_start != -1:
                clean_xml = pssh_str[xml_start:]
                xml_end = clean_xml.find("</WRMHEADER>") + len("</WRMHEADER>")
                clean_xml = clean_xml[:xml_end]

                root = ET.fromstring(clean_xml)
                ns = {"pr": "http://schemas.microsoft.com/DRM/2007/03/PlayReadyHeader"}

                kids = []

                # Extract from CUSTOMATTRIBUTES/KIDS
                kid_elements = root.findall(".//pr:CUSTOMATTRIBUTES/pr:KIDS/pr:KID", ns)
                for kid_elem in kid_elements:
                    value = kid_elem.get("VALUE")
                    if value:
                        try:
                            kid_bytes = base64.b64decode(value + "==")
                            kid_uuid = UUID(bytes_le=kid_bytes)
                            kids.append(kid_uuid)
                        except Exception:
                            pass

                # Also get individual KID
                individual_kids = root.findall(".//pr:DATA/pr:KID", ns)
                for kid_elem in individual_kids:
                    if kid_elem.text:
                        try:
                            kid_bytes = base64.b64decode(kid_elem.text.strip() + "==")
                            kid_uuid = UUID(bytes_le=kid_bytes)
                            if kid_uuid not in kids:
                                kids.append(kid_uuid)
                        except Exception:
                            pass

                return kids

        except Exception:
            pass

        return []

    @classmethod
    def from_track(cls, track: AnyTrack, session: Optional[Session] = None) -> PlayReady:
        if not session:
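A self-contained sketch of the KID decoding used by `_extract_kids_from_pssh_b64` above: PlayReady WRMHEADERs carry each KID as base64 of the GUID's little-endian byte layout (illustrative value):

    import base64
    from uuid import UUID

    kid = UUID("01234567-89ab-cdef-0123-456789abcdef")
    kid_b64 = base64.b64encode(kid.bytes_le).decode()       # as it would appear in a WRMHEADER
    assert UUID(bytes_le=base64.b64decode(kid_b64)) == kid  # round-trips losslessly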
@@ -158,43 +224,138 @@ class PlayReady:
    def kids(self) -> list[UUID]:
        return self._kids

    def get_content_keys(self, cdm: PlayReadyCdm, certificate: Callable, licence: Callable) -> None:
        for kid in self.kids:
            if kid in self.content_keys:
    def _extract_keys_from_cdm(self, cdm: PlayReadyCdm, session_id: bytes) -> dict:
        """Extract keys from CDM session with cross-library compatibility.

        Args:
            cdm: CDM instance
            session_id: Session identifier

        Returns:
            Dictionary mapping KID UUIDs to hex keys
        """
        keys = {}
        for key in cdm.get_keys(session_id):
            if hasattr(key, "key_id"):
                kid = key.key_id
            elif hasattr(key, "kid"):
                kid = key.kid
            else:
                continue
            session_id = cdm.open()
            try:
                challenge = cdm.get_license_challenge(session_id, self.pssh.wrm_headers[0])
                license_res = licence(challenge=challenge)

                if isinstance(license_res, bytes):
                    license_str = license_res.decode(errors="ignore")
                else:
                    license_str = str(license_res)
            if hasattr(key, "key") and hasattr(key.key, "hex"):
                key_hex = key.key.hex()
            elif hasattr(key, "key") and isinstance(key.key, bytes):
                key_hex = key.key.hex()
            elif hasattr(key, "key") and isinstance(key.key, str):
                key_hex = key.key
            else:
                continue

                if "<License>" not in license_str:
                    try:
                        license_str = base64.b64decode(license_str + "===").decode()
                    except Exception:
                        pass
            keys[kid] = key_hex
        return keys

                cdm.parse_license(session_id, license_str)
                keys = {key.key_id: key.key.hex() for key in cdm.get_keys(session_id)}
                self.content_keys.update(keys)
            finally:
                cdm.close(session_id)
    def get_content_keys(self, cdm: PlayReadyCdm, certificate: Callable, licence: Callable) -> None:
        session_id = cdm.open()
        try:
            if hasattr(cdm, "set_pssh_b64") and self.pssh_b64:
                cdm.set_pssh_b64(self.pssh_b64)

            if hasattr(cdm, "set_required_kids"):
                cdm.set_required_kids(self.kids)

            challenge = cdm.get_license_challenge(session_id, self.pssh.wrm_headers[0])

            if challenge:
                try:
                    license_res = licence(challenge=challenge)
                    if isinstance(license_res, bytes):
                        license_str = license_res.decode(errors="ignore")
                    else:
                        license_str = str(license_res)

                    if "<License>" not in license_str:
                        try:
                            license_str = base64.b64decode(license_str + "===").decode()
                        except Exception:
                            pass

                    cdm.parse_license(session_id, license_str)
                except Exception:
                    raise

            keys = self._extract_keys_from_cdm(cdm, session_id)
            self.content_keys.update(keys)
        finally:
            cdm.close(session_id)

        if not self.content_keys:
            raise PlayReady.Exceptions.EmptyLicense("No Content Keys were within the License")

    def decrypt(self, path: Path) -> None:
        """
        Decrypt a Track with PlayReady DRM.
        Args:
            path: Path to the encrypted file to decrypt
        Raises:
            EnvironmentError if the required decryption executable could not be found.
            ValueError if the track has not yet been downloaded.
            SubprocessError if the decryption process returned a non-zero exit code.
        """
        if not self.content_keys:
            raise ValueError("Cannot decrypt a Track without any Content Keys...")
        if not binaries.ShakaPackager:
            raise EnvironmentError("Shaka Packager executable not found but is required.")

        if not path or not path.exists():
            raise ValueError("Tried to decrypt a file that does not exist.")

        decrypter = str(getattr(config, "decryption", "")).lower()

        if decrypter == "mp4decrypt":
            return self._decrypt_with_mp4decrypt(path)
        else:
            return self._decrypt_with_shaka_packager(path)

    def _decrypt_with_mp4decrypt(self, path: Path) -> None:
        """Decrypt using mp4decrypt"""
        if not binaries.Mp4decrypt:
            raise EnvironmentError("mp4decrypt executable not found but is required.")

        output_path = path.with_stem(f"{path.stem}_decrypted")

        # Build key arguments
        key_args = []
        for kid, key in self.content_keys.items():
            kid_hex = kid.hex if hasattr(kid, "hex") else str(kid).replace("-", "")
            key_hex = key if isinstance(key, str) else key.hex()
            key_args.extend(["--key", f"{kid_hex}:{key_hex}"])

        cmd = [
            str(binaries.Mp4decrypt),
            "--show-progress",
            *key_args,
            str(path),
            str(output_path),
        ]

        try:
            subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        except subprocess.CalledProcessError as e:
            error_msg = e.stderr if e.stderr else f"mp4decrypt failed with exit code {e.returncode}"
            raise subprocess.CalledProcessError(e.returncode, cmd, output=e.stdout, stderr=error_msg)

        if not output_path.exists():
            raise RuntimeError(f"mp4decrypt failed: output file {output_path} was not created")
        if output_path.stat().st_size == 0:
            raise RuntimeError(f"mp4decrypt failed: output file {output_path} is empty")

        path.unlink()
        shutil.move(output_path, path)

    def _decrypt_with_shaka_packager(self, path: Path) -> None:
        """Decrypt using Shaka Packager (original method)"""
        if not binaries.ShakaPackager:
            raise EnvironmentError("Shaka Packager executable not found but is required.")

        output_path = path.with_stem(f"{path.stem}_decrypted")
        config.directories.temp.mkdir(parents=True, exist_ok=True)
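The mp4decrypt invocation assembled above takes this shape on the command line (illustrative KID and key):

    # mp4decrypt --show-progress \
    #     --key 00000000000000000000000000000001:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa \
    #     encrypted.mp4 encrypted_decrypted.mp4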
@@ -185,7 +185,15 @@ class Widevine:
        if cert and hasattr(cdm, "set_service_certificate"):
            cdm.set_service_certificate(session_id, cert)

        cdm.parse_license(session_id, licence(challenge=cdm.get_license_challenge(session_id, self.pssh)))
        if hasattr(cdm, "set_required_kids"):
            cdm.set_required_kids(self.kids)

        challenge = cdm.get_license_challenge(session_id, self.pssh)

        if hasattr(cdm, "has_cached_keys") and cdm.has_cached_keys(session_id):
            pass
        else:
            cdm.parse_license(session_id, licence(challenge=challenge))

        self.content_keys = {key.kid: key.key.hex() for key in cdm.get_keys(session_id, "CONTENT")}
        if not self.content_keys:
@@ -213,10 +221,18 @@ class Widevine:
        if cert and hasattr(cdm, "set_service_certificate"):
            cdm.set_service_certificate(session_id, cert)

        cdm.parse_license(
            session_id,
            licence(session_id=session_id, challenge=cdm.get_license_challenge(session_id, self.pssh)),
        )
        if hasattr(cdm, "set_required_kids"):
            cdm.set_required_kids(self.kids)

        challenge = cdm.get_license_challenge(session_id, self.pssh)

        if hasattr(cdm, "has_cached_keys") and cdm.has_cached_keys(session_id):
            pass
        else:
            cdm.parse_license(
                session_id,
                licence(session_id=session_id, challenge=challenge),
            )

        self.content_keys = {key.kid: key.key.hex() for key in cdm.get_keys(session_id, "CONTENT")}
        if not self.content_keys:
@@ -230,19 +246,67 @@ class Widevine:
    def decrypt(self, path: Path) -> None:
        """
        Decrypt a Track with Widevine DRM.
        Args:
            path: Path to the encrypted file to decrypt
        Raises:
            EnvironmentError if the Shaka Packager executable could not be found.
            EnvironmentError if the required decryption executable could not be found.
            ValueError if the track has not yet been downloaded.
            SubprocessError if Shaka Packager returned a non-zero exit code.
            SubprocessError if the decryption process returned a non-zero exit code.
        """
        if not self.content_keys:
            raise ValueError("Cannot decrypt a Track without any Content Keys...")

        if not binaries.ShakaPackager:
            raise EnvironmentError("Shaka Packager executable not found but is required.")
        if not path or not path.exists():
            raise ValueError("Tried to decrypt a file that does not exist.")

        decrypter = str(getattr(config, "decryption", "")).lower()

        if decrypter == "mp4decrypt":
            return self._decrypt_with_mp4decrypt(path)
        else:
            return self._decrypt_with_shaka_packager(path)

    def _decrypt_with_mp4decrypt(self, path: Path) -> None:
        """Decrypt using mp4decrypt"""
        if not binaries.Mp4decrypt:
            raise EnvironmentError("mp4decrypt executable not found but is required.")

        output_path = path.with_stem(f"{path.stem}_decrypted")

        # Build key arguments
        key_args = []
        for kid, key in self.content_keys.items():
            kid_hex = kid.hex if hasattr(kid, "hex") else str(kid).replace("-", "")
            key_hex = key if isinstance(key, str) else key.hex()
            key_args.extend(["--key", f"{kid_hex}:{key_hex}"])

        cmd = [
            str(binaries.Mp4decrypt),
            "--show-progress",
            *key_args,
            str(path),
            str(output_path),
        ]

        try:
            subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        except subprocess.CalledProcessError as e:
            error_msg = e.stderr if e.stderr else f"mp4decrypt failed with exit code {e.returncode}"
            raise subprocess.CalledProcessError(e.returncode, cmd, output=e.stdout, stderr=error_msg)

        if not output_path.exists():
            raise RuntimeError(f"mp4decrypt failed: output file {output_path} was not created")
        if output_path.stat().st_size == 0:
            raise RuntimeError(f"mp4decrypt failed: output file {output_path} is empty")

        path.unlink()
        shutil.move(output_path, path)

    def _decrypt_with_shaka_packager(self, path: Path) -> None:
        """Decrypt using Shaka Packager (original method)"""
        if not binaries.ShakaPackager:
            raise EnvironmentError("Shaka Packager executable not found but is required.")

        output_path = path.with_stem(f"{path.stem}_decrypted")
        config.directories.temp.mkdir(parents=True, exist_ok=True)
@@ -15,6 +15,7 @@ from uuid import UUID
from zlib import crc32

import requests
from curl_cffi.requests import Session as CurlSession
from langcodes import Language, tag_is_valid
from lxml.etree import Element, ElementTree
from pyplayready.system.pssh import PSSH as PR_PSSH
@@ -47,7 +48,7 @@ class DASH:
        self.url = url

    @classmethod
    def from_url(cls, url: str, session: Optional[Session] = None, **args: Any) -> DASH:
    def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **args: Any) -> DASH:
        if not url:
            raise requests.URLRequired("DASH manifest URL must be provided for relative path computations.")
        if not isinstance(url, str):
@@ -55,8 +56,8 @@ class DASH:

        if not session:
            session = Session()
        elif not isinstance(session, Session):
            raise TypeError(f"Expected session to be a {Session}, not {session!r}")
        elif not isinstance(session, (Session, CurlSession)):
            raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")

        res = session.get(url, **args)
        if res.url != url:
@@ -103,6 +104,10 @@ class DASH:
                continue
            if next(iter(period.xpath("SegmentType/@value")), "content") != "content":
                continue
            if "urn:amazon:primevideo:cachingBreadth" in [
                x.get("schemeIdUri") for x in period.findall("SupplementalProperty")
            ]:
                continue

            for adaptation_set in period.findall("AdaptationSet"):
                if self.is_trick_mode(adaptation_set):
@@ -248,8 +253,8 @@ class DASH:
    ):
        if not session:
            session = Session()
        elif not isinstance(session, Session):
            raise TypeError(f"Expected session to be a {Session}, not {session!r}")
        elif not isinstance(session, (Session, CurlSession)):
            raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")

        if proxy:
            session.proxies.update({"all": proxy})
@@ -4,6 +4,7 @@ import base64
import html
import json
import logging
import os
import shutil
import subprocess
import sys
@@ -13,9 +14,10 @@ from typing import Any, Callable, Optional, Union
from urllib.parse import urljoin
from zlib import crc32

import httpx
import m3u8
import requests
from curl_cffi.requests import Response as CurlResponse
from curl_cffi.requests import Session as CurlSession
from langcodes import Language, tag_is_valid
from m3u8 import M3U8
from pyplayready.cdm import Cdm as PlayReadyCdm
@@ -34,7 +36,7 @@ from unshackle.core.utilities import get_extension, is_close_match, try_ensure_u


class HLS:
    def __init__(self, manifest: M3U8, session: Optional[Union[Session, httpx.Client]] = None):
    def __init__(self, manifest: M3U8, session: Optional[Union[Session, CurlSession]] = None):
        if not manifest:
            raise ValueError("HLS manifest must be provided.")
        if not isinstance(manifest, M3U8):
@@ -46,7 +48,7 @@ class HLS:
        self.session = session or Session()

    @classmethod
    def from_url(cls, url: str, session: Optional[Union[Session, httpx.Client]] = None, **args: Any) -> HLS:
    def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **args: Any) -> HLS:
        if not url:
            raise requests.URLRequired("HLS manifest URL must be provided.")
        if not isinstance(url, str):
@@ -54,22 +56,22 @@ class HLS:

        if not session:
            session = Session()
        elif not isinstance(session, (Session, httpx.Client)):
            raise TypeError(f"Expected session to be a {Session} or {httpx.Client}, not {session!r}")
        elif not isinstance(session, (Session, CurlSession)):
            raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")

        res = session.get(url, **args)

        # Handle both requests and httpx response objects
        # Handle requests and curl_cffi response objects
        if isinstance(res, requests.Response):
            if not res.ok:
                raise requests.ConnectionError("Failed to request the M3U(8) document.", response=res)
            content = res.text
        elif isinstance(res, httpx.Response):
            if res.status_code >= 400:
        elif isinstance(res, CurlResponse):
            if not res.ok:
                raise requests.ConnectionError("Failed to request the M3U(8) document.", response=res)
            content = res.text
        else:
            raise TypeError(f"Expected response to be a requests.Response or httpx.Response, not {type(res)}")
            raise TypeError(f"Expected response to be a requests.Response or curl_cffi.Response, not {type(res)}")

        master = m3u8.loads(content, uri=url)
@@ -228,7 +230,7 @@ class HLS:
        save_path: Path,
        save_dir: Path,
        progress: partial,
        session: Optional[Union[Session, httpx.Client]] = None,
        session: Optional[Union[Session, CurlSession]] = None,
        proxy: Optional[str] = None,
        max_workers: Optional[int] = None,
        license_widevine: Optional[Callable] = None,
@@ -237,15 +239,13 @@ class HLS:
    ) -> None:
        if not session:
            session = Session()
        elif not isinstance(session, (Session, httpx.Client)):
            raise TypeError(f"Expected session to be a {Session} or {httpx.Client}, not {session!r}")
        elif not isinstance(session, (Session, CurlSession)):
            raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")

        if proxy:
            # Handle proxies differently based on session type
            if isinstance(session, Session):
                session.proxies.update({"all": proxy})
            elif isinstance(session, httpx.Client):
                session.proxies = {"http://": proxy, "https://": proxy}

        log = logging.getLogger("HLS")

@@ -256,13 +256,8 @@ class HLS:
                log.error(f"Failed to request the invariant M3U8 playlist: {response.status_code}")
                sys.exit(1)
            playlist_text = response.text
        elif isinstance(response, httpx.Response):
            if response.status_code >= 400:
                log.error(f"Failed to request the invariant M3U8 playlist: {response.status_code}")
                sys.exit(1)
            playlist_text = response.text
        else:
            raise TypeError(f"Expected response to be a requests.Response or httpx.Response, not {type(response)}")
            raise TypeError(f"Expected response to be a requests.Response or curl_cffi.Response, not {type(response)}")

        master = m3u8.loads(playlist_text, uri=track.url)

@@ -444,7 +439,7 @@ class HLS:
            elif len(files) != range_len:
                raise ValueError(f"Missing {range_len - len(files)} segment files for {segment_range}...")

            if isinstance(drm, Widevine):
            if isinstance(drm, (Widevine, PlayReady)):
                # with widevine we can merge all segments and decrypt once
                merge(to=merged_path, via=files, delete=True, include_map_data=True)
                drm.decrypt(merged_path)
@@ -532,13 +527,9 @@ class HLS:
                    if isinstance(res, requests.Response):
                        res.raise_for_status()
                        init_content = res.content
                    elif isinstance(res, httpx.Response):
                        if res.status_code >= 400:
                            raise requests.HTTPError(f"HTTP Error: {res.status_code}", response=res)
                        init_content = res.content
                    else:
                        raise TypeError(
                            f"Expected response to be requests.Response or httpx.Response, not {type(res)}"
                            f"Expected response to be requests.Response or curl_cffi.Response, not {type(res)}"
                        )

                    map_data = (segment.init_section, init_content)
@@ -584,11 +575,24 @@ class HLS:
        if DOWNLOAD_LICENCE_ONLY.is_set():
            return

        if segment_save_dir.exists():
            segment_save_dir.rmdir()
        def find_segments_recursively(directory: Path) -> list[Path]:
            """Find all segment files recursively in any directory structure created by downloaders."""
            segments = []

            # First check direct files in the directory
            if directory.exists():
                segments.extend([x for x in directory.iterdir() if x.is_file()])

            # If no direct files, recursively search subdirectories
            if not segments:
                for subdir in directory.iterdir():
                    if subdir.is_dir():
                        segments.extend(find_segments_recursively(subdir))

            return sorted(segments)

        # finally merge all the discontinuity save files together to the final path
        segments_to_merge = [x for x in sorted(save_dir.iterdir()) if x.is_file()]
        segments_to_merge = find_segments_recursively(save_dir)
        if len(segments_to_merge) == 1:
            shutil.move(segments_to_merge[0], save_path)
        else:
@@ -601,9 +605,16 @@ class HLS:
                    discontinuity_data = discontinuity_file.read_bytes()
                    f.write(discontinuity_data)
                    f.flush()
                    os.fsync(f.fileno())
                    discontinuity_file.unlink()

        save_dir.rmdir()
        # Clean up empty segment directory
        if save_dir.exists() and save_dir.name.endswith("_segments"):
            try:
                save_dir.rmdir()
            except OSError:
                # Directory might not be empty, try removing recursively
                shutil.rmtree(save_dir, ignore_errors=True)

        progress(downloaded="Downloaded")
@@ -613,45 +624,80 @@ class HLS:
    @staticmethod
    def merge_segments(segments: list[Path], save_path: Path) -> int:
        """
        Concatenate Segments by first demuxing with FFmpeg.
        Concatenate Segments using FFmpeg concat with binary fallback.

        Returns the file size of the merged file.
        """
        if not binaries.FFMPEG:
            raise EnvironmentError("FFmpeg executable was not found but is required to merge HLS segments.")

        demuxer_file = segments[0].parent / "ffmpeg_concat_demuxer.txt"
        demuxer_file.write_text("\n".join([f"file '{segment}'" for segment in segments]))

        subprocess.check_call(
            [
                binaries.FFMPEG,
                "-hide_banner",
                "-loglevel",
                "panic",
                "-f",
                "concat",
                "-safe",
                "0",
                "-i",
                demuxer_file,
                "-map",
                "0",
                "-c",
                "copy",
                save_path,
            ]
        )
        demuxer_file.unlink()

        # Track segment directories for cleanup
        segment_dirs = set()
        for segment in segments:
            segment.unlink()
            # Track all parent directories that contain segments
            current_dir = segment.parent
            while current_dir.name and "_segments" in str(current_dir):
                segment_dirs.add(current_dir)
                current_dir = current_dir.parent

        def cleanup_segments_and_dirs():
            """Clean up segments and directories after successful merge."""
            for segment in segments:
                segment.unlink(missing_ok=True)
            for segment_dir in segment_dirs:
                if segment_dir.exists():
                    try:
                        shutil.rmtree(segment_dir)
                    except OSError:
                        pass  # Directory cleanup failed, but merge succeeded

        # Try FFmpeg concat first (preferred method)
        if binaries.FFMPEG:
            try:
                demuxer_file = save_path.parent / f"ffmpeg_concat_demuxer_{save_path.stem}.txt"
                demuxer_file.write_text("\n".join([f"file '{segment.absolute()}'" for segment in segments]))

                subprocess.check_call(
                    [
                        binaries.FFMPEG,
                        "-hide_banner",
                        "-loglevel",
                        "error",
                        "-f",
                        "concat",
                        "-safe",
                        "0",
                        "-i",
                        demuxer_file,
                        "-map",
                        "0",
                        "-c",
                        "copy",
                        save_path,
                    ],
                    timeout=300,  # 5 minute timeout
                )
                demuxer_file.unlink(missing_ok=True)
                cleanup_segments_and_dirs()
                return save_path.stat().st_size

            except (subprocess.CalledProcessError, subprocess.TimeoutExpired, OSError) as e:
                # FFmpeg failed, clean up demuxer file and fall back to binary concat
                logging.getLogger("HLS").debug(f"FFmpeg concat failed ({e}), falling back to binary concatenation")
                demuxer_file.unlink(missing_ok=True)
                # Remove partial output file if it exists
                save_path.unlink(missing_ok=True)

        # Fallback: Binary concatenation
        logging.getLogger("HLS").debug(f"Using binary concatenation for {len(segments)} segments")
        with open(save_path, "wb") as output_file:
            for segment in segments:
                with open(segment, "rb") as segment_file:
                    output_file.write(segment_file.read())

        cleanup_segments_and_dirs()
        return save_path.stat().st_size
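The demuxer file written above is FFmpeg's concat list format, one entry per segment, stream-copied into a single output (illustrative paths):

    # ffmpeg_concat_demuxer_<stem>.txt
    #     file '/tmp/track_segments/00000.ts'
    #     file '/tmp/track_segments/00001.ts'
    # invoked as: ffmpeg -f concat -safe 0 -i <list.txt> -map 0 -c copy <merged.ts>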
    @staticmethod
    def parse_session_data_keys(
        manifest: M3U8, session: Optional[Union[Session, httpx.Client]] = None
        manifest: M3U8, session: Optional[Union[Session, CurlSession]] = None
    ) -> list[m3u8.model.Key]:
        """Parse `com.apple.hls.keys` session data and return Key objects."""
        keys: list[m3u8.model.Key] = []
@@ -742,7 +788,8 @@ class HLS:

    @staticmethod
    def get_drm(
        key: Union[m3u8.model.SessionKey, m3u8.model.Key], session: Optional[Union[Session, httpx.Client]] = None
        key: Union[m3u8.model.SessionKey, m3u8.model.Key],
        session: Optional[Union[Session, CurlSession]] = None,
    ) -> DRM_T:
        """
        Convert HLS EXT-X-KEY data to an initialized DRM object.
@@ -754,8 +801,8 @@ class HLS:

        Raises a NotImplementedError if the key system is not supported.
        """
        if not isinstance(session, (Session, httpx.Client, type(None))):
            raise TypeError(f"Expected session to be a {Session} or {httpx.Client}, not {type(session)}")
        if not isinstance(session, (Session, CurlSession, type(None))):
            raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {type(session)}")
        if not session:
            session = Session()
@@ -10,6 +10,7 @@ from pathlib import Path
from typing import Any, Callable, Optional, Union

import requests
from curl_cffi.requests import Session as CurlSession
from langcodes import Language, tag_is_valid
from lxml.etree import Element
from pyplayready.system.pssh import PSSH as PR_PSSH
@@ -34,11 +35,13 @@ class ISM:
        self.url = url

    @classmethod
    def from_url(cls, url: str, session: Optional[Session] = None, **kwargs: Any) -> "ISM":
    def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **kwargs: Any) -> "ISM":
        if not url:
            raise requests.URLRequired("ISM manifest URL must be provided")
        if not session:
            session = Session()
        elif not isinstance(session, (Session, CurlSession)):
            raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}")
        res = session.get(url, **kwargs)
        if res.url != url:
            url = res.url
@@ -4,15 +4,10 @@ from __future__ import annotations

from typing import Optional, Union

import httpx
import m3u8
from pyplayready.cdm import Cdm as PlayReadyCdm
from pyplayready.system.pssh import PSSH as PR_PSSH
from pywidevine.cdm import Cdm as WidevineCdm
from pywidevine.pssh import PSSH as WV_PSSH
from curl_cffi.requests import Session as CurlSession
from requests import Session

from unshackle.core.drm import PlayReady, Widevine
from unshackle.core.manifests.hls import HLS
from unshackle.core.tracks import Tracks

@@ -21,54 +16,17 @@ def parse(
    master: m3u8.M3U8,
    language: str,
    *,
    session: Optional[Union[Session, httpx.Client]] = None,
    session: Optional[Union[Session, CurlSession]] = None,
) -> Tracks:
    """Parse a variant playlist to ``Tracks`` with DRM information."""
    """Parse a variant playlist to ``Tracks`` with basic information, defer DRM loading."""
    tracks = HLS(master, session=session).to_tracks(language)

    need_wv = not any(isinstance(d, Widevine) for t in tracks for d in (t.drm or []))
    need_pr = not any(isinstance(d, PlayReady) for t in tracks for d in (t.drm or []))
    bool(master.session_keys or HLS.parse_session_data_keys(master, session or Session()))

    if (need_wv or need_pr) and tracks.videos:
        if not session:
            session = Session()

        session_keys = list(master.session_keys or [])
        session_keys.extend(HLS.parse_session_data_keys(master, session))

        for drm_obj in HLS.get_all_drm(session_keys):
            if need_wv and isinstance(drm_obj, Widevine):
                for t in tracks.videos + tracks.audio:
                    t.drm = [d for d in (t.drm or []) if not isinstance(d, Widevine)] + [drm_obj]
                need_wv = False
            elif need_pr and isinstance(drm_obj, PlayReady):
                for t in tracks.videos + tracks.audio:
                    t.drm = [d for d in (t.drm or []) if not isinstance(d, PlayReady)] + [drm_obj]
                need_pr = False
            if not need_wv and not need_pr:
                break

    if (need_wv or need_pr) and tracks.videos:
        first_video = tracks.videos[0]
        playlist = m3u8.load(first_video.url)
        for key in playlist.keys or []:
            if not key or not key.keyformat:
                continue
            fmt = key.keyformat.lower()
            if need_wv and fmt == WidevineCdm.urn:
                pssh_b64 = key.uri.split(",")[-1]
                drm = Widevine(pssh=WV_PSSH(pssh_b64))
                for t in tracks.videos + tracks.audio:
                    t.drm = [d for d in (t.drm or []) if not isinstance(d, Widevine)] + [drm]
                need_wv = False
            elif need_pr and (fmt == PlayReadyCdm or "com.microsoft.playready" in fmt):
                pssh_b64 = key.uri.split(",")[-1]
                drm = PlayReady(pssh=PR_PSSH(pssh_b64), pssh_b64=pssh_b64)
                for t in tracks.videos + tracks.audio:
                    t.drm = [d for d in (t.drm or []) if not isinstance(d, PlayReady)] + [drm]
                need_pr = False
            if not need_wv and not need_pr:
                break
    if True:
        for t in tracks.videos + tracks.audio:
            t.needs_drm_loading = True
            t.session = session

    return tracks
@@ -1,5 +1,7 @@
from .basic import Basic
from .hola import Hola
from .nordvpn import NordVPN
from .surfsharkvpn import SurfsharkVPN
from .windscribevpn import WindscribeVPN

__all__ = ("Basic", "Hola", "NordVPN")
__all__ = ("Basic", "Hola", "NordVPN", "SurfsharkVPN", "WindscribeVPN")
unshackle/core/proxies/surfsharkvpn.py
@@ -0,0 +1,124 @@
import json
import random
import re
from typing import Optional

import requests

from unshackle.core.proxies.proxy import Proxy


class SurfsharkVPN(Proxy):
    def __init__(self, username: str, password: str, server_map: Optional[dict[str, int]] = None):
        """
        Proxy Service using SurfsharkVPN Service Credentials.

        A username and password must be provided. These are Service Credentials, not your Login Credentials.
        The Service Credentials can be found here: https://my.surfshark.com/vpn/manual-setup/main/openvpn
        """
        if not username:
            raise ValueError("No Username was provided to the SurfsharkVPN Proxy Service.")
        if not password:
            raise ValueError("No Password was provided to the SurfsharkVPN Proxy Service.")
        if not re.match(r"^[a-z0-9]{48}$", username + password, re.IGNORECASE) or "@" in username:
            raise ValueError(
                "The Username and Password must be SurfsharkVPN Service Credentials, not your Login Credentials. "
                "The Service Credentials can be found here: https://my.surfshark.com/vpn/manual-setup/main/openvpn"
            )

        if server_map is not None and not isinstance(server_map, dict):
            raise TypeError(f"Expected server_map to be a dict mapping a region to a server ID, not '{server_map!r}'.")

        self.username = username
        self.password = password
        self.server_map = server_map or {}

        self.countries = self.get_countries()

    def __repr__(self) -> str:
        countries = len(set(x.get("country") for x in self.countries if x.get("country")))
        servers = sum(1 for x in self.countries if x.get("connectionName"))

        return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})"

    def get_proxy(self, query: str) -> Optional[str]:
        """
        Get an HTTP(SSL) proxy URI for a SurfsharkVPN server.
        """
        query = query.lower()
        if re.match(r"^[a-z]{2}\d+$", query):
            # country and surfsharkvpn server id, e.g., au-per, be-anr, us-bos
            hostname = f"{query}.prod.surfshark.com"
        else:
            if query.isdigit():
                # country id
                country = self.get_country(by_id=int(query))
            elif re.match(r"^[a-z]+$", query):
                # country code
                country = self.get_country(by_code=query)
            else:
                raise ValueError(f"The query provided is unsupported and unrecognized: {query}")
            if not country:
                # SurfsharkVPN doesn't have servers in this region
                return

            server_mapping = self.server_map.get(country["countryCode"].lower())
            if server_mapping:
                # country was set to a specific server ID in config
                hostname = f"{country['code'].lower()}{server_mapping}.prod.surfshark.com"
            else:
                # get a random server ID
                random_server = self.get_random_server(country["countryCode"])
                if not random_server:
                    raise ValueError(
                        f"The SurfsharkVPN Country {query} currently has no random servers. "
                        "Try again later. If the issue persists, double-check the query."
                    )
                hostname = random_server

        return f"https://{self.username}:{self.password}@{hostname}:443"

    def get_country(self, by_id: Optional[int] = None, by_code: Optional[str] = None) -> Optional[dict]:
        """Search for a Country and its metadata."""
        if all(x is None for x in (by_id, by_code)):
            raise ValueError("At least one search query must be made.")

        for country in self.countries:
            if all(
                [
                    by_id is None or country["id"] == int(by_id),
                    by_code is None or country["countryCode"] == by_code.upper(),
                ]
            ):
                return country

    def get_random_server(self, country_id: str):
        """
        Get a random server for a Country.

        Note: There may not always be more than one recommended server.
        """
        country = [x["connectionName"] for x in self.countries if x["countryCode"].lower() == country_id.lower()]
        try:
            return random.choice(country)
        except Exception:
            raise ValueError("Could not get a random server from the countries list.")

    @staticmethod
    def get_countries() -> list[dict]:
        """Get a list of available Countries and their metadata."""
        res = requests.get(
            url="https://api.surfshark.com/v3/server/clusters/all",
            headers={
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
                "Content-Type": "application/json",
            },
        )
        if not res.ok:
            raise ValueError(f"Failed to get a list of SurfsharkVPN countries [{res.status_code}]")

        try:
            return res.json()
        except json.JSONDecodeError:
            raise ValueError("Could not decode list of SurfsharkVPN countries, not JSON data.")
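Illustrative usage (hedged; the credentials are placeholders for real Surfshark service credentials):

    proxy_provider = SurfsharkVPN(username="abc123...", password="xyz789...")
    uri = proxy_provider.get_proxy("us")   # random server chosen by country code
    uri = proxy_provider.get_proxy("233")  # lookup by numeric country id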
unshackle/core/proxies/windscribevpn.py
@@ -0,0 +1,99 @@
import json
import random
import re
from typing import Optional

import requests

from unshackle.core.proxies.proxy import Proxy


class WindscribeVPN(Proxy):
    def __init__(self, username: str, password: str, server_map: Optional[dict[str, str]] = None):
        """
        Proxy Service using WindscribeVPN Service Credentials.

        A username and password must be provided. These are Service Credentials, not your Login Credentials.
        The Service Credentials can be found here: https://windscribe.com/getconfig/openvpn
        """
        if not username:
            raise ValueError("No Username was provided to the WindscribeVPN Proxy Service.")
        if not password:
            raise ValueError("No Password was provided to the WindscribeVPN Proxy Service.")

        if server_map is not None and not isinstance(server_map, dict):
            raise TypeError(f"Expected server_map to be a dict mapping a region to a hostname, not '{server_map!r}'.")

        self.username = username
        self.password = password
        self.server_map = server_map or {}

        self.countries = self.get_countries()

    def __repr__(self) -> str:
        countries = len(set(x.get("country_code") for x in self.countries if x.get("country_code")))
        # Count one per host entry; len(host) would count a host dict's keys instead.
        servers = sum(
            len(group.get("hosts", []))
            for location in self.countries
            for group in location.get("groups", [])
        )

        return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})"

    def get_proxy(self, query: str) -> Optional[str]:
        """
        Get an HTTPS proxy URI for a WindscribeVPN server.
        """
        query = query.lower()

        if query in self.server_map:
            hostname = self.server_map[query]
        else:
            if re.match(r"^[a-z]+$", query):
                hostname = self.get_random_server(query)
            else:
                raise ValueError(f"The query provided is unsupported and unrecognized: {query}")

        if not hostname:
            return None

        return f"https://{self.username}:{self.password}@{hostname}:443"

    def get_random_server(self, country_code: str) -> Optional[str]:
        """
        Get a random server hostname for a country.

        Returns None if no servers are available for the country.
        """
        for location in self.countries:
            if location.get("country_code", "").lower() == country_code.lower():
                hostnames = []
                for group in location.get("groups", []):
                    for host in group.get("hosts", []):
                        if hostname := host.get("hostname"):
                            hostnames.append(hostname)

                if hostnames:
                    return random.choice(hostnames)

        return None

    @staticmethod
    def get_countries() -> list[dict]:
        """Get a list of available Countries and their metadata."""
        res = requests.get(
            url="https://assets.windscribe.com/serverlist/firefox/1/1",
            headers={
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
                "Content-Type": "application/json",
            },
        )
        if not res.ok:
            raise ValueError(f"Failed to get a list of WindscribeVPN locations [{res.status_code}]")

        try:
            data = res.json()
            return data.get("data", [])
        except json.JSONDecodeError:
            raise ValueError("Could not decode list of WindscribeVPN locations, not JSON data.")
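
A minimal usage sketch, not part of the diff; credentials, counts, and hostname are illustrative:

from unshackle.core.proxies.windscribevpn import WindscribeVPN

proxy_provider = WindscribeVPN(username="svc_user", password="svc_pass")
print(repr(proxy_provider))           # e.g. "69 Countries (400 Servers)"
uri = proxy_provider.get_proxy("us")  # random US server, or None if none exist
# uri -> "https://svc_user:svc_pass@<hostname>:443"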
427
unshackle/core/remote_service.py
Normal file
@@ -0,0 +1,427 @@
"""Remote service implementation for connecting to remote unshackle servers."""

import logging
from collections.abc import Generator
from http.cookiejar import CookieJar
from typing import Any, Dict, Optional, Union

import click
import requests
from rich.padding import Padding
from rich.rule import Rule

from unshackle.core.api.session_serializer import deserialize_session
from unshackle.core.console import console
from unshackle.core.credential import Credential
from unshackle.core.search_result import SearchResult
from unshackle.core.titles import Episode, Movie, Movies, Series
from unshackle.core.tracks import Chapter, Chapters, Tracks
from unshackle.core.tracks.audio import Audio
from unshackle.core.tracks.subtitle import Subtitle
from unshackle.core.tracks.video import Video


class RemoteService:
    """
    Remote Service wrapper that connects to a remote unshackle server.

    This class mimics the Service interface but delegates all operations
    to a remote unshackle server via API calls. It receives session data
    from the remote server which is then used locally for downloading.
    """

    ALIASES: tuple[str, ...] = ()
    GEOFENCE: tuple[str, ...] = ()

    def __init__(
        self,
        ctx: click.Context,
        remote_url: str,
        api_key: str,
        service_tag: str,
        service_metadata: Dict[str, Any],
        **kwargs,
    ):
        """
        Initialize remote service.

        Args:
            ctx: Click context
            remote_url: Base URL of the remote unshackle server
            api_key: API key for authentication
            service_tag: The service tag on the remote server (e.g., "DSNP")
            service_metadata: Metadata about the service from remote discovery
            **kwargs: Additional service-specific parameters
        """
        console.print(Padding(Rule(f"[rule.text]Remote Service: {service_tag}"), (1, 2)))

        self.log = logging.getLogger(f"RemoteService.{service_tag}")
        self.remote_url = remote_url.rstrip("/")
        self.api_key = api_key
        self.service_tag = service_tag
        self.service_metadata = service_metadata
        self.ctx = ctx
        self.kwargs = kwargs

        # Set GEOFENCE and ALIASES from metadata
        if "geofence" in service_metadata:
            self.GEOFENCE = tuple(service_metadata["geofence"])
        if "aliases" in service_metadata:
            self.ALIASES = tuple(service_metadata["aliases"])

        # Create a session for API calls to the remote server
        self.api_session = requests.Session()
        self.api_session.headers.update({"X-API-Key": self.api_key, "Content-Type": "application/json"})

        # This session will receive data from remote for actual downloading
        self.session = requests.Session()

        # Store authentication state
        self.authenticated = False
        self.credential = None
        self.cookies_content = None  # Raw cookie file content to send to remote

    def _make_request(self, endpoint: str, data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """
        Make an API request to the remote server.

        Automatically includes cookies and credentials in the request.

        Args:
            endpoint: API endpoint path (e.g., "/api/remote/DSNP/titles")
            data: Optional JSON data to send

        Returns:
            Response JSON data

        Raises:
            ConnectionError: If the request fails
        """
        url = f"{self.remote_url}{endpoint}"

        # Ensure data is a dictionary
        if data is None:
            data = {}

        # Add cookies and credentials to request if available
        if self.cookies_content:
            data["cookies"] = self.cookies_content

        if self.credential:
            data["credential"] = {"username": self.credential.username, "password": self.credential.password}

        try:
            if data:
                response = self.api_session.post(url, json=data)
            else:
                response = self.api_session.get(url)

            response.raise_for_status()
            result = response.json()

            # Apply session data if present
            if "session" in result:
                deserialize_session(result["session"], self.session)

            return result

        except requests.RequestException as e:
            self.log.error(f"Remote API request failed: {e}")
            raise ConnectionError(f"Failed to communicate with remote server: {e}")

    def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
        """
        Prepare authentication data to send to remote service.

        Stores cookies and credentials to send with each API request.
        The remote server will use these for authentication.

        Args:
            cookies: Cookie jar from local configuration
            credential: Credentials from local configuration
        """
        self.log.info("Preparing authentication for remote server...")
        self.credential = credential

        # Read cookies file content if cookies provided
        if cookies and hasattr(cookies, "filename") and cookies.filename:
            try:
                from pathlib import Path

                cookie_file = Path(cookies.filename)
                if cookie_file.exists():
                    self.cookies_content = cookie_file.read_text()
                    self.log.info(f"Loaded cookies from {cookie_file}")
            except Exception as e:
                self.log.warning(f"Could not read cookie file: {e}")

        self.authenticated = True
        self.log.info("Authentication data ready for remote server")

    def search(self, query: Optional[str] = None) -> Generator[SearchResult, None, None]:
        """
        Search for content on the remote service.

        Args:
            query: Search query string

        Yields:
            SearchResult objects
        """
        if query is None:
            query = self.kwargs.get("query", "")

        self.log.info(f"Searching remote service for: {query}")

        data = {"query": query}

        # Add any additional parameters
        if hasattr(self.ctx, "params"):
            if self.ctx.params.get("proxy"):
                data["proxy"] = self.ctx.params["proxy"]
            if self.ctx.params.get("no_proxy"):
                data["no_proxy"] = True

        response = self._make_request(f"/api/remote/{self.service_tag}/search", data)

        if response.get("status") == "success" and "results" in response:
            for result in response["results"]:
                yield SearchResult(
                    id_=result["id"],
                    title=result["title"],
                    description=result.get("description"),
                    label=result.get("label"),
                    url=result.get("url"),
                )

    def get_titles(self) -> Union[Movies, Series]:
        """
        Get titles from the remote service.

        Returns:
            Movies or Series object containing title information
        """
        title = self.kwargs.get("title")

        if not title:
            raise ValueError("No title provided")

        self.log.info(f"Getting titles from remote service for: {title}")

        data = {"title": title}

        # Add additional parameters
        for key, value in self.kwargs.items():
            if key not in ["title"]:
                data[key] = value

        # Add context parameters
        if hasattr(self.ctx, "params"):
            if self.ctx.params.get("proxy"):
                data["proxy"] = self.ctx.params["proxy"]
            if self.ctx.params.get("no_proxy"):
                data["no_proxy"] = True

        response = self._make_request(f"/api/remote/{self.service_tag}/titles", data)

        if response.get("status") != "success" or "titles" not in response:
            raise ValueError(f"Failed to get titles from remote: {response.get('message', 'Unknown error')}")

        titles_data = response["titles"]

        # Deserialize titles
        titles = []
        for title_info in titles_data:
            if title_info["type"] == "movie":
                titles.append(
                    Movie(
                        id_=title_info.get("id", title),
                        service=self.__class__,
                        name=title_info["name"],
                        year=title_info.get("year"),
                        data=title_info,
                    )
                )
            elif title_info["type"] == "episode":
                titles.append(
                    Episode(
                        id_=title_info.get("id", title),
                        service=self.__class__,
                        title=title_info.get("series_title", title_info["name"]),
                        season=title_info.get("season", 0),
                        number=title_info.get("number", 0),
                        name=title_info.get("name"),
                        year=title_info.get("year"),
                        data=title_info,
                    )
                )

        # Return appropriate container
        if titles and isinstance(titles[0], Episode):
            return Series(titles)
        else:
            return Movies(titles)

    def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
        """
        Get tracks from the remote service.

        Args:
            title: Title object to get tracks for

        Returns:
            Tracks object containing video, audio, and subtitle tracks
        """
        self.log.info(f"Getting tracks from remote service for: {title}")

        title_input = self.kwargs.get("title")
        data = {"title": title_input}

        # Add episode information if applicable
        if isinstance(title, Episode):
            data["season"] = title.season
            data["episode"] = title.number

        # Add additional parameters
        for key, value in self.kwargs.items():
            if key not in ["title"]:
                data[key] = value

        # Add context parameters
        if hasattr(self.ctx, "params"):
            if self.ctx.params.get("proxy"):
                data["proxy"] = self.ctx.params["proxy"]
            if self.ctx.params.get("no_proxy"):
                data["no_proxy"] = True

        response = self._make_request(f"/api/remote/{self.service_tag}/tracks", data)

        if response.get("status") != "success":
            raise ValueError(f"Failed to get tracks from remote: {response.get('message', 'Unknown error')}")

        # Handle multiple episodes response
        if "episodes" in response:
            # For multiple episodes, return tracks for the matching title
            for episode_data in response["episodes"]:
                episode_title = episode_data["title"]
                if (
                    isinstance(title, Episode)
                    and episode_title.get("season") == title.season
                    and episode_title.get("number") == title.number
                ):
                    return self._deserialize_tracks(episode_data, title)

            raise ValueError(f"Could not find tracks for {title.season}x{title.number} in remote response")

        # Single title response
        return self._deserialize_tracks(response, title)

    def _deserialize_tracks(self, data: Dict[str, Any], title: Union[Movie, Episode]) -> Tracks:
        """
        Deserialize tracks from API response.

        Args:
            data: Track data from API
            title: Title object these tracks belong to

        Returns:
            Tracks object
        """
        tracks = Tracks()

        # Deserialize video tracks
        for video_data in data.get("video", []):
            video = Video(
                id_=video_data["id"],
                url="",  # URL will be populated during download from manifests
                codec=Video.Codec[video_data["codec"]],
                bitrate=video_data.get("bitrate", 0) * 1000 if video_data.get("bitrate") else None,
                width=video_data.get("width"),
                height=video_data.get("height"),
                fps=video_data.get("fps"),
                range_=Video.Range[video_data["range"]] if video_data.get("range") else None,
                language=video_data.get("language"),
                drm=video_data.get("drm"),
            )
            tracks.add(video)

        # Deserialize audio tracks
        for audio_data in data.get("audio", []):
            audio = Audio(
                id_=audio_data["id"],
                url="",  # URL will be populated during download
                codec=Audio.Codec[audio_data["codec"]],
                bitrate=audio_data.get("bitrate", 0) * 1000 if audio_data.get("bitrate") else None,
                channels=audio_data.get("channels"),
                language=audio_data.get("language"),
                descriptive=audio_data.get("descriptive", False),
                drm=audio_data.get("drm"),
            )
            if audio_data.get("atmos"):
                audio.atmos = True
            tracks.add(audio)

        # Deserialize subtitle tracks
        for subtitle_data in data.get("subtitles", []):
            subtitle = Subtitle(
                id_=subtitle_data["id"],
                url="",  # URL will be populated during download
                codec=Subtitle.Codec[subtitle_data["codec"]],
                language=subtitle_data.get("language"),
                forced=subtitle_data.get("forced", False),
                sdh=subtitle_data.get("sdh", False),
                cc=subtitle_data.get("cc", False),
            )
            tracks.add(subtitle)

        return tracks

    def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
        """
        Get chapters from the remote service.

        Args:
            title: Title object to get chapters for

        Returns:
            Chapters object
        """
        self.log.info(f"Getting chapters from remote service for: {title}")

        title_input = self.kwargs.get("title")
        data = {"title": title_input}

        # Add episode information if applicable
        if isinstance(title, Episode):
            data["season"] = title.season
            data["episode"] = title.number

        # Add context parameters
        if hasattr(self.ctx, "params"):
            if self.ctx.params.get("proxy"):
                data["proxy"] = self.ctx.params["proxy"]
            if self.ctx.params.get("no_proxy"):
                data["no_proxy"] = True

        response = self._make_request(f"/api/remote/{self.service_tag}/chapters", data)

        if response.get("status") != "success":
            self.log.warning(f"Failed to get chapters from remote: {response.get('message', 'Unknown error')}")
            return Chapters()

        chapters = Chapters()
        for chapter_data in response.get("chapters", []):
            chapters.add(Chapter(timestamp=chapter_data["timestamp"], name=chapter_data.get("name")))

        return chapters

    @staticmethod
    def get_session() -> requests.Session:
        """
        Create a session for the remote service.

        Returns:
            A requests.Session object
        """
        session = requests.Session()
        return session
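
A minimal sketch, not part of the diff, of driving RemoteService by hand; the URL, API key, tag, and metadata are illustrative, and discovery (next file) normally builds these instances for you:

import click

from unshackle.core.remote_service import RemoteService

ctx = click.Context(click.Command("dl"))  # stand-in for the real CLI context
svc = RemoteService(
    ctx=ctx,
    remote_url="https://unshackle.example:8000",
    api_key="secret",
    service_tag="DSNP",
    service_metadata={"geofence": ["us"]},
    title="some-title-id",
)
svc.authenticate()                 # stages cookies/credentials for each call
titles = svc.get_titles()          # Movies or Series rebuilt from JSON
tracks = svc.get_tracks(titles[0])  # local session primed via "session" blobs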
245
unshackle/core/remote_services.py
Normal file
@@ -0,0 +1,245 @@
"""Remote service discovery and management."""

import logging
from pathlib import Path
from typing import Any, Dict, List, Optional

import requests

from unshackle.core.config import config
from unshackle.core.remote_service import RemoteService

log = logging.getLogger("RemoteServices")


class RemoteServiceManager:
    """
    Manages discovery and registration of remote services.

    This class connects to configured remote unshackle servers,
    discovers available services, and creates RemoteService instances
    that can be used like local services.
    """

    def __init__(self):
        """Initialize the remote service manager."""
        self.remote_services: Dict[str, type] = {}
        self.remote_configs: List[Dict[str, Any]] = []

    def discover_services(self) -> None:
        """
        Discover services from all configured remote servers.

        Reads the remote_services configuration, connects to each server,
        retrieves available services, and creates RemoteService classes
        for each discovered service.
        """
        if not config.remote_services:
            log.debug("No remote services configured")
            return

        log.info(f"Discovering services from {len(config.remote_services)} remote server(s)...")

        for remote_config in config.remote_services:
            try:
                self._discover_from_server(remote_config)
            except Exception as e:
                log.error(f"Failed to discover services from {remote_config.get('url')}: {e}")
                continue

        log.info(f"Discovered {len(self.remote_services)} remote service(s)")

    def _discover_from_server(self, remote_config: Dict[str, Any]) -> None:
        """
        Discover services from a single remote server.

        Args:
            remote_config: Configuration for the remote server
                           (must contain 'url' and 'api_key')
        """
        url = remote_config.get("url", "").rstrip("/")
        api_key = remote_config.get("api_key", "")
        server_name = remote_config.get("name", url)

        if not url:
            log.warning("Remote service configuration missing 'url', skipping")
            return

        if not api_key:
            log.warning(f"Remote service {url} missing 'api_key', skipping")
            return

        log.info(f"Connecting to remote server: {server_name}")

        try:
            # Query the remote server for available services
            response = requests.get(
                f"{url}/api/remote/services",
                headers={"X-API-Key": api_key, "Content-Type": "application/json"},
                timeout=10,
            )

            response.raise_for_status()
            data = response.json()

            if data.get("status") != "success" or "services" not in data:
                log.error(f"Invalid response from {url}: {data}")
                return

            services = data["services"]
            log.info(f"Found {len(services)} service(s) on {server_name}")

            # Create RemoteService classes for each service
            for service_info in services:
                self._register_remote_service(url, api_key, service_info, server_name)

        except requests.RequestException as e:
            log.error(f"Failed to connect to remote server {url}: {e}")
            raise

    def _register_remote_service(
        self, remote_url: str, api_key: str, service_info: Dict[str, Any], server_name: str
    ) -> None:
        """
        Register a remote service as a local service class.

        Args:
            remote_url: Base URL of the remote server
            api_key: API key for authentication
            service_info: Service metadata from the remote server
            server_name: Friendly name of the remote server
        """
        service_tag = service_info.get("tag")
        if not service_tag:
            log.warning(f"Service info missing 'tag': {service_info}")
            return

        # Create a unique tag for the remote service
        # Use "remote_" prefix to distinguish from local services
        remote_tag = f"remote_{service_tag}"

        # Check if this remote service is already registered
        if remote_tag in self.remote_services:
            log.debug(f"Remote service {remote_tag} already registered, skipping")
            return

        log.info(f"Registering remote service: {remote_tag} from {server_name}")

        # Create a dynamic class that inherits from RemoteService
        # This allows us to create instances with the cli() method for Click integration
        class DynamicRemoteService(RemoteService):
            """Dynamically created remote service class."""

            def __init__(self, ctx, **kwargs):
                super().__init__(
                    ctx=ctx,
                    remote_url=remote_url,
                    api_key=api_key,
                    service_tag=service_tag,
                    service_metadata=service_info,
                    **kwargs,
                )

            @staticmethod
            def cli():
                """CLI method for Click integration."""
                import click

                # Create a dynamic Click command for this service
                @click.command(
                    name=remote_tag,
                    short_help=f"Remote: {service_info.get('help', service_tag)}",
                    help=service_info.get("help", f"Remote service for {service_tag}"),
                )
                @click.argument("title", type=str, required=False)
                @click.option("-q", "--query", type=str, help="Search query")
                @click.pass_context
                def remote_service_cli(ctx, title=None, query=None, **kwargs):
                    # Combine title and kwargs
                    params = {**kwargs}
                    if title:
                        params["title"] = title
                    if query:
                        params["query"] = query

                    return DynamicRemoteService(ctx, **params)

                return remote_service_cli

        # Set class name for better debugging
        DynamicRemoteService.__name__ = remote_tag
        DynamicRemoteService.__module__ = "unshackle.remote_services"

        # Set GEOFENCE and ALIASES
        if "geofence" in service_info:
            DynamicRemoteService.GEOFENCE = tuple(service_info["geofence"])
        if "aliases" in service_info:
            # Add "remote_" prefix to aliases too
            DynamicRemoteService.ALIASES = tuple(f"remote_{alias}" for alias in service_info["aliases"])

        # Register the service
        self.remote_services[remote_tag] = DynamicRemoteService

    def get_service(self, tag: str) -> Optional[type]:
        """
        Get a remote service class by tag.

        Args:
            tag: Service tag (e.g., "remote_DSNP")

        Returns:
            RemoteService class or None if not found
        """
        return self.remote_services.get(tag)

    def get_all_services(self) -> Dict[str, type]:
        """
        Get all registered remote services.

        Returns:
            Dictionary mapping service tags to RemoteService classes
        """
        return self.remote_services.copy()

    def get_service_path(self, tag: str) -> Optional[Path]:
        """
        Get the path for a remote service.

        Remote services don't have local paths, so this returns None.
        This method exists for compatibility with the Services interface.

        Args:
            tag: Service tag

        Returns:
            None (remote services have no local path)
        """
        return None


# Global instance
_remote_service_manager: Optional[RemoteServiceManager] = None


def get_remote_service_manager() -> RemoteServiceManager:
    """
    Get the global RemoteServiceManager instance.

    Creates the instance on first call and discovers services.

    Returns:
        RemoteServiceManager instance
    """
    global _remote_service_manager

    if _remote_service_manager is None:
        _remote_service_manager = RemoteServiceManager()
        try:
            _remote_service_manager.discover_services()
        except Exception as e:
            log.error(f"Failed to discover remote services: {e}")

    return _remote_service_manager


__all__ = ("RemoteServiceManager", "get_remote_service_manager")
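
A minimal sketch, not part of the diff: the configuration shape _discover_from_server reads and what discovery registers. Assigning config.remote_services in code stands in for the usual config file; all values are illustrative.

from unshackle.core.config import config
from unshackle.core.remote_services import get_remote_service_manager

config.remote_services = [
    {"name": "home-server", "url": "https://unshackle.example:8000", "api_key": "secret"},
]
manager = get_remote_service_manager()     # discovers on first call
print(sorted(manager.get_all_services()))  # e.g. ["remote_DSNP", "remote_NF"]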
@@ -21,9 +21,10 @@ from unshackle.core.constants import AnyTrack
 from unshackle.core.credential import Credential
 from unshackle.core.drm import DRM_T
 from unshackle.core.search_result import SearchResult
+from unshackle.core.title_cacher import TitleCacher, get_account_hash, get_region_from_proxy
 from unshackle.core.titles import Title_T, Titles_T
 from unshackle.core.tracks import Chapters, Tracks
-from unshackle.core.utilities import get_ip_info
+from unshackle.core.utilities import get_cached_ip_info, get_ip_info
 
 
 class Service(metaclass=ABCMeta):
@@ -42,6 +43,12 @@ class Service(metaclass=ABCMeta):
 
         self.session = self.get_session()
         self.cache = Cacher(self.__class__.__name__)
+        self.title_cache = TitleCacher(self.__class__.__name__)
+
+        # Store context for cache control flags and credential
+        self.ctx = ctx
+        self.credential = None  # Will be set in authenticate()
+        self.current_region = None  # Will be set based on proxy/geolocation
 
         if not ctx.parent or not ctx.parent.params.get("no_proxy"):
             if ctx.parent:
@@ -53,18 +60,24 @@ class Service(metaclass=ABCMeta):
                     # don't override the explicit proxy set by the user, even if they may be geoblocked
                     with console.status("Checking if current region is Geoblocked...", spinner="dots"):
                         if self.GEOFENCE:
-                            # no explicit proxy, let's get one to GEOFENCE if needed
-                            current_region = get_ip_info(self.session)["country"].lower()
-                            if any(x.lower() == current_region for x in self.GEOFENCE):
-                                self.log.info("Service is not Geoblocked in your region")
-                            else:
-                                requested_proxy = self.GEOFENCE[0]  # first is likely main region
-                                self.log.info(f"Service is Geoblocked in your region, getting a Proxy to {requested_proxy}")
-                                for proxy_provider in ctx.obj.proxy_providers:
-                                    proxy = proxy_provider.get_proxy(requested_proxy)
-                                    if proxy:
-                                        self.log.info(f"Got Proxy from {proxy_provider.__class__.__name__}")
-                                        break
+                            # Service has geofence - need fresh IP check to determine if proxy needed
+                            try:
+                                current_region = get_ip_info(self.session)["country"].lower()
+                                if any(x.lower() == current_region for x in self.GEOFENCE):
+                                    self.log.info("Service is not Geoblocked in your region")
+                                else:
+                                    requested_proxy = self.GEOFENCE[0]  # first is likely main region
+                                    self.log.info(
+                                        f"Service is Geoblocked in your region, getting a Proxy to {requested_proxy}"
+                                    )
+                                    for proxy_provider in ctx.obj.proxy_providers:
+                                        proxy = proxy_provider.get_proxy(requested_proxy)
+                                        if proxy:
+                                            self.log.info(f"Got Proxy from {proxy_provider.__class__.__name__}")
+                                            break
+                            except Exception as e:
+                                self.log.warning(f"Failed to check geofence: {e}")
+                                current_region = None
                         else:
                             self.log.info("Service has no Geofence")
 
@@ -79,6 +92,22 @@ class Service(metaclass=ABCMeta):
                             ).decode()
                         }
                     )
+                    # Always verify proxy IP - proxies can change exit nodes
+                    try:
+                        proxy_ip_info = get_ip_info(self.session)
+                        self.current_region = proxy_ip_info.get("country", "").lower() if proxy_ip_info else None
+                    except Exception as e:
+                        self.log.warning(f"Failed to verify proxy IP: {e}")
+                        # Fallback to extracting region from proxy config
+                        self.current_region = get_region_from_proxy(proxy)
+                else:
+                    # No proxy, use cached IP info for title caching (non-critical)
+                    try:
+                        ip_info = get_cached_ip_info(self.session)
+                        self.current_region = ip_info.get("country", "").lower() if ip_info else None
+                    except Exception as e:
+                        self.log.debug(f"Failed to get cached IP info: {e}")
+                        self.current_region = None
 
         # Optional Abstract functions
         # The following functions may be implemented by the Service.
@@ -123,6 +152,9 @@ class Service(metaclass=ABCMeta):
             raise TypeError(f"Expected cookies to be a {CookieJar}, not {cookies!r}.")
         self.session.cookies.update(cookies)
 
+        # Store credential for cache key generation
+        self.credential = credential
+
     def search(self) -> Generator[SearchResult, None, None]:
         """
         Search by query for titles from the Service.
@@ -187,6 +219,52 @@ class Service(metaclass=ABCMeta):
         This can be useful to store information on each title that will be required like any sub-asset IDs, or such.
         """
 
+    def get_titles_cached(self, title_id: str = None) -> Titles_T:
+        """
+        Cached wrapper around get_titles() to reduce redundant API calls.
+
+        This method checks the cache before calling get_titles() and handles
+        fallback to cached data when API calls fail.
+
+        Args:
+            title_id: Optional title ID for cache key generation.
+                If not provided, will try to extract from service instance.
+
+        Returns:
+            Titles object (Movies, Series, or Album)
+        """
+        # Try to get title_id from service instance if not provided
+        if title_id is None:
+            # Different services store the title ID in different attributes
+            if hasattr(self, "title"):
+                title_id = self.title
+            elif hasattr(self, "title_id"):
+                title_id = self.title_id
+            else:
+                # If we can't determine title_id, just call get_titles directly
+                self.log.debug("Cannot determine title_id for caching, bypassing cache")
+                return self.get_titles()
+
+        # Get cache control flags from context
+        no_cache = False
+        reset_cache = False
+        if self.ctx and self.ctx.parent:
+            no_cache = self.ctx.parent.params.get("no_cache", False)
+            reset_cache = self.ctx.parent.params.get("reset_cache", False)
+
+        # Get account hash for cache key
+        account_hash = get_account_hash(self.credential)
+
+        # Use title cache to get titles with fallback support
+        return self.title_cache.get_cached_titles(
+            title_id=str(title_id),
+            fetch_function=self.get_titles,
+            region=self.current_region,
+            account_hash=account_hash,
+            no_cache=no_cache,
+            reset_cache=reset_cache,
+        )
+
     @abstractmethod
     def get_tracks(self, title: Title_T) -> Tracks:
         """
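
A minimal sketch, not part of the diff: how a service opts in to the cached path. MyService and its title attribute are hypothetical; get_titles_cached probes for them via hasattr().

class MyService(Service):
    def __init__(self, ctx, title: str):
        super().__init__(ctx)
        self.title = title  # picked up as the cache key by get_titles_cached()

    def get_titles(self):
        ...  # the expensive API call, only hit on a cache miss

# Callers swap get_titles() for the cached wrapper:
# titles = service.get_titles_cached()  # hit, miss, or stale-data fallback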
@@ -6,7 +6,14 @@ from unshackle.core.config import config
 from unshackle.core.service import Service
 from unshackle.core.utilities import import_module_by_path
 
-_SERVICES = sorted((path for path in config.directories.services.glob("*/__init__.py")), key=lambda x: x.parent.stem)
+_service_dirs = config.directories.services
+if not isinstance(_service_dirs, list):
+    _service_dirs = [_service_dirs]
+
+_SERVICES = sorted(
+    (path for service_dir in _service_dirs for path in service_dir.glob("*/__init__.py")),
+    key=lambda x: x.parent.stem,
+)
 
 _MODULES = {path.parent.stem: getattr(import_module_by_path(path), path.parent.stem) for path in _SERVICES}
@@ -18,6 +25,17 @@ class Services(click.MultiCommand):
 
     # Click-specific methods
 
+    @staticmethod
+    def _get_remote_services():
+        """Get remote services from the manager (lazy import to avoid circular dependency)."""
+        try:
+            from unshackle.core.remote_services import get_remote_service_manager
+
+            manager = get_remote_service_manager()
+            return manager.get_all_services()
+        except Exception:
+            return {}
+
     def list_commands(self, ctx: click.Context) -> list[str]:
         """Returns a list of all available Services as command names for Click."""
         return Services.get_tags()
@@ -36,6 +54,8 @@ class Services(click.MultiCommand):
             raise click.ClickException(f"{e}. Available Services: {', '.join(available_services)}")
 
         if hasattr(service, "cli"):
+            if callable(service.cli):
+                return service.cli()
             return service.cli
 
         raise click.ClickException(f"Service '{tag}' has no 'cli' method configured.")
@@ -44,13 +64,25 @@ class Services(click.MultiCommand):
 
     @staticmethod
     def get_tags() -> list[str]:
-        """Returns a list of service tags from all available Services."""
-        return [x.parent.stem for x in _SERVICES]
+        """Returns a list of service tags from all available Services (local + remote)."""
+        local_tags = [x.parent.stem for x in _SERVICES]
+        remote_services = Services._get_remote_services()
+        remote_tags = list(remote_services.keys())
+        return local_tags + remote_tags
 
     @staticmethod
     def get_path(name: str) -> Path:
         """Get the directory path of a command."""
         tag = Services.get_tag(name)
+
+        # Check if it's a remote service
+        remote_services = Services._get_remote_services()
+        if tag in remote_services:
+            # Remote services don't have local paths
+            # Return a dummy path or raise an appropriate error
+            # For now, we'll raise KeyError to indicate no path exists
+            raise KeyError(f"Remote service '{tag}' has no local path")
+
         for service in _SERVICES:
             if service.parent.stem == tag:
                 return service.parent
@@ -65,19 +97,38 @@ class Services(click.MultiCommand):
         """
         original_value = value
         value = value.lower()
+
+        # Check local services
         for path in _SERVICES:
             tag = path.parent.stem
             if value in (tag.lower(), *_ALIASES.get(tag, [])):
                 return tag
+
+        # Check remote services
+        remote_services = Services._get_remote_services()
+        for tag, service_class in remote_services.items():
+            if value == tag.lower():
+                return tag
+            if hasattr(service_class, "ALIASES"):
+                if value in (alias.lower() for alias in service_class.ALIASES):
+                    return tag
+
         return original_value
 
     @staticmethod
     def load(tag: str) -> Service:
-        """Load a Service module by Service tag."""
+        """Load a Service module by Service tag (local or remote)."""
+        # Check local services first
         module = _MODULES.get(tag)
-        if not module:
-            raise KeyError(f"There is no Service added by the Tag '{tag}'")
-        return module
+        if module:
+            return module
+
+        # Check remote services
+        remote_services = Services._get_remote_services()
+        if tag in remote_services:
+            return remote_services[tag]
+
+        raise KeyError(f"There is no Service added by the Tag '{tag}'")
 
 
 __all__ = ("Services",)
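
A minimal sketch, not part of the diff: the lookup order Services.load now follows; the tags are illustrative.

local = Services.load("DSNP")          # local module wins, as before
remote = Services.load("remote_DSNP")  # then the remote discovery registry
# any other tag still raises KeyError("There is no Service added by the Tag ...")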
170
unshackle/core/session.py
Normal file
@@ -0,0 +1,170 @@
"""Session utilities for creating HTTP sessions with different backends."""

from __future__ import annotations

import logging
import random
import time
import warnings
from datetime import datetime, timezone
from email.utils import parsedate_to_datetime
from typing import Any
from urllib.parse import urlparse

from curl_cffi.requests import Response, Session, exceptions

from unshackle.core.config import config

# Globally suppress curl_cffi HTTPS proxy warnings since some proxy providers
# (like NordVPN) require HTTPS URLs but curl_cffi expects HTTP format
warnings.filterwarnings(
    "ignore", message="Make sure you are using https over https proxy.*", category=RuntimeWarning, module="curl_cffi.*"
)


class MaxRetriesError(exceptions.RequestException):
    def __init__(self, message, cause=None):
        super().__init__(message)
        self.__cause__ = cause


class CurlSession(Session):
    def __init__(
        self,
        max_retries: int = 10,
        backoff_factor: float = 0.2,
        max_backoff: float = 60.0,
        status_forcelist: list[int] | None = None,
        allowed_methods: set[str] | None = None,
        catch_exceptions: tuple[type[Exception], ...] | None = None,
        **session_kwargs: Any,
    ):
        super().__init__(**session_kwargs)

        self.max_retries = max_retries
        self.backoff_factor = backoff_factor
        self.max_backoff = max_backoff
        self.status_forcelist = status_forcelist or [429, 500, 502, 503, 504]
        self.allowed_methods = allowed_methods or {"GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE", "TRACE"}
        self.catch_exceptions = catch_exceptions or (
            exceptions.ConnectionError,
            exceptions.SSLError,
            exceptions.Timeout,
        )
        self.log = logging.getLogger(self.__class__.__name__)

    def _get_sleep_time(self, response: Response | None, attempt: int) -> float | None:
        if response:
            retry_after = response.headers.get("Retry-After")
            if retry_after:
                try:
                    return float(retry_after)
                except ValueError:
                    if retry_date := parsedate_to_datetime(retry_after):
                        return (retry_date - datetime.now(timezone.utc)).total_seconds()

        if attempt == 0:
            return 0.0

        backoff_value = self.backoff_factor * (2 ** (attempt - 1))
        jitter = backoff_value * 0.1
        sleep_time = backoff_value + random.uniform(-jitter, jitter)
        return min(sleep_time, self.max_backoff)

    def request(self, method: str, url: str, **kwargs: Any) -> Response:
        if method.upper() not in self.allowed_methods:
            return super().request(method, url, **kwargs)

        last_exception = None
        response = None

        for attempt in range(self.max_retries + 1):
            try:
                response = super().request(method, url, **kwargs)
                if response.status_code not in self.status_forcelist:
                    return response
                last_exception = exceptions.HTTPError(f"Received status code: {response.status_code}")
                self.log.warning(
                    f"{response.status_code} {response.reason}({urlparse(url).path}). Retrying... "
                    f"({attempt + 1}/{self.max_retries})"
                )

            except self.catch_exceptions as e:
                last_exception = e
                response = None
                self.log.warning(
                    f"{e.__class__.__name__}({urlparse(url).path}). Retrying... ({attempt + 1}/{self.max_retries})"
                )

            if attempt < self.max_retries:
                if sleep_duration := self._get_sleep_time(response, attempt + 1):
                    if sleep_duration > 0:
                        time.sleep(sleep_duration)
            else:
                break

        raise MaxRetriesError(f"Max retries exceeded for {method} {url}", cause=last_exception)


def session(browser: str | None = None, **kwargs) -> CurlSession:
    """
    Create a curl_cffi session that impersonates a browser.

    This is a full replacement for requests.Session with browser impersonation
    and anti-bot capabilities. The session uses curl-impersonate under the hood
    to mimic real browser behavior.

    Args:
        browser: Browser to impersonate (e.g. "chrome124", "firefox", "safari").
            Uses the configured default from curl_impersonate.browser if not specified.
            See https://github.com/lexiforest/curl_cffi#sessions for available options.
        **kwargs: Additional arguments passed to CurlSession constructor:
            - headers: Additional headers (dict)
            - cookies: Cookie jar or dict
            - auth: HTTP basic auth tuple (username, password)
            - proxies: Proxy configuration dict
            - verify: SSL certificate verification (bool, default True)
            - timeout: Request timeout in seconds (float or tuple)
            - allow_redirects: Follow redirects (bool, default True)
            - max_redirects: Maximum redirect count (int)
            - cert: Client certificate (str or tuple)
            - ja3: JA3 fingerprint (str)
            - akamai: Akamai fingerprint (str)

    Extra arguments for retry handler:
        - max_retries: Maximum number of retries (int, default 10)
        - backoff_factor: Backoff factor (float, default 0.2)
        - max_backoff: Maximum backoff time (float, default 60.0)
        - status_forcelist: List of status codes to force retry (list, default [429, 500, 502, 503, 504])
        - allowed_methods: List of allowed HTTP methods (set, default {"GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE", "TRACE"})
        - catch_exceptions: List of exceptions to catch (tuple, default (exceptions.ConnectionError, exceptions.SSLError, exceptions.Timeout))

    Returns:
        curl_cffi.requests.Session configured with browser impersonation, common headers,
        and equivalent retry behavior to requests.Session.

    Example:
        from unshackle.core.session import session as CurlSession

        class MyService(Service):
            @staticmethod
            def get_session() -> CurlSession:
                session = CurlSession(
                    impersonate="chrome",
                    ja3="...",
                    akamai="...",
                    max_retries=5,
                    status_forcelist=[429, 500],
                    allowed_methods={"GET", "HEAD", "OPTIONS"},
                )
                return session  # Uses config default browser
    """

    session_config = {
        "impersonate": browser or config.curl_impersonate.get("browser", "chrome"),
        **kwargs,
    }

    session_obj = CurlSession(**session_config)
    session_obj.headers.update(config.headers)
    return session_obj
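
A small sketch, not part of the diff, of the delay schedule _get_sleep_time produces with the defaults when no Retry-After header is present (exponential, +/-10% jitter, capped at max_backoff):

for attempt in range(1, 6):
    base = 0.2 * (2 ** (attempt - 1))  # backoff_factor * 2**(attempt - 1)
    print(f"retry {attempt}: ~{base:.1f}s +/- 10%")  # 0.2, 0.4, 0.8, 1.6, 3.2 ...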
240
unshackle/core/title_cacher.py
Normal file
@@ -0,0 +1,240 @@
from __future__ import annotations

import hashlib
import logging
from datetime import datetime, timedelta
from typing import Optional

from unshackle.core.cacher import Cacher
from unshackle.core.config import config
from unshackle.core.titles import Titles_T


class TitleCacher:
    """
    Handles caching of Title objects to reduce redundant API calls.

    This wrapper provides:
    - Region-aware caching to handle geo-restricted content
    - Automatic fallback to cached data when API calls fail
    - Cache lifetime extension during failures
    - Cache hit/miss statistics for debugging
    """

    def __init__(self, service_name: str):
        self.service_name = service_name
        self.log = logging.getLogger(f"{service_name}.TitleCache")
        self.cacher = Cacher(service_name)
        self.stats = {"hits": 0, "misses": 0, "fallbacks": 0}

    def _generate_cache_key(
        self, title_id: str, region: Optional[str] = None, account_hash: Optional[str] = None
    ) -> str:
        """
        Generate a unique cache key for title data.

        Args:
            title_id: The title identifier
            region: The region/proxy identifier
            account_hash: Hash of account credentials (if applicable)

        Returns:
            A unique cache key string
        """
        # Hash the title_id to handle complex IDs (URLs, dots, special chars)
        # This ensures consistent length and filesystem-safe keys
        title_hash = hashlib.sha256(title_id.encode()).hexdigest()[:16]

        # Start with base key using hash
        key_parts = ["titles", title_hash]

        # Add region if available
        if region:
            key_parts.append(region.lower())

        # Add account hash if available
        if account_hash:
            key_parts.append(account_hash[:8])  # Use first 8 chars of hash

        # Join with underscores
        cache_key = "_".join(key_parts)

        # Log the mapping for debugging
        self.log.debug(f"Cache key mapping: {title_id} -> {cache_key}")

        return cache_key

    def get_cached_titles(
        self,
        title_id: str,
        fetch_function,
        region: Optional[str] = None,
        account_hash: Optional[str] = None,
        no_cache: bool = False,
        reset_cache: bool = False,
    ) -> Optional[Titles_T]:
        """
        Get titles from cache or fetch from API with fallback support.

        Args:
            title_id: The title identifier
            fetch_function: Function to call to fetch fresh titles
            region: The region/proxy identifier
            account_hash: Hash of account credentials
            no_cache: Bypass cache completely
            reset_cache: Clear cache before fetching

        Returns:
            Titles object (Movies, Series, or Album)
        """
        # If caching is globally disabled or no_cache flag is set
        if not config.title_cache_enabled or no_cache:
            self.log.debug("Cache bypassed, fetching fresh titles")
            return fetch_function()

        # Generate cache key
        cache_key = self._generate_cache_key(title_id, region, account_hash)

        # If reset_cache flag is set, clear the cache entry
        if reset_cache:
            self.log.info(f"Clearing cache for {cache_key}")
            cache_path = (config.directories.cache / self.service_name / cache_key).with_suffix(".json")
            if cache_path.exists():
                cache_path.unlink()

        # Try to get from cache
        cache = self.cacher.get(cache_key, version=1)

        # Check if we have valid cached data
        if cache and not cache.expired:
            self.stats["hits"] += 1
            self.log.debug(f"Cache hit for {title_id} (hits: {self.stats['hits']}, misses: {self.stats['misses']})")
            return cache.data

        # Cache miss or expired, try to fetch fresh data
        self.stats["misses"] += 1
        self.log.debug(f"Cache miss for {title_id}, fetching fresh data")

        try:
            # Attempt to fetch fresh titles
            titles = fetch_function()

            if titles:
                # Successfully fetched, update cache
                self.log.debug(f"Successfully fetched titles for {title_id}, updating cache")
                cache = self.cacher.get(cache_key, version=1)
                cache.set(titles, expiration=datetime.now() + timedelta(seconds=config.title_cache_time))

            return titles

        except Exception as e:
            # API call failed, check if we have fallback cached data
            if cache and cache.data:
                # We have expired cached data, use it as fallback
                current_time = datetime.now()
                max_retention_time = cache.expiration + timedelta(
                    seconds=config.title_cache_max_retention - config.title_cache_time
                )

                if current_time < max_retention_time:
                    self.stats["fallbacks"] += 1
                    self.log.warning(
                        f"API call failed for {title_id}, using cached data as fallback "
                        f"(fallbacks: {self.stats['fallbacks']})"
                    )
                    self.log.debug(f"Error was: {e}")

                    # Extend cache lifetime
                    extended_expiration = current_time + timedelta(minutes=5)
                    if extended_expiration < max_retention_time:
                        cache.expiration = extended_expiration
                        cache.set(cache.data, expiration=extended_expiration)

                    return cache.data
                else:
                    self.log.error(f"API call failed and cached data for {title_id} exceeded maximum retention time")

            # Re-raise the exception if no fallback available
            raise

    def clear_all_title_cache(self):
        """Clear all title caches for this service."""
        cache_dir = config.directories.cache / self.service_name
        if cache_dir.exists():
            for cache_file in cache_dir.glob("titles_*.json"):
                cache_file.unlink()
                self.log.info(f"Cleared cache file: {cache_file.name}")

    def get_cache_stats(self) -> dict:
        """Get cache statistics."""
        total = sum(self.stats.values())
        if total > 0:
            hit_rate = (self.stats["hits"] / total) * 100
        else:
            hit_rate = 0

        return {
            "hits": self.stats["hits"],
            "misses": self.stats["misses"],
            "fallbacks": self.stats["fallbacks"],
            "hit_rate": f"{hit_rate:.1f}%",
        }


def get_region_from_proxy(proxy_url: Optional[str]) -> Optional[str]:
    """
    Extract region identifier from proxy URL.

    Args:
        proxy_url: The proxy URL string

    Returns:
        Region identifier or None
    """
    if not proxy_url:
        return None

    # Try to extract region from common proxy patterns
    # e.g., "us123.nordvpn.com", "gb-proxy.example.com"
    import re

    # Pattern for NordVPN style
    nord_match = re.search(r"([a-z]{2})\d+\.nordvpn", proxy_url.lower())
    if nord_match:
        return nord_match.group(1)

    # Pattern for country code at start
    cc_match = re.search(r"([a-z]{2})[-_]", proxy_url.lower())
    if cc_match:
        return cc_match.group(1)

    # Pattern for country code subdomain
    subdomain_match = re.search(r"://([a-z]{2})\.", proxy_url.lower())
    if subdomain_match:
        return subdomain_match.group(1)

    return None


def get_account_hash(credential) -> Optional[str]:
    """
    Generate a hash for account identification.

    Args:
        credential: Credential object

    Returns:
        SHA1 hash of the credential or None
    """
    if not credential:
        return None

    # Use existing sha1 property if available
    if hasattr(credential, "sha1"):
        return credential.sha1

    # Otherwise generate hash from username
    if hasattr(credential, "username"):
        return hashlib.sha1(credential.username.encode()).hexdigest()

    return None
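
A minimal sketch, not part of the diff: exercising the cache and the region helper together. The service tag, title ID, fetcher, and hostname are illustrative.

from unshackle.core.title_cacher import TitleCacher, get_region_from_proxy

cacher = TitleCacher("DSNP")
titles = cacher.get_cached_titles(
    title_id="series/abc123",
    fetch_function=lambda: expensive_api_call(),  # hypothetical fetcher
    region=get_region_from_proxy("https://us4242.nordvpn.com:89"),  # -> "us"
)
print(cacher.get_cache_stats())  # e.g. {'hits': 0, 'misses': 1, 'fallbacks': 0, 'hit_rate': '0.0%'}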
@@ -81,7 +81,7 @@ class Episode(Title):
|
||||
def __str__(self) -> str:
|
||||
return "{title}{year} S{season:02}E{number:02} {name}".format(
|
||||
title=self.title,
|
||||
year=f" {self.year}" if self.year else "",
|
||||
year=f" {self.year}" if self.year and config.series_year else "",
|
||||
season=self.season,
|
||||
number=self.number,
|
||||
name=self.name or "",
|
||||
@@ -89,85 +89,115 @@ class Episode(Title):
|
||||
|
||||
def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
|
||||
primary_video_track = next(iter(media_info.video_tracks), None)
|
||||
primary_audio_track = next(iter(media_info.audio_tracks), None)
|
||||
primary_audio_track = None
|
||||
if media_info.audio_tracks:
|
||||
sorted_audio = sorted(
|
||||
media_info.audio_tracks,
|
||||
key=lambda x: (
|
||||
float(x.bit_rate) if x.bit_rate else 0,
|
||||
bool(x.format_additionalfeatures and "JOC" in x.format_additionalfeatures)
|
||||
),
|
||||
reverse=True
|
||||
)
|
||||
primary_audio_track = sorted_audio[0]
|
||||
unique_audio_languages = len({x.language.split("-")[0] for x in media_info.audio_tracks if x.language})
|
||||
|
||||
# Title [Year] SXXEXX Name (or Title [Year] SXX if folder)
|
||||
if folder:
|
||||
name = f"{self.title}"
|
||||
if self.year:
|
||||
if self.year and config.series_year:
|
||||
name += f" {self.year}"
|
||||
name += f" S{self.season:02}"
|
||||
else:
|
||||
name = "{title}{year} S{season:02}E{number:02} {name}".format(
|
||||
title=self.title.replace("$", "S"), # e.g., Arli$$
|
||||
year=f" {self.year}" if self.year else "",
|
||||
year=f" {self.year}" if self.year and config.series_year else "",
|
||||
season=self.season,
|
||||
number=self.number,
|
||||
name=self.name or "",
|
||||
).strip()
|
||||
|
||||
# MULTi
|
||||
if unique_audio_languages > 1:
|
||||
name += " MULTi"
|
||||
if config.scene_naming:
|
||||
# Resolution
|
||||
if primary_video_track:
|
||||
resolution = primary_video_track.height
|
||||
aspect_ratio = [
|
||||
int(float(plane)) for plane in primary_video_track.other_display_aspect_ratio[0].split(":")
|
||||
]
|
||||
if len(aspect_ratio) == 1:
|
||||
# e.g., aspect ratio of 2 (2.00:1) would end up as `(2.0,)`, add 1
|
||||
aspect_ratio.append(1)
|
||||
if aspect_ratio[0] / aspect_ratio[1] not in (16 / 9, 4 / 3):
|
||||
# We want the resolution represented in a 4:3 or 16:9 canvas.
|
||||
# If it's not 4:3 or 16:9, calculate as if it's inside a 16:9 canvas,
|
||||
# otherwise the track's height value is fine.
|
||||
# We are assuming this title is some weird aspect ratio so most
|
||||
# likely a movie or HD source, so it's most likely widescreen so
|
||||
# 16:9 canvas makes the most sense.
|
||||
resolution = int(primary_video_track.width * (9 / 16))
|
||||
name += f" {resolution}p"
|
||||
|
||||
# Resolution
|
||||
if primary_video_track:
|
||||
resolution = primary_video_track.height
|
||||
aspect_ratio = [int(float(plane)) for plane in primary_video_track.other_display_aspect_ratio[0].split(":")]
|
||||
if len(aspect_ratio) == 1:
|
||||
# e.g., aspect ratio of 2 (2.00:1) would end up as `(2.0,)`, add 1
|
||||
aspect_ratio.append(1)
|
||||
if aspect_ratio[0] / aspect_ratio[1] not in (16 / 9, 4 / 3):
|
||||
# We want the resolution represented in a 4:3 or 16:9 canvas.
|
||||
# If it's not 4:3 or 16:9, calculate as if it's inside a 16:9 canvas,
|
||||
        # otherwise the track's height value is fine.
        # We are assuming this title is some weird aspect ratio so most
        # likely a movie or HD source, so it's most likely widescreen so
        # 16:9 canvas makes the most sense.
        resolution = int(primary_video_track.width * (9 / 16))
    name += f" {resolution}p"

# Service
if show_service:
    name += f" {self.service.__name__}"

# Service
if show_service:
    name += f" {self.service.__name__}"
# 'WEB-DL'
name += " WEB-DL"

# 'WEB-DL'
name += " WEB-DL"
# DUAL
if unique_audio_languages == 2:
    name += " DUAL"

# Audio Codec + Channels (+ feature)
if primary_audio_track:
    codec = primary_audio_track.format
    channel_layout = primary_audio_track.channel_layout or primary_audio_track.channellayout_original
    if channel_layout:
        channels = float(sum({"LFE": 0.1}.get(position.upper(), 1) for position in channel_layout.split(" ")))
    else:
        channel_count = primary_audio_track.channel_s or primary_audio_track.channels or 0
        channels = float(channel_count)
    # MULTi
    if unique_audio_languages > 2:
        name += " MULTi"

    features = primary_audio_track.format_additionalfeatures or ""
    name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
    if "JOC" in features or primary_audio_track.joc:
        name += " Atmos"
# Audio Codec + Channels (+ feature)
if primary_audio_track:
    codec = primary_audio_track.format
    channel_layout = primary_audio_track.channel_layout or primary_audio_track.channellayout_original
    if channel_layout:
        channels = float(
            sum({"LFE": 0.1}.get(position.upper(), 1) for position in channel_layout.split(" "))
        )
    else:
        channel_count = primary_audio_track.channel_s or primary_audio_track.channels or 0
        channels = float(channel_count)

# Video (dynamic range + hfr +) Codec
if primary_video_track:
    codec = primary_video_track.format
    hdr_format = primary_video_track.hdr_format_commercial
    trc = primary_video_track.transfer_characteristics or primary_video_track.transfer_characteristics_original
    frame_rate = float(primary_video_track.frame_rate)
    if hdr_format:
        name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "
    elif trc and "HLG" in trc:
        name += " HLG"
    if frame_rate > 30:
        name += " HFR"
    name += f" {VIDEO_CODEC_MAP.get(codec, codec)}"
features = primary_audio_track.format_additionalfeatures or ""
name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
if "JOC" in features or primary_audio_track.joc:
    name += " Atmos"

if config.tag:
    name += f"-{config.tag}"
# Video (dynamic range + hfr +) Codec
if primary_video_track:
    codec = primary_video_track.format
    hdr_format = primary_video_track.hdr_format_commercial
    trc = (
        primary_video_track.transfer_characteristics
        or primary_video_track.transfer_characteristics_original
    )
    frame_rate = float(primary_video_track.frame_rate)
    if hdr_format:
        if (primary_video_track.hdr_format or "").startswith("Dolby Vision"):
            name += " DV"
            if DYNAMIC_RANGE_MAP.get(hdr_format) and DYNAMIC_RANGE_MAP.get(hdr_format) != "DV":
                name += " HDR"
        else:
            name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "
    elif trc and "HLG" in trc:
        name += " HLG"
    if frame_rate > 30:
        name += " HFR"
    name += f" {VIDEO_CODEC_MAP.get(codec, codec)}"

return sanitize_filename(name)

if config.tag:
    name += f"-{config.tag}"

return sanitize_filename(name)
else:
    # Simple naming style without technical details - use spaces instead of dots
    return sanitize_filename(name, " ")

class Series(SortedKeyList, ABC):

@@ -177,14 +207,15 @@ class Series(SortedKeyList, ABC):

    def __str__(self) -> str:
        if not self:
            return super().__str__()
        return self[0].title + (f" ({self[0].year})" if self[0].year else "")
        return self[0].title + (f" ({self[0].year})" if self[0].year and config.series_year else "")

    def tree(self, verbose: bool = False) -> Tree:
        seasons = Counter(x.season for x in self)
        num_seasons = len(seasons)
        num_episodes = sum(seasons.values())
        sum(seasons.values())
        season_breakdown = ", ".join(f"S{season}({count})" for season, count in sorted(seasons.items()))
        tree = Tree(
            f"{num_seasons} Season{['s', ''][num_seasons == 1]}, {num_episodes} Episode{['s', ''][num_episodes == 1]}",
            f"{num_seasons} seasons, {season_breakdown}",
            guide_style="bright_black",
        )
        if verbose:

@@ -52,73 +52,103 @@ class Movie(Title):

def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
    primary_video_track = next(iter(media_info.video_tracks), None)
    primary_audio_track = next(iter(media_info.audio_tracks), None)
    primary_audio_track = None
    if media_info.audio_tracks:
        sorted_audio = sorted(
            media_info.audio_tracks,
            key=lambda x: (
                float(x.bit_rate) if x.bit_rate else 0,
                bool(x.format_additionalfeatures and "JOC" in x.format_additionalfeatures)
            ),
            reverse=True
        )
        primary_audio_track = sorted_audio[0]
    unique_audio_languages = len({x.language.split("-")[0] for x in media_info.audio_tracks if x.language})

    # Name (Year)
    name = str(self).replace("$", "S")  # e.g., Arli$$

    # MULTi
    if unique_audio_languages > 1:
        name += " MULTi"
    if config.scene_naming:
        # Resolution
        if primary_video_track:
            resolution = primary_video_track.height
            aspect_ratio = [
                int(float(plane)) for plane in primary_video_track.other_display_aspect_ratio[0].split(":")
            ]
            if len(aspect_ratio) == 1:
                # e.g., aspect ratio of 2 (2.00:1) would end up as `(2.0,)`, add 1
                aspect_ratio.append(1)
            if aspect_ratio[0] / aspect_ratio[1] not in (16 / 9, 4 / 3):
                # We want the resolution represented in a 4:3 or 16:9 canvas.
                # If it's not 4:3 or 16:9, calculate as if it's inside a 16:9 canvas,
                # otherwise the track's height value is fine.
                # We are assuming this title is some weird aspect ratio so most
                # likely a movie or HD source, so it's most likely widescreen so
                # 16:9 canvas makes the most sense.
                resolution = int(primary_video_track.width * (9 / 16))
            name += f" {resolution}p"

    # Resolution
    if primary_video_track:
        resolution = primary_video_track.height
        aspect_ratio = [int(float(plane)) for plane in primary_video_track.other_display_aspect_ratio[0].split(":")]
        if len(aspect_ratio) == 1:
            # e.g., aspect ratio of 2 (2.00:1) would end up as `(2.0,)`, add 1
            aspect_ratio.append(1)
        if aspect_ratio[0] / aspect_ratio[1] not in (16 / 9, 4 / 3):
            # We want the resolution represented in a 4:3 or 16:9 canvas.
            # If it's not 4:3 or 16:9, calculate as if it's inside a 16:9 canvas,
            # otherwise the track's height value is fine.
            # We are assuming this title is some weird aspect ratio so most
            # likely a movie or HD source, so it's most likely widescreen so
            # 16:9 canvas makes the most sense.
            resolution = int(primary_video_track.width * (9 / 16))
        name += f" {resolution}p"
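To make the canvas rule above concrete, a small hedged sketch (the numbers are hypothetical, not taken from any real track):

    # A 2.39:1 "scope" movie stored as 3840x1608: 2.39 is neither 16/9 nor 4/3,
    # so the track height (1608) is not used directly; the width is projected
    # onto a 16:9 canvas instead.
    width = 3840
    resolution = int(width * (9 / 16))
    assert resolution == 2160  # the name gets " 2160p", matching other UHD releases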
    # Service
    if show_service:
        name += f" {self.service.__name__}"

    # Service
    if show_service:
        name += f" {self.service.__name__}"
    # 'WEB-DL'
    name += " WEB-DL"

    # 'WEB-DL'
    name += " WEB-DL"
    # DUAL
    if unique_audio_languages == 2:
        name += " DUAL"

    # Audio Codec + Channels (+ feature)
    if primary_audio_track:
        codec = primary_audio_track.format
        channel_layout = primary_audio_track.channel_layout or primary_audio_track.channellayout_original
        if channel_layout:
            channels = float(sum({"LFE": 0.1}.get(position.upper(), 1) for position in channel_layout.split(" ")))
        else:
            channel_count = primary_audio_track.channel_s or primary_audio_track.channels or 0
            channels = float(channel_count)
        # MULTi
        if unique_audio_languages > 2:
            name += " MULTi"

        features = primary_audio_track.format_additionalfeatures or ""
        name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
        if "JOC" in features or primary_audio_track.joc:
            name += " Atmos"
    # Audio Codec + Channels (+ feature)
    if primary_audio_track:
        codec = primary_audio_track.format
        channel_layout = primary_audio_track.channel_layout or primary_audio_track.channellayout_original
        if channel_layout:
            channels = float(
                sum({"LFE": 0.1}.get(position.upper(), 1) for position in channel_layout.split(" "))
            )
        else:
            channel_count = primary_audio_track.channel_s or primary_audio_track.channels or 0
            channels = float(channel_count)
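The channel figure above is derived from the layout string rather than the raw channel count: every named position contributes 1.0 and "LFE" contributes 0.1. A runnable sketch with a hypothetical MediaInfo layout value:

    layout = "L R C LFE Ls Rs"  # hypothetical channel_layout from MediaInfo
    channels = float(sum({"LFE": 0.1}.get(position.upper(), 1) for position in layout.split(" ")))
    assert f"{channels:.1f}" == "5.1"  # five full-range channels plus one LFE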

    # Video (dynamic range + hfr +) Codec
    if primary_video_track:
        codec = primary_video_track.format
        hdr_format = primary_video_track.hdr_format_commercial
        trc = primary_video_track.transfer_characteristics or primary_video_track.transfer_characteristics_original
        frame_rate = float(primary_video_track.frame_rate)
        if hdr_format:
            name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "
        elif trc and "HLG" in trc:
            name += " HLG"
        if frame_rate > 30:
            name += " HFR"
        name += f" {VIDEO_CODEC_MAP.get(codec, codec)}"
    features = primary_audio_track.format_additionalfeatures or ""
    name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
    if "JOC" in features or primary_audio_track.joc:
        name += " Atmos"

    if config.tag:
        name += f"-{config.tag}"
    # Video (dynamic range + hfr +) Codec
    if primary_video_track:
        codec = primary_video_track.format
        hdr_format = primary_video_track.hdr_format_commercial
        trc = (
            primary_video_track.transfer_characteristics
            or primary_video_track.transfer_characteristics_original
        )
        frame_rate = float(primary_video_track.frame_rate)
        if hdr_format:
            if (primary_video_track.hdr_format or "").startswith("Dolby Vision"):
                name += " DV"
                if DYNAMIC_RANGE_MAP.get(hdr_format) and DYNAMIC_RANGE_MAP.get(hdr_format) != "DV":
                    name += " HDR"
            else:
                name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "
        elif trc and "HLG" in trc:
            name += " HLG"
        if frame_rate > 30:
            name += " HFR"
        name += f" {VIDEO_CODEC_MAP.get(codec, codec)}"

    return sanitize_filename(name)

        if config.tag:
            name += f"-{config.tag}"

        return sanitize_filename(name)
    else:
        # Simple naming style without technical details - use spaces instead of dots
        return sanitize_filename(name, " ")


class Movies(SortedKeyList, ABC):

@@ -100,22 +100,26 @@ class Song(Title):
    # NN. Song Name
    name = str(self).split(" / ")[1]

    # Service
    if show_service:
        name += f" {self.service.__name__}"
    if config.scene_naming:
        # Service
        if show_service:
            name += f" {self.service.__name__}"

        # 'WEB-DL'
        name += " WEB-DL"
    # 'WEB-DL'
    name += " WEB-DL"

    # Audio Codec + Channels (+ feature)
    name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
    if "JOC" in features or audio_track.joc:
        name += " Atmos"
        # Audio Codec + Channels (+ feature)
        name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
        if "JOC" in features or audio_track.joc:
            name += " Atmos"

    if config.tag:
        name += f"-{config.tag}"
        if config.tag:
            name += f"-{config.tag}"

    return sanitize_filename(name, " ")
        return sanitize_filename(name, " ")
    else:
        # Simple naming style without technical details
        return sanitize_filename(name, " ")


class Album(SortedKeyList, ABC):

@@ -2,9 +2,10 @@ from .attachment import Attachment
from .audio import Audio
from .chapter import Chapter
from .chapters import Chapters
from .hybrid import Hybrid
from .subtitle import Subtitle
from .track import Track
from .tracks import Tracks
from .video import Video

__all__ = ("Audio", "Attachment", "Chapter", "Chapters", "Subtitle", "Track", "Tracks", "Video")
__all__ = ("Audio", "Attachment", "Chapter", "Chapters", "Hybrid", "Subtitle", "Track", "Tracks", "Video")

@@ -62,6 +62,7 @@ class Attachment:
        session = session or requests.Session()
        response = session.get(url, stream=True)
        response.raise_for_status()
        config.directories.temp.mkdir(parents=True, exist_ok=True)
        download_path.parent.mkdir(parents=True, exist_ok=True)

        with open(download_path, "wb") as f:

@@ -12,6 +12,7 @@ class Audio(Track):
        AAC = "AAC"  # https://wikipedia.org/wiki/Advanced_Audio_Coding
        AC3 = "DD"  # https://wikipedia.org/wiki/Dolby_Digital
        EC3 = "DD+"  # https://wikipedia.org/wiki/Dolby_Digital_Plus
        AC4 = "AC-4"  # https://wikipedia.org/wiki/Dolby_AC-4
        OPUS = "OPUS"  # https://wikipedia.org/wiki/Opus_(audio_format)
        OGG = "VORB"  # https://wikipedia.org/wiki/Vorbis
        DTS = "DTS"  # https://en.wikipedia.org/wiki/DTS_(company)#DTS_Digital_Surround
@@ -31,6 +32,8 @@ class Audio(Track):
            return Audio.Codec.AC3
        if mime == "ec-3":
            return Audio.Codec.EC3
        if mime == "ac-4":
            return Audio.Codec.AC4
        if mime == "opus":
            return Audio.Codec.OPUS
        if mime == "dtsc":
@@ -60,6 +63,8 @@ class Audio(Track):
            return Audio.Codec.AC3
        if profile.startswith("ddplus"):
            return Audio.Codec.EC3
        if profile.startswith("ac4"):
            return Audio.Codec.AC4
        if profile.startswith("playready-oggvorbis"):
            return Audio.Codec.OGG
        raise ValueError(f"The Content Profile '{profile}' is not a supported Audio Codec")

unshackle/core/tracks/hybrid.py (new file, 327 lines)
@@ -0,0 +1,327 @@
import json
import logging
import os
import subprocess
import sys
from pathlib import Path

from rich.padding import Padding
from rich.rule import Rule

from unshackle.core.binaries import DoviTool, HDR10PlusTool
from unshackle.core.config import config
from unshackle.core.console import console


class Hybrid:
    def __init__(self, videos, source) -> None:
        self.log = logging.getLogger("hybrid")

        """
        Takes the Dolby Vision and HDR10(+) streams out of the VideoTracks.
        It will then attempt to inject the Dolby Vision metadata layer into the HDR10(+) stream.
        If no DV track is available but HDR10+ is present, it will convert HDR10+ to DV.
        """
        global directories
        from unshackle.core.tracks import Video

        self.videos = videos
        self.source = source
        self.rpu_file = "RPU.bin"
        self.hdr_type = "HDR10"
        self.hevc_file = f"{self.hdr_type}-DV.hevc"
        self.hdr10plus_to_dv = False
        self.hdr10plus_file = "HDR10Plus.json"

        # Get resolution info from HDR10 track for display
        hdr10_track = next((v for v in videos if v.range == Video.Range.HDR10), None)
        hdr10p_track = next((v for v in videos if v.range == Video.Range.HDR10P), None)
        track_for_res = hdr10_track or hdr10p_track
        self.resolution = f"{track_for_res.height}p" if track_for_res and track_for_res.height else "Unknown"

        console.print(Padding(Rule(f"[rule.text]HDR10+DV Hybrid ({self.resolution})"), (1, 2)))

        for video in self.videos:
            if not video.path or not os.path.exists(video.path):
                raise ValueError(f"Video track {video.id} was not downloaded before injection.")

        # Check if we have DV track available
        has_dv = any(video.range == Video.Range.DV for video in self.videos)
        has_hdr10 = any(video.range == Video.Range.HDR10 for video in self.videos)
        has_hdr10p = any(video.range == Video.Range.HDR10P for video in self.videos)

        if not has_hdr10:
            raise ValueError("No HDR10 track available for hybrid processing.")

        # If we have HDR10+ but no DV, we can convert HDR10+ to DV
        if not has_dv and has_hdr10p:
            self.log.info("✓ No DV track found, but HDR10+ is available. Will convert HDR10+ to DV.")
            self.hdr10plus_to_dv = True
        elif not has_dv:
            raise ValueError("No DV track available and no HDR10+ to convert.")

        if os.path.isfile(config.directories.temp / self.hevc_file):
            self.log.info("✓ Already Injected")
            return

        for video in videos:
            # Use the actual path from the video track
            save_path = video.path
            if not save_path or not os.path.exists(save_path):
                raise ValueError(f"Video track {video.id} was not downloaded or path not found: {save_path}")

            if video.range == Video.Range.HDR10:
                self.extract_stream(save_path, "HDR10")
            elif video.range == Video.Range.HDR10P:
                self.extract_stream(save_path, "HDR10")
                self.hdr_type = "HDR10+"
            elif video.range == Video.Range.DV:
                self.extract_stream(save_path, "DV")

        if self.hdr10plus_to_dv:
            # Extract HDR10+ metadata and convert to DV
            hdr10p_video = next(v for v in videos if v.range == Video.Range.HDR10P)
            self.extract_hdr10plus(hdr10p_video)
            self.convert_hdr10plus_to_dv()
        else:
            # Regular DV extraction
            dv_video = next(v for v in videos if v.range == Video.Range.DV)
            self.extract_rpu(dv_video)
            if os.path.isfile(config.directories.temp / "RPU_UNT.bin"):
                self.rpu_file = "RPU_UNT.bin"
                self.level_6()
                # Mode 3 conversion already done during extraction when not untouched
            elif os.path.isfile(config.directories.temp / "RPU.bin"):
                # RPU already extracted with mode 3
                pass

        self.injecting()

        self.log.info("✓ Injection Completed")
        if self.source in ("itunes", "appletvplus"):  # fixed: `== ("itunes" or "appletvplus")` only ever compared against "itunes"
            Path.unlink(config.directories.temp / "hdr10.mkv")
            Path.unlink(config.directories.temp / "dv.mkv")
        Path.unlink(config.directories.temp / "HDR10.hevc", missing_ok=True)
        Path.unlink(config.directories.temp / "DV.hevc", missing_ok=True)
        Path.unlink(config.directories.temp / f"{self.rpu_file}", missing_ok=True)

    def ffmpeg_simple(self, save_path, output):
        """Simple ffmpeg execution without progress tracking"""
        p = subprocess.run(
            [
                "ffmpeg",
                "-nostdin",
                "-i",
                str(save_path),
                "-c:v",
                "copy",
                str(output),
                "-y",  # overwrite output
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        return p.returncode

    def extract_stream(self, save_path, type_):
        output = Path(config.directories.temp / f"{type_}.hevc")

        with console.status(f"Extracting {type_} stream...", spinner="dots"):
            returncode = self.ffmpeg_simple(save_path, output)

        if returncode:
            output.unlink(missing_ok=True)
            self.log.error(f"x Failed extracting {type_} stream")
            sys.exit(1)

        self.log.info(f"Extracted {type_} stream")

    def extract_rpu(self, video, untouched=False):
        if os.path.isfile(config.directories.temp / "RPU.bin") or os.path.isfile(
            config.directories.temp / "RPU_UNT.bin"
        ):
            return

        with console.status(
            f"Extracting{' untouched ' if untouched else ' '}RPU from Dolby Vision stream...", spinner="dots"
        ):
            extraction_args = [str(DoviTool)]
            if not untouched:
                extraction_args += ["-m", "3"]
            extraction_args += [
                "extract-rpu",
                config.directories.temp / "DV.hevc",
                "-o",
                config.directories.temp / f"{'RPU' if not untouched else 'RPU_UNT'}.bin",
            ]

            rpu_extraction = subprocess.run(
                extraction_args,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )

        if rpu_extraction.returncode:
            Path.unlink(config.directories.temp / f"{'RPU' if not untouched else 'RPU_UNT'}.bin")
            if b"MAX_PQ_LUMINANCE" in rpu_extraction.stderr:
                self.extract_rpu(video, untouched=True)
            elif b"Invalid PPS index" in rpu_extraction.stderr:
                raise ValueError("Dolby Vision VideoTrack seems to be corrupt")
            else:
                raise ValueError(f"Failed extracting{' untouched ' if untouched else ' '}RPU from Dolby Vision stream")

        self.log.info(f"Extracted{' untouched ' if untouched else ' '}RPU from Dolby Vision stream")

    def level_6(self):
        """Edit RPU Level 6 values"""
        with open(config.directories.temp / "L6.json", "w+") as level6_file:
            level6 = {
                "cm_version": "V29",
                "length": 0,
                "level6": {
                    "max_display_mastering_luminance": 1000,
                    "min_display_mastering_luminance": 1,
                    "max_content_light_level": 0,
                    "max_frame_average_light_level": 0,
                },
            }

            json.dump(level6, level6_file, indent=3)

        if not os.path.isfile(config.directories.temp / "RPU_L6.bin"):
            with console.status("Editing RPU Level 6 values...", spinner="dots"):
                level6 = subprocess.run(
                    [
                        str(DoviTool),
                        "editor",
                        "-i",
                        config.directories.temp / self.rpu_file,
                        "-j",
                        config.directories.temp / "L6.json",
                        "-o",
                        config.directories.temp / "RPU_L6.bin",
                    ],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                )

                if level6.returncode:
                    Path.unlink(config.directories.temp / "RPU_L6.bin")
                    raise ValueError("Failed editing RPU Level 6 values")

                self.log.info("Edited RPU Level 6 values")

        # Update rpu_file to use the edited version
        self.rpu_file = "RPU_L6.bin"

    def injecting(self):
        if os.path.isfile(config.directories.temp / self.hevc_file):
            return

        with console.status(f"Injecting Dolby Vision metadata into {self.hdr_type} stream...", spinner="dots"):
            inject_cmd = [
                str(DoviTool),
                "inject-rpu",
                "-i",
                config.directories.temp / "HDR10.hevc",
                "--rpu-in",
                config.directories.temp / self.rpu_file,
            ]

            # If we converted from HDR10+, optionally remove HDR10+ metadata during injection
            # Default to removing HDR10+ metadata since we're converting to DV
            if self.hdr10plus_to_dv:
                inject_cmd.append("--drop-hdr10plus")
                self.log.info(" - Removing HDR10+ metadata during injection")

            inject_cmd.extend(["-o", config.directories.temp / self.hevc_file])

            inject = subprocess.run(
                inject_cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )

        if inject.returncode:
            Path.unlink(config.directories.temp / self.hevc_file)
            raise ValueError("Failed injecting Dolby Vision metadata into HDR10 stream")

        self.log.info(f"Injected Dolby Vision metadata into {self.hdr_type} stream")

    def extract_hdr10plus(self, _video):
        """Extract HDR10+ metadata from the video stream"""
        if os.path.isfile(config.directories.temp / self.hdr10plus_file):
            return

        if not HDR10PlusTool:
            raise ValueError("HDR10Plus_tool not found. Please install it to use HDR10+ to DV conversion.")

        with console.status("Extracting HDR10+ metadata...", spinner="dots"):
            # HDR10Plus_tool needs raw HEVC stream
            extraction = subprocess.run(
                [
                    str(HDR10PlusTool),
                    "extract",
                    str(config.directories.temp / "HDR10.hevc"),
                    "-o",
                    str(config.directories.temp / self.hdr10plus_file),
                ],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )

        if extraction.returncode:
            raise ValueError("Failed extracting HDR10+ metadata")

        # Check if the extracted file has content
        if os.path.getsize(config.directories.temp / self.hdr10plus_file) == 0:
            raise ValueError("No HDR10+ metadata found in the stream")

        self.log.info("Extracted HDR10+ metadata")

    def convert_hdr10plus_to_dv(self):
        """Convert HDR10+ metadata to Dolby Vision RPU"""
        if os.path.isfile(config.directories.temp / "RPU.bin"):
            return

        with console.status("Converting HDR10+ metadata to Dolby Vision...", spinner="dots"):
            # First create the extra metadata JSON for dovi_tool
            extra_metadata = {
                "cm_version": "V29",
                "length": 0,  # dovi_tool will figure this out
                "level6": {
                    "max_display_mastering_luminance": 1000,
                    "min_display_mastering_luminance": 1,
                    "max_content_light_level": 0,
                    "max_frame_average_light_level": 0,
                },
            }

            with open(config.directories.temp / "extra.json", "w") as f:
                json.dump(extra_metadata, f, indent=2)

            # Generate DV RPU from HDR10+ metadata
            conversion = subprocess.run(
                [
                    str(DoviTool),
                    "generate",
                    "-j",
                    str(config.directories.temp / "extra.json"),
                    "--hdr10plus-json",
                    str(config.directories.temp / self.hdr10plus_file),
                    "-o",
                    str(config.directories.temp / "RPU.bin"),
                ],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )

        if conversion.returncode:
            raise ValueError("Failed converting HDR10+ to Dolby Vision")

        self.log.info("Converted HDR10+ metadata to Dolby Vision")
        self.log.info("✓ HDR10+ successfully converted to Dolby Vision Profile 8")

        # Clean up temporary files
        Path.unlink(config.directories.temp / "extra.json")
        Path.unlink(config.directories.temp / self.hdr10plus_file)
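A minimal usage sketch for the class above; the track variables are hypothetical and assumed to be already-downloaded Video tracks selected elsewhere in the pipeline:

    # Hybrid does all of its work in __init__: it demuxes both streams with
    # ffmpeg, extracts the RPU from the DV stream (or generates one from the
    # HDR10+ metadata via dovi_tool), and writes the injected stream to
    # config.directories.temp / "HDR10-DV.hevc".
    Hybrid([hdr10_track, dv_track], source="itunes")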
@@ -10,14 +10,17 @@ from pathlib import Path
from typing import Any, Callable, Iterable, Optional, Union

import pycaption
import pysubs2
import requests
from construct import Container
from pycaption import Caption, CaptionList, CaptionNode, WebVTTReader
from pycaption.geometry import Layout
from pymp4.parser import MP4
from subby import CommonIssuesFixer, SAMIConverter, SDHStripper, WebVTTConverter
from subtitle_filter import Subtitles

from unshackle.core import binaries
from unshackle.core.config import config
from unshackle.core.tracks.track import Track
from unshackle.core.utilities import try_ensure_utf8
from unshackle.core.utils.webvtt import merge_segmented_webvtt
@@ -30,6 +33,10 @@ class Subtitle(Track):
        SubStationAlphav4 = "ASS"  # https://wikipedia.org/wiki/SubStation_Alpha#Advanced_SubStation_Alpha
        TimedTextMarkupLang = "TTML"  # https://wikipedia.org/wiki/Timed_Text_Markup_Language
        WebVTT = "VTT"  # https://wikipedia.org/wiki/WebVTT
        SAMI = "SMI"  # https://wikipedia.org/wiki/SAMI
        MicroDVD = "SUB"  # https://wikipedia.org/wiki/MicroDVD
        MPL2 = "MPL2"  # MPL2 subtitle format
        TMP = "TMP"  # TMP subtitle format
        # MPEG-DASH box-encapsulated subtitle formats
        fTTML = "STPP"  # https://www.w3.org/TR/2018/REC-ttml-imsc1.0.1-20180424
        fVTT = "WVTT"  # https://www.w3.org/TR/webvtt1
@@ -51,6 +58,14 @@ class Subtitle(Track):
            return Subtitle.Codec.TimedTextMarkupLang
        elif mime == "vtt":
            return Subtitle.Codec.WebVTT
        elif mime in ("smi", "sami"):
            return Subtitle.Codec.SAMI
        elif mime in ("sub", "microdvd"):
            return Subtitle.Codec.MicroDVD
        elif mime == "mpl2":
            return Subtitle.Codec.MPL2
        elif mime == "tmp":
            return Subtitle.Codec.TMP
        elif mime == "stpp":
            return Subtitle.Codec.fTTML
        elif mime == "wvtt":
@@ -228,6 +243,7 @@ class Subtitle(Track):
            try:
                caption_set = pycaption.WebVTTReader().read(text)
                Subtitle.merge_same_cues(caption_set)
                Subtitle.filter_unwanted_cues(caption_set)
                subtitle_text = pycaption.WebVTTWriter().write(caption_set)
                self.path.write_text(subtitle_text, encoding="utf8")
            except pycaption.exceptions.CaptionReadSyntaxError:
@@ -236,6 +252,7 @@ class Subtitle(Track):
            try:
                caption_set = pycaption.WebVTTReader().read(text)
                Subtitle.merge_same_cues(caption_set)
                Subtitle.filter_unwanted_cues(caption_set)
                subtitle_text = pycaption.WebVTTWriter().write(caption_set)
                self.path.write_text(subtitle_text, encoding="utf8")
            except Exception:
@@ -306,10 +323,213 @@ class Subtitle(Track):

        return "\n".join(sanitized_lines)

    def convert_with_subby(self, codec: Subtitle.Codec) -> Path:
        """
        Convert subtitle using subby library for better format support and processing.

        This method leverages subby's advanced subtitle processing capabilities
        including better WebVTT handling, SDH stripping, and common issue fixing.
        """
        if not self.path or not self.path.exists():
            raise ValueError("You must download the subtitle track first.")

        if self.codec == codec:
            return self.path

        output_path = self.path.with_suffix(f".{codec.value.lower()}")
        original_path = self.path

        try:
            # Convert to SRT using subby first
            srt_subtitles = None

            if self.codec == Subtitle.Codec.WebVTT:
                converter = WebVTTConverter()
                srt_subtitles = converter.from_file(str(self.path))
            elif self.codec == Subtitle.Codec.SAMI:
                converter = SAMIConverter()
                srt_subtitles = converter.from_file(str(self.path))

            if srt_subtitles is not None:
                # Apply common fixes
                fixer = CommonIssuesFixer()
                fixed_srt, _ = fixer.from_srt(srt_subtitles)

                # If target is SRT, we're done
                if codec == Subtitle.Codec.SubRip:
                    output_path.write_text(str(fixed_srt), encoding="utf8")
                else:
                    # Convert from SRT to target format using existing pycaption logic
                    temp_srt_path = self.path.with_suffix(".temp.srt")
                    temp_srt_path.write_text(str(fixed_srt), encoding="utf8")

                    # Parse the SRT and convert to target format
                    caption_set = self.parse(temp_srt_path.read_bytes(), Subtitle.Codec.SubRip)
                    self.merge_same_cues(caption_set)

                    writer = {
                        Subtitle.Codec.TimedTextMarkupLang: pycaption.DFXPWriter,
                        Subtitle.Codec.WebVTT: pycaption.WebVTTWriter,
                    }.get(codec)

                    if writer:
                        subtitle_text = writer().write(caption_set)
                        output_path.write_text(subtitle_text, encoding="utf8")
                    else:
                        # Fall back to existing conversion method
                        temp_srt_path.unlink()
                        return self._convert_standard(codec)

                    temp_srt_path.unlink()

                if original_path.exists() and original_path != output_path:
                    original_path.unlink()

                self.path = output_path
                self.codec = codec

                if callable(self.OnConverted):
                    self.OnConverted(codec)

                return output_path
            else:
                # Fall back to existing conversion method
                return self._convert_standard(codec)

        except Exception:
            # Fall back to existing conversion method on any error
            return self._convert_standard(codec)

    def convert_with_pysubs2(self, codec: Subtitle.Codec) -> Path:
        """
        Convert subtitle using pysubs2 library for broad format support.

        pysubs2 is a pure-Python library supporting SubRip (SRT), SubStation Alpha
        (SSA/ASS), WebVTT, TTML, SAMI, MicroDVD, MPL2, and TMP formats.
        """
        if not self.path or not self.path.exists():
            raise ValueError("You must download the subtitle track first.")

        if self.codec == codec:
            return self.path

        output_path = self.path.with_suffix(f".{codec.value.lower()}")
        original_path = self.path

        codec_to_pysubs2_format = {
            Subtitle.Codec.SubRip: "srt",
            Subtitle.Codec.SubStationAlpha: "ssa",
            Subtitle.Codec.SubStationAlphav4: "ass",
            Subtitle.Codec.WebVTT: "vtt",
            Subtitle.Codec.TimedTextMarkupLang: "ttml",
            Subtitle.Codec.SAMI: "sami",
            Subtitle.Codec.MicroDVD: "microdvd",
            Subtitle.Codec.MPL2: "mpl2",
            Subtitle.Codec.TMP: "tmp",
        }

        pysubs2_output_format = codec_to_pysubs2_format.get(codec)
        if pysubs2_output_format is None:
            return self._convert_standard(codec)

        try:
            subs = pysubs2.load(str(self.path), encoding="utf-8")

            subs.save(str(output_path), format_=pysubs2_output_format, encoding="utf-8")

            if original_path.exists() and original_path != output_path:
                original_path.unlink()

            self.path = output_path
            self.codec = codec

            if callable(self.OnConverted):
                self.OnConverted(codec)

            return output_path

        except Exception:
            return self._convert_standard(codec)

    def convert(self, codec: Subtitle.Codec) -> Path:
        """
        Convert this Subtitle to another Format.

        The conversion method is determined by the 'conversion_method' setting in config:
        - 'auto' (default): Uses subby for WebVTT/SAMI, standard for others
        - 'subby': Always uses subby with CommonIssuesFixer
        - 'subtitleedit': Uses SubtitleEdit when available, falls back to pycaption
        - 'pycaption': Uses only pycaption library
        - 'pysubs2': Uses pysubs2 library
        """
        # Check configuration for conversion method
        conversion_method = config.subtitle.get("conversion_method", "auto")

        if conversion_method == "subby":
            return self.convert_with_subby(codec)
        elif conversion_method == "subtitleedit":
            return self._convert_standard(codec)
        elif conversion_method == "pycaption":
            return self._convert_pycaption_only(codec)
        elif conversion_method == "pysubs2":
            return self.convert_with_pysubs2(codec)
        elif conversion_method == "auto":
            if self.codec in (Subtitle.Codec.WebVTT, Subtitle.Codec.SAMI):
                return self.convert_with_subby(codec)
            else:
                return self._convert_standard(codec)
        else:
            return self._convert_standard(codec)
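A hedged usage sketch of the dispatch above, assuming `track` is a downloaded WebVTT Subtitle and the configuration is left at the 'auto' default:

    # "auto" routes WebVTT (and SAMI) sources through convert_with_subby(),
    # which applies CommonIssuesFixer before writing the target format; the
    # track's path and codec are updated to point at the converted file.
    srt_path = track.convert(Subtitle.Codec.SubRip)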
    def _convert_pycaption_only(self, codec: Subtitle.Codec) -> Path:
        """
        Convert subtitle using only pycaption library (no SubtitleEdit, no subby).

        This is the original conversion method that only uses pycaption.
        """
        if not self.path or not self.path.exists():
            raise ValueError("You must download the subtitle track first.")

        if self.codec == codec:
            return self.path

        output_path = self.path.with_suffix(f".{codec.value.lower()}")
        original_path = self.path

        # Use only pycaption for conversion
        writer = {
            Subtitle.Codec.SubRip: pycaption.SRTWriter,
            Subtitle.Codec.TimedTextMarkupLang: pycaption.DFXPWriter,
            Subtitle.Codec.WebVTT: pycaption.WebVTTWriter,
        }.get(codec)

        if writer is None:
            raise NotImplementedError(f"Cannot convert {self.codec.name} to {codec.name} using pycaption only.")

        caption_set = self.parse(self.path.read_bytes(), self.codec)
        Subtitle.merge_same_cues(caption_set)
        if codec == Subtitle.Codec.WebVTT:
            Subtitle.filter_unwanted_cues(caption_set)
        subtitle_text = writer().write(caption_set)

        output_path.write_text(subtitle_text, encoding="utf8")

        if original_path.exists() and original_path != output_path:
            original_path.unlink()

        self.path = output_path
        self.codec = codec

        if callable(self.OnConverted):
            self.OnConverted(codec)

        return output_path

    def _convert_standard(self, codec: Subtitle.Codec) -> Path:
        """
        Convert this Subtitle to another Format.

        The file path location of the Subtitle data will be kept at the same
        location but the file extension will be changed appropriately.

@@ -318,6 +538,7 @@ class Subtitle(Track):
        - TimedTextMarkupLang - SubtitleEdit or pycaption.DFXPWriter
        - WebVTT - SubtitleEdit or pycaption.WebVTTWriter
        - SubStationAlphav4 - SubtitleEdit
        - SAMI - subby.SAMIConverter (when available)
        - fTTML* - custom code using some pycaption functions
        - fVTT* - custom code using some pycaption functions
        *: Can read from format, but cannot convert to format
@@ -366,6 +587,8 @@ class Subtitle(Track):

        caption_set = self.parse(self.path.read_bytes(), self.codec)
        Subtitle.merge_same_cues(caption_set)
        if codec == Subtitle.Codec.WebVTT:
            Subtitle.filter_unwanted_cues(caption_set)
        subtitle_text = writer().write(caption_set)

        output_path.write_text(subtitle_text, encoding="utf8")
@@ -416,6 +639,13 @@ class Subtitle(Track):
            text = Subtitle.sanitize_broken_webvtt(text)
            text = Subtitle.space_webvtt_headers(text)
            caption_set = pycaption.WebVTTReader().read(text)
        elif codec == Subtitle.Codec.SAMI:
            # Use subby for SAMI parsing
            converter = SAMIConverter()
            srt_subtitles = converter.from_bytes(data)
            # Convert SRT back to CaptionSet for compatibility
            srt_text = str(srt_subtitles).encode("utf8")
            caption_set = Subtitle.parse(srt_text, Subtitle.Codec.SubRip)
        else:
            raise ValueError(f'Unknown Subtitle format "{codec}"...')
    except pycaption.exceptions.CaptionReadSyntaxError as e:
@@ -520,6 +750,24 @@ class Subtitle(Track):
        if merged_captions:
            caption_set.set_captions(lang, merged_captions)

    @staticmethod
    def filter_unwanted_cues(caption_set: pycaption.CaptionSet):
        """
        Filter out subtitle cues containing only whitespace (including non-breaking spaces).
        """
        for lang in caption_set.get_languages():
            captions = caption_set.get_captions(lang)
            filtered_captions = pycaption.CaptionList()

            for caption in captions:
                text = caption.get_text().strip()
                if not text or text == " " or all(c in " \t\n\r\xa0" for c in text.replace(" ", "\xa0")):
                    continue

                filtered_captions.append(caption)

            caption_set.set_captions(lang, filtered_captions)

    @staticmethod
    def merge_segmented_wvtt(data: bytes, period_start: float = 0.0) -> tuple[CaptionList, Optional[str]]:
        """
@@ -660,11 +908,56 @@ class Subtitle(Track):
    def strip_hearing_impaired(self) -> None:
        """
        Strip captions for hearing impaired (SDH).
        It uses SubtitleEdit if available, otherwise filter-subs.

        The SDH stripping method is determined by the 'sdh_method' setting in config:
        - 'auto' (default): Tries subby first, then SubtitleEdit, then filter-subs
        - 'subby': Uses subby's SDHStripper
        - 'subtitleedit': Uses SubtitleEdit when available
        - 'filter-subs': Uses subtitle-filter library
        """
        if not self.path or not self.path.exists():
            raise ValueError("You must download the subtitle track first.")

        # Check configuration for SDH stripping method
        sdh_method = config.subtitle.get("sdh_method", "auto")

        if sdh_method == "subby" and self.codec == Subtitle.Codec.SubRip:
            # Use subby's SDHStripper directly on the file
            stripper = SDHStripper()
            stripped_srt, _ = stripper.from_file(str(self.path))
            self.path.write_text(str(stripped_srt), encoding="utf8")
            return
        elif sdh_method == "subtitleedit" and binaries.SubtitleEdit:
            # Force use of SubtitleEdit
            pass  # Continue to SubtitleEdit section below
        elif sdh_method == "filter-subs":
            # Force use of filter-subs
            sub = Subtitles(self.path)
            try:
                sub.filter(rm_fonts=True, rm_ast=True, rm_music=True, rm_effects=True, rm_names=True, rm_author=True)
            except ValueError as e:
                if "too many values to unpack" in str(e):
                    # Retry without name removal if the error is due to multiple colons in time references
                    # This can happen with lines like "at 10:00 and 2:00"
                    sub = Subtitles(self.path)
                    sub.filter(
                        rm_fonts=True, rm_ast=True, rm_music=True, rm_effects=True, rm_names=False, rm_author=True
                    )
                else:
                    raise
            sub.save()
            return
        elif sdh_method == "auto":
            # Try subby first for SRT files, then fall back
            if self.codec == Subtitle.Codec.SubRip:
                try:
                    stripper = SDHStripper()
                    stripped_srt, _ = stripper.from_file(str(self.path))
                    self.path.write_text(str(stripped_srt), encoding="utf8")
                    return
                except Exception:
                    pass  # Fall through to other methods

        if binaries.SubtitleEdit:
            if self.codec == Subtitle.Codec.SubStationAlphav4:
                output_format = "AdvancedSubStationAlpha"
@@ -687,7 +980,18 @@ class Subtitle(Track):
            )
        else:
            sub = Subtitles(self.path)
            sub.filter(rm_fonts=True, rm_ast=True, rm_music=True, rm_effects=True, rm_names=True, rm_author=True)
            try:
                sub.filter(rm_fonts=True, rm_ast=True, rm_music=True, rm_effects=True, rm_names=True, rm_author=True)
            except ValueError as e:
                if "too many values to unpack" in str(e):
                    # Retry without name removal if the error is due to multiple colons in time references
                    # This can happen with lines like "at 10:00 and 2:00"
                    sub = Subtitles(self.path)
                    sub.filter(
                        rm_fonts=True, rm_ast=True, rm_music=True, rm_effects=True, rm_names=False, rm_author=True
                    )
                else:
                    raise
            sub.save()

    def reverse_rtl(self) -> None:

@@ -13,6 +13,7 @@ from typing import Any, Callable, Iterable, Optional, Union
from uuid import UUID
from zlib import crc32

from curl_cffi.requests import Session as CurlSession
from langcodes import Language
from pyplayready.cdm import Cdm as PlayReadyCdm
from pywidevine.cdm import Cdm as WidevineCdm
@@ -420,6 +421,15 @@ class Track:
        for drm in self.drm:
            if isinstance(drm, PlayReady):
                return drm
        elif hasattr(cdm, "is_playready"):
            if cdm.is_playready:
                for drm in self.drm:
                    if isinstance(drm, PlayReady):
                        return drm
        else:
            for drm in self.drm:
                if isinstance(drm, Widevine):
                    return drm

        return self.drm[0]

@@ -464,6 +474,83 @@ class Track:
        if tenc.key_ID.int != 0:
            return tenc.key_ID

    def load_drm_if_needed(self, service=None) -> bool:
        """
        Load DRM information for this track if it was deferred during parsing.

        Args:
            service: Service instance that can fetch track-specific DRM info

        Returns:
            True if DRM was loaded or already present, False if failed
        """
        if not getattr(self, "needs_drm_loading", False):
            return bool(self.drm)

        if self.drm:
            self.needs_drm_loading = False
            return True

        if not service or not hasattr(service, "get_track_drm"):
            return self.load_drm_from_playlist()

        try:
            track_drm = service.get_track_drm(self)
            if track_drm:
                self.drm = track_drm if isinstance(track_drm, list) else [track_drm]
                self.needs_drm_loading = False
                return True
        except Exception as e:
            raise ValueError(f"Failed to load DRM from service for track {self.id}: {e}")

        return self.load_drm_from_playlist()

    def load_drm_from_playlist(self) -> bool:
        """
        Fallback method to load DRM by fetching this track's individual playlist.
        """
        if self.drm:
            self.needs_drm_loading = False
            return True

        try:
            import m3u8
            from pyplayready.cdm import Cdm as PlayReadyCdm
            from pyplayready.system.pssh import PSSH as PR_PSSH
            from pywidevine.cdm import Cdm as WidevineCdm
            from pywidevine.pssh import PSSH as WV_PSSH

            session = getattr(self, "session", None) or Session()

            response = session.get(self.url)
            playlist = m3u8.loads(response.text, self.url)

            drm_list = []

            for key in playlist.keys or []:
                if not key or not key.keyformat:
                    continue

                fmt = key.keyformat.lower()
                if fmt == WidevineCdm.urn:
                    pssh_b64 = key.uri.split(",")[-1]
                    drm = Widevine(pssh=WV_PSSH(pssh_b64))
                    drm_list.append(drm)
                elif "com.microsoft.playready" in fmt:  # fixed: original compared the string against the PlayReadyCdm class, which is always False
                    pssh_b64 = key.uri.split(",")[-1]
                    drm = PlayReady(pssh=PR_PSSH(pssh_b64), pssh_b64=pssh_b64)
                    drm_list.append(drm)

            if drm_list:
                self.drm = drm_list
                self.needs_drm_loading = False
                return True

        except Exception as e:
            raise ValueError(f"Failed to load DRM from playlist for track {self.id}: {e}")

        return False

    def get_init_segment(
        self,
        maximum_size: int = 20000,
@@ -499,8 +586,8 @@ class Track:
            raise TypeError(f"Expected url to be a {str}, not {type(url)}")
        if not isinstance(byte_range, (str, type(None))):
            raise TypeError(f"Expected byte_range to be a {str}, not {type(byte_range)}")
        if not isinstance(session, (Session, type(None))):
            raise TypeError(f"Expected session to be a {Session}, not {type(session)}")
        if not isinstance(session, (Session, CurlSession, type(None))):
            raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {type(session)}")

        if not url:
            if self.descriptor != self.Descriptor.URL:
@@ -558,15 +645,32 @@ class Track:
        output_path = original_path.with_stem(f"{original_path.stem}_repack")

        def _ffmpeg(extra_args: list[str] = None):
            subprocess.run(
            args = [
                binaries.FFMPEG,
                "-hide_banner",
                "-loglevel",
                "error",
                "-i",
                original_path,
                *(extra_args or []),
            ]

            if hasattr(self, "data") and self.data.get("audio_language"):
                audio_lang = self.data["audio_language"]
                audio_name = self.data.get("audio_language_name", audio_lang)
                args.extend(
                    [
                        "-metadata:s:a:0",
                        f"language={audio_lang}",
                        "-metadata:s:a:0",
                        f"title={audio_name}",
                        "-metadata:s:a:0",
                        f"handler_name={audio_name}",
                    ]
                )

            args.extend(
                [
                    binaries.FFMPEG,
                    "-hide_banner",
                    "-loglevel",
                    "error",
                    "-i",
                    original_path,
                    *(extra_args or []),
                    # Following are very important!
                    "-map_metadata",
                    "-1",  # don't transfer metadata to output file
@@ -575,7 +679,11 @@ class Track:
                    "-codec",
                    "copy",
                    str(output_path),
                ],
                ]
            )

            subprocess.run(
                args,
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,

@@ -11,9 +11,10 @@ from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeRe
from rich.table import Table
from rich.tree import Tree

from unshackle.core import binaries
from unshackle.core.config import config
from unshackle.core.console import console
from unshackle.core.constants import LANGUAGE_MAX_DISTANCE, AnyTrack, TrackT
from unshackle.core.constants import LANGUAGE_EXACT_DISTANCE, LANGUAGE_MAX_DISTANCE, AnyTrack, TrackT
from unshackle.core.events import events
from unshackle.core.tracks.attachment import Attachment
from unshackle.core.tracks.audio import Audio
@@ -180,7 +181,7 @@ class Tracks:
        log = logging.getLogger("Tracks")

        if duplicates:
            log.warning(f" - Found and skipped {duplicates} duplicate tracks...")
            log.debug(f" - Found and skipped {duplicates} duplicate tracks...")

    def sort_videos(self, by_language: Optional[Sequence[Union[str, Language]]] = None) -> None:
        """Sort video tracks by bitrate, and optionally language."""
@@ -201,17 +202,16 @@ class Tracks:
        """Sort audio tracks by bitrate, descriptive, and optionally language."""
        if not self.audio:
            return
        # bitrate
        self.audio.sort(key=lambda x: float(x.bitrate or 0.0), reverse=True)
        # descriptive
        self.audio.sort(key=lambda x: str(x.language) if x.descriptive else "")
        self.audio.sort(key=lambda x: x.descriptive)
        # bitrate (within each descriptive group)
        self.audio.sort(key=lambda x: float(x.bitrate or 0.0), reverse=True)
        # language
        for language in reversed(by_language or []):
            if str(language) in ("all", "best"):
                language = next((x.language for x in self.audio if x.is_original_lang), "")
                if not language:
                    continue
            self.audio.sort(key=lambda x: str(x.language))
            self.audio.sort(key=lambda x: not is_close_match(language, [x.language]))
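The rewritten sort above leans on Python's stable sort: each later `sort()` call becomes the primary key while earlier orderings survive as tie-breakers, so the effective priority ends up language, then descriptive flag, then bitrate. A small self-contained sketch with hypothetical values:

    tracks = [("en", 128), ("ja", 640), ("en", 640)]  # (language, bitrate)
    tracks.sort(key=lambda x: x[1], reverse=True)  # lowest priority: bitrate, descending
    tracks.sort(key=lambda x: x[0] != "ja")        # highest priority: preferred language first
    assert tracks == [("ja", 640), ("en", 640), ("en", 128)]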
    def sort_subtitles(self, by_language: Optional[Sequence[Union[str, Language]]] = None) -> None:
@@ -253,6 +253,31 @@ class Tracks:
    def select_subtitles(self, x: Callable[[Subtitle], bool]) -> None:
        self.subtitles = list(filter(x, self.subtitles))

    def select_hybrid(self, tracks, quality):
        hdr10_tracks = [
            v
            for v in tracks
            if v.range == Video.Range.HDR10 and (v.height in quality or int(v.width * 9 / 16) in quality)
        ]
        hdr10 = []
        for res in quality:
            candidates = [v for v in hdr10_tracks if v.height == res or int(v.width * 9 / 16) == res]
            if candidates:
                best = max(candidates, key=lambda v: v.bitrate)  # assumes .bitrate exists
                hdr10.append(best)

        dv_tracks = [v for v in tracks if v.range == Video.Range.DV]
        lowest_dv = min(dv_tracks, key=lambda v: v.height) if dv_tracks else None

        def select(x):
            if x in hdr10:
                return True
            if lowest_dv and x is lowest_dv:
                return True
            return False

        return select
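`select_hybrid` returns a predicate rather than filtering in place, so the caller can apply it with an ordinary filter. A hedged sketch; how the real caller wires this up is an assumption, not shown in this diff:

    keep = tracks.select_hybrid(tracks.videos, quality=[2160])
    tracks.videos = list(filter(keep, tracks.videos))  # best 2160p HDR10 plus the lowest DV track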
    def by_resolutions(self, resolutions: list[int], per_resolution: int = 0) -> None:
        # Note: Do not merge these list comprehensions. They must be done separately so the results
        # from the 16:9 canvas check are only used if there's no exact height resolution match.
@@ -269,17 +294,27 @@ class Tracks:
        self.videos = selected

    @staticmethod
    def by_language(tracks: list[TrackT], languages: list[str], per_language: int = 0) -> list[TrackT]:
    def by_language(
        tracks: list[TrackT], languages: list[str], per_language: int = 0, exact_match: bool = False
    ) -> list[TrackT]:
        distance = LANGUAGE_EXACT_DISTANCE if exact_match else LANGUAGE_MAX_DISTANCE
        selected = []
        for language in languages:
            selected.extend(
                [x for x in tracks if closest_supported_match(x.language, [language], LANGUAGE_MAX_DISTANCE)][
                [x for x in tracks if closest_supported_match(str(x.language), [language], distance)][
                    : per_language or None
                ]
            )
        return selected
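With `exact_match=True` the matcher tolerates only `LANGUAGE_EXACT_DISTANCE`, so a request for "pt-BR" is no longer satisfied by a plain "pt" track. A comment-only illustration (the track objects are hypothetical):

    # Tracks tagged "pt" and "pt-BR":
    # Tracks.by_language(tracks, ["pt-BR"])                    -> may select both (close match)
    # Tracks.by_language(tracks, ["pt-BR"], exact_match=True)  -> selects only the "pt-BR" track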
    def mux(self, title: str, delete: bool = True, progress: Optional[partial] = None) -> tuple[Path, int, list[str]]:
    def mux(
        self,
        title: str,
        delete: bool = True,
        progress: Optional[partial] = None,
        audio_expected: bool = True,
        title_language: Optional[Language] = None,
    ) -> tuple[Path, int, list[str]]:
        """
        Multiplex all the Tracks into a Matroska Container file.

@@ -289,9 +324,33 @@ class Tracks:
            delete: Delete all track files after multiplexing.
            progress: Update a rich progress bar via `completed=...`. This must be the
                progress object's update() func, pre-set with task id via functools.partial.
            audio_expected: Whether audio is expected in the output. Used to determine
                if embedded audio metadata should be added.
            title_language: The title's intended language. Used to select the best video track
                for audio metadata when multiple video tracks exist.
        """
        if self.videos and not self.audio and audio_expected:
            video_track = None
            if title_language:
                video_track = next((v for v in self.videos if v.language == title_language), None)
                if not video_track:
                    video_track = next((v for v in self.videos if v.is_original_lang), None)

            video_track = video_track or self.videos[0]
            if video_track.language.is_valid():
                lang_code = str(video_track.language)
                lang_name = video_track.language.display_name()

                for video in self.videos:
                    video.needs_repack = True
                    video.data["audio_language"] = lang_code
                    video.data["audio_language_name"] = lang_name

        if not binaries.MKVToolNix:
            raise RuntimeError("MKVToolNix (mkvmerge) is required for muxing but was not found")

        cl = [
            "mkvmerge",
            str(binaries.MKVToolNix),
            "--no-date",  # remove dates from the output for security
        ]

@@ -302,21 +361,59 @@ class Tracks:
            if not vt.path or not vt.path.exists():
                raise ValueError("Video Track must be downloaded before muxing...")
            events.emit(events.Types.TRACK_MULTIPLEX, track=vt)
            cl.extend(
                [
                    "--language",
                    f"0:{vt.language}",
                    "--default-track",
                    f"0:{i == 0}",
                    "--original-flag",
                    f"0:{vt.is_original_lang}",
                    "--compression",
                    "0:none",  # disable extra compression
                    "(",
                    str(vt.path),
                    ")",
                ]
            )

            is_default = False
            if title_language:
                is_default = vt.language == title_language
                if not any(v.language == title_language for v in self.videos):
                    is_default = vt.is_original_lang or i == 0
            else:
                is_default = i == 0

            # Prepare base arguments
            video_args = [
                "--language",
                f"0:{vt.language}",
                "--default-track",
                f"0:{is_default}",
                "--original-flag",
                f"0:{vt.is_original_lang}",
                "--compression",
                "0:none",  # disable extra compression
            ]

            # Add FPS fix if needed (typically for hybrid mode to prevent sync issues)
            if hasattr(vt, "needs_duration_fix") and vt.needs_duration_fix and vt.fps:
                video_args.extend(
                    [
                        "--default-duration",
                        f"0:{vt.fps}fps" if isinstance(vt.fps, str) else f"0:{vt.fps:.3f}fps",
                        "--fix-bitstream-timing-information",
                        "0:1",
                    ]
                )

            if hasattr(vt, "range") and vt.range == Video.Range.HLG:
                video_args.extend(
                    [
                        "--color-transfer-characteristics",
                        "0:18",  # ARIB STD-B67 (HLG)
                    ]
                )

            if hasattr(vt, "data") and vt.data.get("audio_language"):
                audio_lang = vt.data["audio_language"]
                audio_name = vt.data.get("audio_language_name", audio_lang)
                video_args.extend(
                    [
                        "--language",
                        f"1:{audio_lang}",
                        "--track-name",
                        f"1:{audio_name}",
                    ]
                )

            cl.extend(video_args + ["(", str(vt.path), ")"])

        for i, at in enumerate(self.audio):
            if not at.path or not at.path.exists():

@@ -94,6 +94,7 @@ class Video(Track):
        HDR10 = "HDR10"  # https://en.wikipedia.org/wiki/HDR10
        HDR10P = "HDR10+"  # https://en.wikipedia.org/wiki/HDR10%2B
        DV = "DV"  # https://en.wikipedia.org/wiki/Dolby_Vision
        HYBRID = "HYBRID"  # Selects both HDR10 and DV tracks for hybrid processing with DoviTool

    @staticmethod
    def from_cicp(primaries: int, transfer: int, matrix: int) -> Video.Range:
@@ -115,6 +116,7 @@ class Video(Track):
    class Transfer(Enum):
        Unspecified = 0
        BT_709 = 1
        Unspecified_Image = 2
        BT_601 = 6
        BT_2020 = 14
        BT_2100 = 15
@@ -236,6 +238,8 @@ class Video(Track):
        except Exception as e:
            raise ValueError("Expected fps to be a number, float, or a string as numerator/denominator form, " + str(e))

        self.needs_duration_fix = False

    def __str__(self) -> str:
        return " | ".join(
            filter(

276
unshackle/core/update_checker.py
Normal file
276
unshackle/core/update_checker.py
Normal file
@@ -0,0 +1,276 @@
from __future__ import annotations

import asyncio
import json
import time
from pathlib import Path
from typing import Optional

import requests


class UpdateChecker:
    """
    Check for available updates from the GitHub repository.

    This class provides functionality to check for newer versions of the application
    by querying the GitHub releases API. It includes rate limiting, caching, and
    both synchronous and asynchronous interfaces.

    Attributes:
        REPO_URL: GitHub API URL for latest release
        TIMEOUT: Request timeout in seconds
        DEFAULT_CHECK_INTERVAL: Default time between checks in seconds (24 hours)
    """

    REPO_URL = "https://api.github.com/repos/unshackle-dl/unshackle/releases/latest"
    TIMEOUT = 5
    DEFAULT_CHECK_INTERVAL = 24 * 60 * 60

    @classmethod
    def _get_cache_file(cls) -> Path:
        """Get the path to the update check cache file."""
        from unshackle.core.config import config

        return config.directories.cache / "update_check.json"

    @classmethod
    def _load_cache_data(cls) -> dict:
        """
        Load cache data from file.

        Returns:
            Cache data dictionary or empty dict if loading fails
        """
        cache_file = cls._get_cache_file()

        if not cache_file.exists():
            return {}

        try:
            with open(cache_file, "r") as f:
                return json.load(f)
        except (json.JSONDecodeError, OSError):
            return {}

    @staticmethod
    def _parse_version(version_string: str) -> str:
        """
        Parse and normalize version string by removing 'v' prefix.

        Args:
            version_string: Raw version string from API

        Returns:
            Cleaned version string
        """
        return version_string.lstrip("v")

    @staticmethod
    def _is_valid_version(version: str) -> bool:
        """
        Validate version string format.

        Args:
            version: Version string to validate

        Returns:
            True if version string is valid semantic version, False otherwise
        """
        if not version or not isinstance(version, str):
            return False

        try:
            parts = version.split(".")
            if len(parts) < 2:
                return False

            for part in parts:
                int(part)

            return True
        except (ValueError, AttributeError):
            return False

    @classmethod
    def _fetch_latest_version(cls) -> Optional[str]:
        """
        Fetch the latest version from GitHub API.

        Returns:
            Latest version string if successful, None otherwise
        """
        try:
            response = requests.get(cls.REPO_URL, timeout=cls.TIMEOUT)

            if response.status_code != 200:
                return None

            data = response.json()
            latest_version = cls._parse_version(data.get("tag_name", ""))

            return latest_version if cls._is_valid_version(latest_version) else None

        except Exception:
            return None

    @classmethod
    def _should_check_for_updates(cls, check_interval: int = DEFAULT_CHECK_INTERVAL) -> bool:
        """
        Check if enough time has passed since the last update check.

        Args:
            check_interval: Time in seconds between checks (default: 24 hours)

        Returns:
            True if we should check for updates, False otherwise
        """
        cache_data = cls._load_cache_data()

        if not cache_data:
            return True

        last_check = cache_data.get("last_check", 0)
        current_time = time.time()

        return (current_time - last_check) >= check_interval
    @classmethod
    def _update_cache(cls, latest_version: Optional[str] = None, current_version: Optional[str] = None) -> None:
        """
        Update the cache file with the current timestamp and version info.

        Args:
            latest_version: The latest version found, if any
            current_version: The current version being used
        """
        cache_file = cls._get_cache_file()

        try:
            cache_file.parent.mkdir(parents=True, exist_ok=True)

            cache_data = {
                "last_check": time.time(),
                "latest_version": latest_version,
                "current_version": current_version,
            }

            with open(cache_file, "w") as f:
                json.dump(cache_data, f, indent=2)

        except (OSError, TypeError):
            # The json module has no JSONEncodeError; json.dump raises
            # TypeError for unserializable values, so catch that instead.
            pass
    @staticmethod
    def _compare_versions(current: str, latest: str) -> bool:
        """
        Simple semantic version comparison.

        Args:
            current: Current version string (e.g., "1.1.0")
            latest: Latest version string (e.g., "1.2.0")

        Returns:
            True if latest > current, False otherwise
        """
        if not UpdateChecker._is_valid_version(current) or not UpdateChecker._is_valid_version(latest):
            return False

        try:
            current_parts = [int(x) for x in current.split(".")]
            latest_parts = [int(x) for x in latest.split(".")]

            max_length = max(len(current_parts), len(latest_parts))
            current_parts.extend([0] * (max_length - len(current_parts)))
            latest_parts.extend([0] * (max_length - len(latest_parts)))

            for current_part, latest_part in zip(current_parts, latest_parts):
                if latest_part > current_part:
                    return True
                elif latest_part < current_part:
                    return False

            return False
        except (ValueError, AttributeError):
            return False

    @classmethod
    async def check_for_updates(cls, current_version: str) -> Optional[str]:
        """
        Check if there's a newer version available on GitHub.

        Args:
            current_version: The current version string (e.g., "1.1.0")

        Returns:
            The latest version string if an update is available, None otherwise
        """
        if not cls._is_valid_version(current_version):
            return None

        try:
            loop = asyncio.get_event_loop()
            latest_version = await loop.run_in_executor(None, cls._fetch_latest_version)

            if latest_version and cls._compare_versions(current_version, latest_version):
                return latest_version

        except Exception:
            pass

        return None

    @classmethod
    def _get_cached_update_info(cls, current_version: str) -> Optional[str]:
        """
        Check if there's a cached update available for the current version.

        Args:
            current_version: The current version string

        Returns:
            The latest version string if an update is available from cache, None otherwise
        """
        cache_data = cls._load_cache_data()

        if not cache_data:
            return None

        cached_current = cache_data.get("current_version")
        cached_latest = cache_data.get("latest_version")

        if cached_current == current_version and cached_latest:
            if cls._compare_versions(current_version, cached_latest):
                return cached_latest

        return None

    @classmethod
    def check_for_updates_sync(cls, current_version: str, check_interval: Optional[int] = None) -> Optional[str]:
        """
        Synchronous version of update check with rate limiting.

        Args:
            current_version: The current version string (e.g., "1.1.0")
            check_interval: Time in seconds between checks (default: from config)

        Returns:
            The latest version string if an update is available, None otherwise
        """
        if not cls._is_valid_version(current_version):
            return None

        if check_interval is None:
            from unshackle.core.config import config

            check_interval = config.update_check_interval * 60 * 60

        if not cls._should_check_for_updates(check_interval):
            return cls._get_cached_update_info(current_version)

        latest_version = cls._fetch_latest_version()
        cls._update_cache(latest_version, current_version)
        if latest_version and cls._compare_versions(current_version, latest_version):
            return latest_version

        return None
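A minimal usage sketch of the checker above, assuming unshackle.core exposes __version__ as it does elsewhere in this changeset; note that _compare_versions pads shorter versions with zeros, so "1.2" compares as "1.2.0":

    from unshackle.core import __version__
    from unshackle.core.update_checker import UpdateChecker

    # Rate-limited: hits the GitHub API at most once per check interval,
    # otherwise answering from the update_check.json cache.
    newer = UpdateChecker.check_for_updates_sync(__version__)
    if newer:
        print(f"Update available: {__version__} -> {newer}")

    # Zero-padding in _compare_versions:
    assert UpdateChecker._compare_versions("1.2", "1.2.1") is True
    assert UpdateChecker._compare_versions("1.2", "1.2.0") is False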
@@ -1,18 +1,22 @@
import ast
import contextlib
import importlib.util
import json
import logging
import os
import re
import socket
import sys
import time
import traceback
import unicodedata
from collections import defaultdict
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from types import ModuleType
from typing import Optional, Sequence, Union
from typing import Any, Optional, Sequence, Union
from urllib.parse import ParseResult, urlparse
from uuid import uuid4

import chardet
import requests
@@ -21,8 +25,9 @@ from langcodes import Language, closest_match
from pymp4.parser import Box
from unidecode import unidecode

from unshackle.core.cacher import Cacher
from unshackle.core.config import config
from unshackle.core.constants import LANGUAGE_MAX_DISTANCE
from unshackle.core.constants import LANGUAGE_EXACT_DISTANCE, LANGUAGE_MAX_DISTANCE


def rotate_log_file(log_path: Path, keep: int = 20) -> Path:
@@ -112,7 +117,15 @@ def is_close_match(language: Union[str, Language], languages: Sequence[Union[str
    return closest_match(language, list(map(str, languages)))[1] <= LANGUAGE_MAX_DISTANCE


def get_boxes(data: bytes, box_type: bytes, as_bytes: bool = False) -> Box:
def is_exact_match(language: Union[str, Language], languages: Sequence[Union[str, Language, None]]) -> bool:
    """Check if a language is an exact match to any of the provided languages."""
    languages = [x for x in languages if x]
    if not languages:
        return False
    return closest_match(language, list(map(str, languages)))[1] <= LANGUAGE_EXACT_DISTANCE


def get_boxes(data: bytes, box_type: bytes, as_bytes: bool = False) -> Box:  # type: ignore
    """
    Scan a byte array for a wanted MP4/ISOBMFF box, then parse and yield each find.
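For contrast with is_close_match above, a small sketch of the new exact matcher; the concrete values of LANGUAGE_EXACT_DISTANCE and LANGUAGE_MAX_DISTANCE live in unshackle.core.constants, so the regional-variant behaviour shown here is indicative rather than guaranteed:

    from unshackle.core.utilities import is_close_match, is_exact_match

    is_close_match("en-US", ["en"])  # True: within LANGUAGE_MAX_DISTANCE
    is_exact_match("en-US", ["en"])  # expected False: regional variant, not exact
    is_exact_match("en", ["en"])     # True
    is_exact_match("en", [None])     # False: nothing left after filtering Nones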
@@ -242,6 +255,101 @@ def get_ip_info(session: Optional[requests.Session] = None) -> dict:
    return (session or requests.Session()).get("https://ipinfo.io/json").json()


def get_cached_ip_info(session: Optional[requests.Session] = None) -> Optional[dict]:
    """
    Get IP location information with 24-hour caching and fallback providers.

    This function uses a global cache to avoid repeated API calls when the IP
    hasn't changed. Should only be used for local IP checks, not for proxy verification.
    Implements smart provider rotation to handle rate limiting (429 errors).

    Args:
        session: Optional requests session (usually without proxy for local IP)

    Returns:
        Dict with IP info including 'country' key, or None if all providers fail
    """
    log = logging.getLogger("get_cached_ip_info")
    cache = Cacher("global").get("ip_info")

    if cache and not cache.expired:
        return cache.data

    provider_state_cache = Cacher("global").get("ip_provider_state")
    provider_state = provider_state_cache.data if provider_state_cache and not provider_state_cache.expired else {}

    providers = {
        "ipinfo": "https://ipinfo.io/json",
        "ipapi": "https://ipapi.co/json",
    }

    session = session or requests.Session()
    provider_order = ["ipinfo", "ipapi"]

    current_time = time.time()
    for provider_name in list(provider_order):
        if provider_name in provider_state:
            rate_limit_info = provider_state[provider_name]
            if (current_time - rate_limit_info.get("rate_limited_at", 0)) < 300:
                log.debug(f"Provider {provider_name} was rate limited recently, trying other provider first")
                provider_order.remove(provider_name)
                provider_order.append(provider_name)
                break

    for provider_name in provider_order:
        provider_url = providers[provider_name]
        try:
            log.debug(f"Trying IP provider: {provider_name}")
            response = session.get(provider_url, timeout=10)

            if response.status_code == 429:
                log.warning(f"Provider {provider_name} returned 429 (rate limited), trying next provider")
                if provider_name not in provider_state:
                    provider_state[provider_name] = {}
                provider_state[provider_name]["rate_limited_at"] = current_time
                provider_state[provider_name]["rate_limit_count"] = (
                    provider_state[provider_name].get("rate_limit_count", 0) + 1
                )

                provider_state_cache.set(provider_state, expiration=300)
                continue

            elif response.status_code == 200:
                data = response.json()
                normalized_data = {}

                if "country" in data:
                    normalized_data = data
                elif "country_code" in data:
                    normalized_data = {
                        "country": data.get("country_code", "").lower(),
                        "region": data.get("region", ""),
                        "city": data.get("city", ""),
                        "ip": data.get("ip", ""),
                    }

                if normalized_data and "country" in normalized_data:
                    log.debug(f"Successfully got IP info from provider: {provider_name}")

                    if provider_name in provider_state:
                        provider_state[provider_name].pop("rate_limited_at", None)
                        provider_state_cache.set(provider_state, expiration=300)

                    normalized_data["_provider"] = provider_name
                    cache.set(normalized_data, expiration=86400)
                    return normalized_data
            else:
                log.debug(f"Provider {provider_name} returned status {response.status_code}")

        except Exception as e:
            log.debug(f"Provider {provider_name} failed with exception: {e}")
            continue

    log.warning("All IP geolocation providers failed")
    return None
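A sketch of the intended call pattern for the helper above:

    from unshackle.core.utilities import get_cached_ip_info

    # First call queries ipinfo (or ipapi after a 429/failure) and caches the
    # normalized dict for 86400 seconds; calls within the next 24 hours are
    # answered from the cache without any network request.
    info = get_cached_ip_info()
    if info:
        print(info["country"], info.get("_provider"))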
def time_elapsed_since(start: float) -> str:
    """
    Get time elapsed since a timestamp as a string.
@@ -352,3 +460,334 @@ class FPS(ast.NodeVisitor):
    @classmethod
    def parse(cls, expr: str) -> float:
        return cls().visit(ast.parse(expr).body[0])
"""
|
||||
Structured JSON debug logging for unshackle.
|
||||
|
||||
Provides comprehensive debugging information for service developers and troubleshooting.
|
||||
When enabled, logs all operations, requests, responses, DRM operations, and errors in JSON format.
|
||||
"""
|
||||
|
||||
|
||||
class DebugLogger:
|
||||
"""
|
||||
Structured JSON debug logger for unshackle.
|
||||
|
||||
Outputs JSON Lines format where each line is a complete JSON object.
|
||||
This makes it easy to parse, filter, and analyze logs programmatically.
|
||||
"""
|
||||
|
||||
def __init__(self, log_path: Optional[Path] = None, enabled: bool = False, log_keys: bool = False):
|
||||
"""
|
||||
Initialize the debug logger.
|
||||
|
||||
Args:
|
||||
log_path: Path to the log file. If None, logging is disabled.
|
||||
enabled: Whether debug logging is enabled.
|
||||
log_keys: Whether to log decryption keys (for debugging key issues).
|
||||
"""
|
||||
self.enabled = enabled and log_path is not None
|
||||
self.log_path = log_path
|
||||
self.session_id = str(uuid4())[:8]
|
||||
self.file_handle = None
|
||||
self.log_keys = log_keys
|
||||
|
||||
if self.enabled:
|
||||
self.log_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
self.file_handle = open(self.log_path, "a", encoding="utf-8")
|
||||
self._log_session_start()
|
||||
|
||||
def _log_session_start(self):
|
||||
"""Log the start of a new session with environment information."""
|
||||
import platform
|
||||
|
||||
from unshackle.core import __version__
|
||||
|
||||
self.log(
|
||||
level="INFO",
|
||||
operation="session_start",
|
||||
message="Debug logging session started",
|
||||
context={
|
||||
"unshackle_version": __version__,
|
||||
"python_version": sys.version,
|
||||
"platform": platform.platform(),
|
||||
"platform_system": platform.system(),
|
||||
"platform_release": platform.release(),
|
||||
},
|
||||
)
|
||||
|
||||
def log(
|
||||
self,
|
||||
level: str = "DEBUG",
|
||||
operation: str = "",
|
||||
message: str = "",
|
||||
context: Optional[dict[str, Any]] = None,
|
||||
service: Optional[str] = None,
|
||||
error: Optional[Exception] = None,
|
||||
request: Optional[dict[str, Any]] = None,
|
||||
response: Optional[dict[str, Any]] = None,
|
||||
duration_ms: Optional[float] = None,
|
||||
success: Optional[bool] = None,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Log a structured JSON entry.
|
||||
|
||||
Args:
|
||||
level: Log level (DEBUG, INFO, WARNING, ERROR)
|
||||
operation: Name of the operation being performed
|
||||
message: Human-readable message
|
||||
context: Additional context information
|
||||
service: Service name (e.g., DSNP, NF)
|
||||
error: Exception object if an error occurred
|
||||
request: Request details (URL, method, headers, body)
|
||||
response: Response details (status, headers, body)
|
||||
duration_ms: Operation duration in milliseconds
|
||||
success: Whether the operation succeeded
|
||||
**kwargs: Additional fields to include in the log entry
|
||||
"""
|
||||
if not self.enabled or not self.file_handle:
|
||||
return
|
||||
|
||||
entry = {
|
||||
"timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
"session_id": self.session_id,
|
||||
"level": level,
|
||||
}
|
||||
|
||||
if operation:
|
||||
entry["operation"] = operation
|
||||
if message:
|
||||
entry["message"] = message
|
||||
if service:
|
||||
entry["service"] = service
|
||||
if context:
|
||||
entry["context"] = self._sanitize_data(context)
|
||||
if request:
|
||||
entry["request"] = self._sanitize_data(request)
|
||||
if response:
|
||||
entry["response"] = self._sanitize_data(response)
|
||||
if duration_ms is not None:
|
||||
entry["duration_ms"] = duration_ms
|
||||
if success is not None:
|
||||
entry["success"] = success
|
||||
|
||||
if error:
|
||||
entry["error"] = {
|
||||
"type": type(error).__name__,
|
||||
"message": str(error),
|
||||
"traceback": traceback.format_exception(type(error), error, error.__traceback__),
|
||||
}
|
||||
|
||||
for key, value in kwargs.items():
|
||||
if key not in entry:
|
||||
entry[key] = self._sanitize_data(value)
|
||||
|
||||
try:
|
||||
self.file_handle.write(json.dumps(entry, default=str) + "\n")
|
||||
self.file_handle.flush()
|
||||
except Exception as e:
|
||||
print(f"Failed to write debug log: {e}", file=sys.stderr)
|
||||
|
||||
def _sanitize_data(self, data: Any) -> Any:
|
||||
"""
|
||||
Sanitize data for JSON serialization.
|
||||
Handles complex objects and removes sensitive information.
|
||||
"""
|
||||
if data is None:
|
||||
return None
|
||||
|
||||
if isinstance(data, (str, int, float, bool)):
|
||||
return data
|
||||
|
||||
if isinstance(data, (list, tuple)):
|
||||
return [self._sanitize_data(item) for item in data]
|
||||
|
||||
if isinstance(data, dict):
|
||||
sanitized = {}
|
||||
for key, value in data.items():
|
||||
key_lower = str(key).lower()
|
||||
has_prefix = key_lower.startswith("has_")
|
||||
|
||||
is_always_sensitive = not has_prefix and any(
|
||||
sensitive in key_lower for sensitive in ["password", "token", "secret", "auth", "cookie"]
|
||||
)
|
||||
|
||||
is_key_field = (
|
||||
"key" in key_lower
|
||||
and not has_prefix
|
||||
and not any(safe in key_lower for safe in ["_count", "_id", "_type", "kid", "keys_", "key_found"])
|
||||
)
|
||||
|
||||
should_redact = is_always_sensitive or (is_key_field and not self.log_keys)
|
||||
|
||||
if should_redact:
|
||||
sanitized[key] = "[REDACTED]"
|
||||
else:
|
||||
sanitized[key] = self._sanitize_data(value)
|
||||
return sanitized
|
||||
|
||||
if isinstance(data, bytes):
|
||||
try:
|
||||
return data.hex()
|
||||
except Exception:
|
||||
return "[BINARY_DATA]"
|
||||
|
||||
if isinstance(data, Path):
|
||||
return str(data)
|
||||
|
||||
try:
|
||||
return str(data)
|
||||
except Exception:
|
||||
return f"[{type(data).__name__}]"
|
||||
|
||||
def log_operation_start(self, operation: str, **kwargs) -> str:
|
||||
"""
|
||||
Log the start of an operation and return an operation ID.
|
||||
|
||||
Args:
|
||||
operation: Name of the operation
|
||||
**kwargs: Additional context
|
||||
|
||||
Returns:
|
||||
Operation ID that can be used to log the end of the operation
|
||||
"""
|
||||
op_id = str(uuid4())[:8]
|
||||
self.log(
|
||||
level="DEBUG",
|
||||
operation=f"{operation}_start",
|
||||
message=f"Starting operation: {operation}",
|
||||
operation_id=op_id,
|
||||
**kwargs,
|
||||
)
|
||||
return op_id
|
||||
|
||||
def log_operation_end(
|
||||
self, operation: str, operation_id: str, success: bool = True, duration_ms: Optional[float] = None, **kwargs
|
||||
):
|
||||
"""
|
||||
Log the end of an operation.
|
||||
|
||||
Args:
|
||||
operation: Name of the operation
|
||||
operation_id: Operation ID from log_operation_start
|
||||
success: Whether the operation succeeded
|
||||
duration_ms: Operation duration in milliseconds
|
||||
**kwargs: Additional context
|
||||
"""
|
||||
self.log(
|
||||
level="INFO" if success else "ERROR",
|
||||
operation=f"{operation}_end",
|
||||
message=f"Finished operation: {operation}",
|
||||
operation_id=operation_id,
|
||||
success=success,
|
||||
duration_ms=duration_ms,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def log_service_call(self, method: str, url: str, **kwargs):
|
||||
"""
|
||||
Log a service API call.
|
||||
|
||||
Args:
|
||||
method: HTTP method (GET, POST, etc.)
|
||||
url: Request URL
|
||||
**kwargs: Additional request details (headers, body, etc.)
|
||||
"""
|
||||
self.log(level="DEBUG", operation="service_call", request={"method": method, "url": url, **kwargs})
|
||||
|
||||
def log_drm_operation(self, drm_type: str, operation: str, **kwargs):
|
||||
"""
|
||||
Log a DRM operation (PSSH extraction, license request, key retrieval).
|
||||
|
||||
Args:
|
||||
drm_type: DRM type (Widevine, PlayReady, etc.)
|
||||
operation: DRM operation name
|
||||
**kwargs: Additional context (PSSH, KIDs, keys, etc.)
|
||||
"""
|
||||
self.log(
|
||||
level="DEBUG", operation=f"drm_{operation}", message=f"{drm_type} {operation}", drm_type=drm_type, **kwargs
|
||||
)
|
||||
|
||||
def log_vault_query(self, vault_name: str, operation: str, **kwargs):
|
||||
"""
|
||||
Log a vault query operation.
|
||||
|
||||
Args:
|
||||
vault_name: Name of the vault
|
||||
operation: Vault operation (get_key, add_key, etc.)
|
||||
**kwargs: Additional context (KID, key, success, etc.)
|
||||
"""
|
||||
self.log(
|
||||
level="DEBUG",
|
||||
operation=f"vault_{operation}",
|
||||
message=f"Vault {vault_name}: {operation}",
|
||||
vault=vault_name,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def log_error(self, operation: str, error: Exception, **kwargs):
|
||||
"""
|
||||
Log an error with full context.
|
||||
|
||||
Args:
|
||||
operation: Operation that failed
|
||||
error: Exception that occurred
|
||||
**kwargs: Additional context
|
||||
"""
|
||||
self.log(
|
||||
level="ERROR",
|
||||
operation=operation,
|
||||
message=f"Error in {operation}: {str(error)}",
|
||||
error=error,
|
||||
success=False,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def close(self):
|
||||
"""Close the log file and clean up resources."""
|
||||
if self.file_handle:
|
||||
self.log(level="INFO", operation="session_end", message="Debug logging session ended")
|
||||
self.file_handle.close()
|
||||
self.file_handle = None
|
||||
|
||||
|
||||
# Global debug logger instance
|
||||
_debug_logger: Optional[DebugLogger] = None
|
||||
|
||||
|
||||
def get_debug_logger() -> Optional[DebugLogger]:
|
||||
"""Get the global debug logger instance."""
|
||||
return _debug_logger
|
||||
|
||||
|
||||
def init_debug_logger(log_path: Optional[Path] = None, enabled: bool = False, log_keys: bool = False):
|
||||
"""
|
||||
Initialize the global debug logger.
|
||||
|
||||
Args:
|
||||
log_path: Path to the log file
|
||||
enabled: Whether debug logging is enabled
|
||||
log_keys: Whether to log decryption keys (for debugging key issues)
|
||||
"""
|
||||
global _debug_logger
|
||||
if _debug_logger:
|
||||
_debug_logger.close()
|
||||
_debug_logger = DebugLogger(log_path=log_path, enabled=enabled, log_keys=log_keys)
|
||||
|
||||
|
||||
def close_debug_logger():
|
||||
"""Close the global debug logger."""
|
||||
global _debug_logger
|
||||
if _debug_logger:
|
||||
_debug_logger.close()
|
||||
_debug_logger = None
|
||||
|
||||
|
||||
__all__ = (
|
||||
"DebugLogger",
|
||||
"get_debug_logger",
|
||||
"init_debug_logger",
|
||||
"close_debug_logger",
|
||||
)
|
||||
|
||||
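A minimal sketch of the logger in use, with a hypothetical log path; in unshackle the path would normally be derived from the filenames.debug_log template shown later in this changeset:

    from pathlib import Path

    init_debug_logger(log_path=Path("logs/unshackle_debug_demo.jsonl"), enabled=True)
    dbg = get_debug_logger()

    op_id = dbg.log_operation_start("manifest_fetch", service="EXAMPLE")
    dbg.log_service_call("GET", "https://domain.com/manifest.mpd")
    dbg.log_operation_end("manifest_fetch", op_id, success=True, duration_ms=42.0)

    close_debug_logger()  # writes the session_end entry and closes the file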
@@ -3,15 +3,17 @@ from __future__ import annotations
import logging
import os
import re
import shutil
import subprocess
import tempfile
from difflib import SequenceMatcher
from pathlib import Path
from typing import Optional, Tuple
from xml.sax.saxutils import escape

import requests
from requests.adapters import HTTPAdapter, Retry

from unshackle.core import binaries
from unshackle.core.config import config
from unshackle.core.titles.episode import Episode
from unshackle.core.titles.movie import Movie
@@ -25,10 +27,30 @@ HEADERS = {"User-Agent": "unshackle-tags/1.0"}
log = logging.getLogger("TAGS")


def _get_session() -> requests.Session:
    """Create a requests session with retry logic for network failures."""
    session = requests.Session()
    session.headers.update(HEADERS)

    retry = Retry(
        total=3, backoff_factor=1, status_forcelist=[429, 500, 502, 503, 504], allowed_methods=["GET", "POST"]
    )

    adapter = HTTPAdapter(max_retries=retry)
    session.mount("https://", adapter)
    session.mount("http://", adapter)

    return session


def _api_key() -> Optional[str]:
    return config.tmdb_api_key or os.getenv("TMDB_API_KEY")


def _simkl_client_id() -> Optional[str]:
    return config.simkl_client_id or os.getenv("SIMKL_CLIENT_ID")


def _clean(s: str) -> str:
    return STRIP_RE.sub("", s).lower()
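For illustration, what the retry policy above buys callers (exact sleep timings follow urllib3's backoff formula):

    session = _get_session()
    # A transient 429/500/502/503/504 from TMDB or Simkl is retried up to
    # 3 times with exponential backoff before the request finally fails;
    # other statuses (e.g. 404) fail immediately via raise_for_status().
    r = session.get("https://api.themoviedb.org/3/configuration",
                    params={"api_key": "YOUR_KEY"}, timeout=30)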
@@ -44,6 +66,96 @@ def fuzzy_match(a: str, b: str, threshold: float = 0.8) -> bool:
    return ratio >= threshold


def search_simkl(title: str, year: Optional[int], kind: str) -> Tuple[Optional[dict], Optional[str], Optional[int]]:
    """Search Simkl API for show information by filename."""
    log.debug("Searching Simkl for %r (%s, %s)", title, kind, year)

    client_id = _simkl_client_id()
    if not client_id:
        log.debug("No SIMKL client ID configured; skipping SIMKL search")
        return None, None, None

    # Construct appropriate filename based on type
    filename = f"{title}"
    if year:
        filename = f"{title} {year}"

    if kind == "tv":
        filename += " S01E01.mkv"
    else:  # movie
        filename += " 2160p.mkv"

    try:
        session = _get_session()
        headers = {"simkl-api-key": client_id}
        resp = session.post("https://api.simkl.com/search/file", json={"file": filename}, headers=headers, timeout=30)
        resp.raise_for_status()
        data = resp.json()
        log.debug("Simkl API response received")

        # Handle case where SIMKL returns empty list (no results)
        if isinstance(data, list):
            log.debug("Simkl returned list (no matches) for %r", filename)
            return None, None, None

        # Handle TV show responses
        if data.get("type") == "episode" and "show" in data:
            show_info = data["show"]
            show_title = show_info.get("title")
            show_year = show_info.get("year")

            # Verify title matches and year if provided
            if not fuzzy_match(show_title, title):
                log.debug("Simkl title mismatch: searched %r, got %r", title, show_title)
                return None, None, None
            if year and show_year and abs(year - show_year) > 1:  # Allow 1 year difference
                log.debug("Simkl year mismatch: searched %d, got %d", year, show_year)
                return None, None, None

            tmdb_id = show_info.get("ids", {}).get("tmdbtv")
            if tmdb_id:
                tmdb_id = int(tmdb_id)
            log.debug("Simkl -> %s (TMDB ID %s)", show_title, tmdb_id)
            return data, show_title, tmdb_id

        # Handle movie responses
        elif data.get("type") == "movie" and "movie" in data:
            movie_info = data["movie"]
            movie_title = movie_info.get("title")
            movie_year = movie_info.get("year")

            # Verify title matches and year if provided
            if not fuzzy_match(movie_title, title):
                log.debug("Simkl title mismatch: searched %r, got %r", title, movie_title)
                return None, None, None
            if year and movie_year and abs(year - movie_year) > 1:  # Allow 1 year difference
                log.debug("Simkl year mismatch: searched %d, got %d", year, movie_year)
                return None, None, None

            ids = movie_info.get("ids", {})
            tmdb_id = ids.get("tmdb") or ids.get("moviedb")
            if tmdb_id:
                tmdb_id = int(tmdb_id)
            log.debug("Simkl -> %s (TMDB ID %s)", movie_title, tmdb_id)
            return data, movie_title, tmdb_id

    except (requests.RequestException, ValueError, KeyError) as exc:
        log.debug("Simkl search failed: %s", exc)

    return None, None, None


def search_show_info(title: str, year: Optional[int], kind: str) -> Tuple[Optional[int], Optional[str], Optional[str]]:
    """Search for show information, trying Simkl first, then TMDB fallback. Returns (tmdb_id, title, source)."""
    simkl_data, simkl_title, simkl_tmdb_id = search_simkl(title, year, kind)

    if simkl_data and simkl_title and fuzzy_match(simkl_title, title):
        return simkl_tmdb_id, simkl_title, "simkl"

    tmdb_id, tmdb_title = search_tmdb(title, year, kind)
    return tmdb_id, tmdb_title, "tmdb"
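A sketch of the combined lookup above; the filename Simkl is probed with is synthesized from the title, year, and kind:

    tmdb_id, matched_title, source = search_show_info("Prime Suspect", 1991, "tv")
    # Internally Simkl is asked to identify "Prime Suspect 1991 S01E01.mkv";
    # on a miss or a fuzzy-title mismatch, search_tmdb() provides the answer.
    print(tmdb_id, matched_title, source)  # source is "simkl" or "tmdb"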
def search_tmdb(title: str, year: Optional[int], kind: str) -> Tuple[Optional[int], Optional[str]]:
    api_key = _api_key()
    if not api_key:
@@ -56,17 +168,21 @@ def search_tmdb(title: str, year: Optional[int], kind: str) -> Tuple[Optional[int
    if year is not None:
        params["year" if kind == "movie" else "first_air_date_year"] = year

    r = requests.get(
        f"https://api.themoviedb.org/3/search/{kind}",
        params=params,
        headers=HEADERS,
        timeout=30,
    )
    r.raise_for_status()
    js = r.json()
    results = js.get("results") or []
    log.debug("TMDB returned %d results", len(results))
    if not results:
    try:
        session = _get_session()
        r = session.get(
            f"https://api.themoviedb.org/3/search/{kind}",
            params=params,
            timeout=30,
        )
        r.raise_for_status()
        js = r.json()
        results = js.get("results") or []
        log.debug("TMDB returned %d results", len(results))
        if not results:
            return None, None
    except requests.RequestException as exc:
        log.warning("Failed to search TMDB for %s: %s", title, exc)
        return None, None

    best_ratio = 0.0
@@ -113,10 +229,10 @@ def get_title(tmdb_id: int, kind: str) -> Optional[str]:
        return None

    try:
        r = requests.get(
        session = _get_session()
        r = session.get(
            f"https://api.themoviedb.org/3/{kind}/{tmdb_id}",
            params={"api_key": api_key},
            headers=HEADERS,
            timeout=30,
        )
        r.raise_for_status()
@@ -136,10 +252,10 @@ def get_year(tmdb_id: int, kind: str) -> Optional[int]:
        return None

    try:
        r = requests.get(
        session = _get_session()
        r = session.get(
            f"https://api.themoviedb.org/3/{kind}/{tmdb_id}",
            params={"api_key": api_key},
            headers=HEADERS,
            timeout=30,
        )
        r.raise_for_status()
@@ -160,36 +276,40 @@ def external_ids(tmdb_id: int, kind: str) -> dict:
        return {}
    url = f"https://api.themoviedb.org/3/{kind}/{tmdb_id}/external_ids"
    log.debug("Fetching external IDs for %s %s", kind, tmdb_id)
    r = requests.get(
        url,
        params={"api_key": api_key},
        headers=HEADERS,
        timeout=30,
    )
    r.raise_for_status()
    js = r.json()
    log.debug("External IDs response: %s", js)
    return js

    try:
        session = _get_session()
        r = session.get(
            url,
            params={"api_key": api_key},
            timeout=30,
        )
        r.raise_for_status()
        js = r.json()
        log.debug("External IDs response: %s", js)
        return js
    except requests.RequestException as exc:
        log.warning("Failed to fetch external IDs for %s %s: %s", kind, tmdb_id, exc)
        return {}


def _apply_tags(path: Path, tags: dict[str, str]) -> None:
    if not tags:
        return
    mkvpropedit = shutil.which("mkvpropedit")
    if not mkvpropedit:
    if not binaries.Mkvpropedit:
        log.debug("mkvpropedit not found on PATH; skipping tags")
        return
    log.debug("Applying tags to %s: %s", path, tags)
    xml_lines = ["<?xml version='1.0' encoding='UTF-8'?>", "<Tags>", " <Tag>", " <Targets/>"]
    xml_lines = ['<?xml version="1.0" encoding="UTF-8"?>', "<Tags>", " <Tag>", " <Targets/>"]
    for name, value in tags.items():
        xml_lines.append(f" <Simple><Name>{name}</Name><String>{value}</String></Simple>")
        xml_lines.append(f" <Simple><Name>{escape(name)}</Name><String>{escape(value)}</String></Simple>")
    xml_lines.extend([" </Tag>", "</Tags>"])
    with tempfile.NamedTemporaryFile("w", suffix=".xml", delete=False) as f:
    with tempfile.NamedTemporaryFile("w", suffix=".xml", delete=False, encoding="utf-8") as f:
        f.write("\n".join(xml_lines))
        tmp_path = Path(f.name)
    try:
        subprocess.run(
            [mkvpropedit, str(path), "--tags", f"global:{tmp_path}"],
            [str(binaries.Mkvpropedit), str(path), "--tags", f"global:{tmp_path}"],
            check=False,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
@@ -203,10 +323,8 @@ def tag_file(path: Path, title: Title, tmdb_id: Optional[int] | None = None) ->
    log.debug("Tagging file %s with title %r", path, title)
    standard_tags: dict[str, str] = {}
    custom_tags: dict[str, str] = {}
    # To add custom information to the tags
    # custom_tags["Text to the left side"] = "Text to the right side"

    if config.tag:
    if config.tag and config.tag_group_name:
        custom_tags["Group"] = config.tag
    description = getattr(title, "description", None)
    if description:
@@ -217,12 +335,6 @@ def tag_file(path: Path, title: Title, tmdb_id: Optional[int] | None = None) ->
            description = truncated + "..."
        custom_tags["Description"] = description

    api_key = _api_key()
    if not api_key:
        log.debug("No TMDB API key set; applying basic tags only")
        _apply_tags(path, custom_tags)
        return

    if isinstance(title, Movie):
        kind = "movie"
        name = title.name
@@ -235,32 +347,98 @@ def tag_file(path: Path, title: Title, tmdb_id: Optional[int] | None = None) ->
        _apply_tags(path, custom_tags)
        return

    tmdb_title: Optional[str] = None
    if tmdb_id is None:
        tmdb_id, tmdb_title = search_tmdb(name, year, kind)
        log.debug("Search result: %r (ID %s)", tmdb_title, tmdb_id)
        if not tmdb_id or not tmdb_title or not fuzzy_match(tmdb_title, name):
            log.debug("TMDB search did not match; skipping external ID lookup")
            _apply_tags(path, custom_tags)
            return
    if config.tag_imdb_tmdb:
        # Check if we have any API keys available for metadata lookup
        api_key = _api_key()
        simkl_client = _simkl_client_id()

    tmdb_url = f"https://www.themoviedb.org/{'movie' if kind == 'movie' else 'tv'}/{tmdb_id}"
    standard_tags["TMDB"] = tmdb_url
    try:
        ids = external_ids(tmdb_id, kind)
    except requests.RequestException as exc:
        log.debug("Failed to fetch external IDs: %s", exc)
        ids = {}
    else:
        log.debug("External IDs found: %s", ids)
        if not api_key and not simkl_client:
            log.debug("No TMDB API key or Simkl client ID configured; skipping IMDB/TMDB tag lookup")
        else:
            # If tmdb_id is provided (via --tmdb), skip Simkl and use TMDB directly
            if tmdb_id is not None:
                log.debug("Using provided TMDB ID %s for tags", tmdb_id)
            else:
                # Try Simkl first for automatic lookup (only if client ID is available)
                if simkl_client:
                    simkl_data, simkl_title, simkl_tmdb_id = search_simkl(name, year, kind)

    imdb_id = ids.get("imdb_id")
    if imdb_id:
        standard_tags["IMDB"] = f"https://www.imdb.com/title/{imdb_id}"
    tvdb_id = ids.get("tvdb_id")
    if tvdb_id:
        tvdb_prefix = "movies" if kind == "movie" else "series"
        standard_tags["TVDB"] = f"https://thetvdb.com/dereferrer/{tvdb_prefix}/{tvdb_id}"
                    if simkl_data and simkl_title and fuzzy_match(simkl_title, name):
                        log.debug("Using Simkl data for tags")
                        if simkl_tmdb_id:
                            tmdb_id = simkl_tmdb_id

                        # Handle TV show data from Simkl
                        if simkl_data.get("type") == "episode" and "show" in simkl_data:
                            show_ids = simkl_data.get("show", {}).get("ids", {})
                            if show_ids.get("imdb"):
                                standard_tags["IMDB"] = show_ids["imdb"]
                            if show_ids.get("tvdb"):
                                standard_tags["TVDB2"] = f"series/{show_ids['tvdb']}"
                            if show_ids.get("tmdbtv"):
                                standard_tags["TMDB"] = f"tv/{show_ids['tmdbtv']}"

                        # Handle movie data from Simkl
                        elif simkl_data.get("type") == "movie" and "movie" in simkl_data:
                            movie_ids = simkl_data.get("movie", {}).get("ids", {})
                            if movie_ids.get("imdb"):
                                standard_tags["IMDB"] = movie_ids["imdb"]
                            if movie_ids.get("tvdb"):
                                standard_tags["TVDB2"] = f"movies/{movie_ids['tvdb']}"
                            if movie_ids.get("tmdb"):
                                standard_tags["TMDB"] = f"movie/{movie_ids['tmdb']}"

            # Use TMDB API for additional metadata (either from provided ID or Simkl lookup)
            if api_key:
                tmdb_title: Optional[str] = None
                if tmdb_id is None:
                    tmdb_id, tmdb_title = search_tmdb(name, year, kind)
                    log.debug("TMDB search result: %r (ID %s)", tmdb_title, tmdb_id)
                    if not tmdb_id or not tmdb_title or not fuzzy_match(tmdb_title, name):
                        log.debug("TMDB search did not match; skipping external ID lookup")
                    else:
                        prefix = "movie" if kind == "movie" else "tv"
                        standard_tags["TMDB"] = f"{prefix}/{tmdb_id}"
                        try:
                            ids = external_ids(tmdb_id, kind)
                        except requests.RequestException as exc:
                            log.debug("Failed to fetch external IDs: %s", exc)
                            ids = {}
                        else:
                            log.debug("External IDs found: %s", ids)

                        imdb_id = ids.get("imdb_id")
                        if imdb_id:
                            standard_tags["IMDB"] = imdb_id
                        tvdb_id = ids.get("tvdb_id")
                        if tvdb_id:
                            if kind == "movie":
                                standard_tags["TVDB2"] = f"movies/{tvdb_id}"
                            else:
                                standard_tags["TVDB2"] = f"series/{tvdb_id}"
                elif tmdb_id is not None:
                    # tmdb_id was provided or found via Simkl
                    prefix = "movie" if kind == "movie" else "tv"
                    standard_tags["TMDB"] = f"{prefix}/{tmdb_id}"
                    try:
                        ids = external_ids(tmdb_id, kind)
                    except requests.RequestException as exc:
                        log.debug("Failed to fetch external IDs: %s", exc)
                        ids = {}
                    else:
                        log.debug("External IDs found: %s", ids)

                    imdb_id = ids.get("imdb_id")
                    if imdb_id:
                        standard_tags["IMDB"] = imdb_id
                    tvdb_id = ids.get("tvdb_id")
                    if tvdb_id:
                        if kind == "movie":
                            standard_tags["TVDB2"] = f"movies/{tvdb_id}"
                        else:
                            standard_tags["TVDB2"] = f"series/{tvdb_id}"
            else:
                log.debug("No TMDB API key configured; skipping TMDB external ID lookup")

    merged_tags = {
        **custom_tags,
@@ -270,6 +448,8 @@ def tag_file(path: Path, title: Title, tmdb_id: Optional[int] | None = None) ->


__all__ = [
    "search_simkl",
    "search_show_info",
    "search_tmdb",
    "get_title",
    "get_year",
@@ -4,8 +4,9 @@ from uuid import UUID


class Vault(metaclass=ABCMeta):
    def __init__(self, name: str):
    def __init__(self, name: str, no_push: bool = False):
        self.name = name
        self.no_push = no_push

    def __str__(self) -> str:
        return f"{self.name} {type(self).__name__}"

@@ -25,8 +25,20 @@ class Vaults:
    def __len__(self) -> int:
        return len(self.vaults)

    def load(self, type_: str, **kwargs: Any) -> None:
        """Load a Vault into the vaults list."""
    def load(self, type_: str, **kwargs: Any) -> bool:
        """Load a Vault into the vaults list. Returns True if successful, False otherwise."""
        module = _MODULES.get(type_)
        if not module:
            raise ValueError(f"Unable to find vault command by the name '{type_}'.")
        try:
            vault = module(**kwargs)
            self.vaults.append(vault)
            return True
        except Exception:
            return False

    def load_critical(self, type_: str, **kwargs: Any) -> None:
        """Load a critical Vault that must succeed or raise an exception."""
        module = _MODULES.get(type_)
        if not module:
            raise ValueError(f"Unable to find vault command by the name '{type_}'.")
@@ -45,7 +57,7 @@ class Vaults:
        """Add a KID:KEY to all Vaults, optionally with an exclusion."""
        success = 0
        for vault in self.vaults:
            if vault != excluding:
            if vault != excluding and not vault.no_push:
                try:
                    success += vault.add_key(self.service, kid, key)
                except (PermissionError, NotImplementedError):
@@ -56,13 +68,17 @@ class Vaults:
        """
        Add multiple KID:KEYs to all Vaults. Duplicate Content Keys are skipped.
        PermissionErrors when the user cannot create Tables are absorbed and ignored.
        Vaults with no_push=True are skipped.
        """
        success = 0
        for vault in self.vaults:
            try:
                success += bool(vault.add_keys(self.service, kid_keys))
            except (PermissionError, NotImplementedError):
                pass
            if not vault.no_push:
                try:
                    # Count each vault that successfully processes the keys (whether new or existing)
                    vault.add_keys(self.service, kid_keys)
                    success += 1
                except (PermissionError, NotImplementedError):
                    pass
        return success
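A sketch of how a caller can react to the new boolean return, assuming Vaults is constructed with the service name as elsewhere in unshackle; the vault settings here are placeholders:

    vaults = Vaults(service="EXAMPLE")

    # Non-critical vault: a failure is tolerated and merely reported.
    if not vaults.load("API", name="Remote Vault", uri="https://key-vault.example.com",
                       token="secret_token", no_push=True):
        print("Remote vault unavailable; continuing with local vaults only")

    # Critical vault: load_critical() raises if construction fails.
    vaults.load_critical("SQLite", name="Local", path="key_store.db")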
@@ -16,7 +16,7 @@ from unshackle.core.manifests import DASH
from unshackle.core.search_result import SearchResult
from unshackle.core.service import Service
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
from unshackle.core.tracks import Chapter, Subtitle, Tracks
from unshackle.core.tracks import Chapter, Subtitle, Tracks, Video


class EXAMPLE(Service):
@@ -33,6 +33,7 @@ class EXAMPLE(Service):

    TITLE_RE = r"^(?:https?://?domain\.com/details/)?(?P<title_id>[^/]+)"
    GEOFENCE = ("US", "UK")
    NO_SUBTITLES = True

    @staticmethod
    @click.command(name="EXAMPLE", short_help="https://domain.com")
@@ -49,6 +50,11 @@ class EXAMPLE(Service):
        self.title = title
        self.movie = movie
        self.device = device
        self.cdm = ctx.obj.cdm

        # Get range parameter for HDR support
        range_param = ctx.parent.params.get("range_")
        self.range = range_param[0].name if range_param else "SDR"

        if self.config is None:
            raise Exception("Config is missing!")
@@ -160,15 +166,54 @@ class EXAMPLE(Service):
        return Series(episodes)

    def get_tracks(self, title: Title_T) -> Tracks:
        # Handle HYBRID mode by fetching both HDR10 and DV tracks separately
        if self.range == "HYBRID" and self.cdm.security_level != 3:
            tracks = Tracks()

            # Get HDR10 tracks
            hdr10_tracks = self._get_tracks_for_range(title, "HDR10")
            tracks.add(hdr10_tracks, warn_only=True)

            # Get DV tracks
            dv_tracks = self._get_tracks_for_range(title, "DV")
            tracks.add(dv_tracks, warn_only=True)

            return tracks
        else:
            # Normal single-range behavior
            return self._get_tracks_for_range(title, self.range)
    def _get_tracks_for_range(self, title: Title_T, range_override: Optional[str] = None) -> Tracks:
        # Use range_override if provided, otherwise use self.range
        current_range = range_override if range_override else self.range

        # Build API request parameters
        params = {
            "token": self.token,
            "guid": title.id,
        }

        data = {
            "type": self.config["client"][self.device]["type"],
        }

        # Add range-specific parameters
        if current_range == "HDR10":
            data["video_format"] = "hdr10"
        elif current_range == "DV":
            data["video_format"] = "dolby_vision"
        else:
            data["video_format"] = "sdr"

        # Only request high-quality HDR content with L1 CDM
        if current_range in ("HDR10", "DV") and self.cdm.security_level == 3:
            # L3 CDM - skip HDR content
            return Tracks()

        streams = self.session.post(
            url=self.config["endpoints"]["streams"],
            params={
                "token": self.token,
                "guid": title.id,
            },
            data={
                "type": self.config["client"][self.device]["type"],
            },
            params=params,
            data=data,
        ).json()["media"]

        self.license = {
@@ -182,6 +227,15 @@ class EXAMPLE(Service):
        self.log.debug(f"Manifest URL: {manifest_url}")
        tracks = DASH.from_url(url=manifest_url, session=self.session).to_tracks(language=title.language)

        # Set range attributes on video tracks
        for video in tracks.videos:
            if current_range == "HDR10":
                video.range = Video.Range.HDR10
            elif current_range == "DV":
                video.range = Video.Range.DV
            else:
                video.range = Video.Range.SDR

        # Remove DRM-free ("clear") audio tracks
        tracks.audio = [
            track for track in tracks.audio if "clear" not in track.data["dash"]["representation"].get("id")
@@ -228,6 +282,10 @@ class EXAMPLE(Service):

        return chapters
    def get_widevine_service_certificate(self, **_: Any) -> str:
        """Return the Widevine service certificate from config, if available."""
        return self.config.get("certificate")

    def get_playready_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[bytes]:
        """Retrieve a PlayReady license for a given track."""
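For illustration, what the HYBRID branch above produces downstream: one Tracks object carrying both range variants, which later hybrid processing (e.g. dovi_tool-based layer injection) can pair up. The helper below is hypothetical:

    def split_hybrid(tracks: Tracks) -> tuple[list[Video], list[Video]]:
        # Separate the two variants fetched by get_tracks() in HYBRID mode.
        hdr10 = [v for v in tracks.videos if v.range == Video.Range.HDR10]
        dv = [v for v in tracks.videos if v.range == Video.Range.DV]
        return hdr10, dv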
unshackle/unshackle-example.yaml (new file, 477 lines)
@@ -0,0 +1,477 @@
# Group or Username to postfix to the end of all download filenames following a dash
tag: user_tag

# Enable/disable tagging with group name (default: true)
tag_group_name: true

# Enable/disable tagging with IMDB/TMDB/TVDB details (default: true)
tag_imdb_tmdb: true

# Set terminal background color (custom option not in CONFIG.md)
set_terminal_bg: false

# Set file naming convention
# true for style - Prime.Suspect.S07E01.The.Final.Act.Part.One.1080p.ITV.WEB-DL.AAC2.0.H.264
# false for style - Prime Suspect S07E01 The Final Act - Part One
scene_naming: true

# Whether to include the year in series names for episodes and folders (default: true)
# true for style - Show Name (2023) S01E01 Episode Name
# false for style - Show Name S01E01 Episode Name
series_year: true

# Check for updates from GitHub repository on startup (default: true)
update_checks: true

# How often to check for updates, in hours (default: 24)
update_check_interval: 24

# Title caching configuration
# Cache title metadata to reduce redundant API calls
title_cache_enabled: true  # Enable/disable title caching globally (default: true)
title_cache_time: 1800  # Cache duration in seconds (default: 1800 = 30 minutes)
title_cache_max_retention: 86400  # Maximum cache retention for fallback when API fails (default: 86400 = 24 hours)

# Debug logging configuration
# Comprehensive JSON-based debug logging for troubleshooting and service development
debug: false  # Enable structured JSON debug logging (default: false)
# When enabled with --debug flag or set to true:
# - Creates JSON Lines (.jsonl) log files with complete debugging context
# - Logs: session info, CLI params, service config, CDM details, authentication,
#   titles, tracks metadata, DRM operations, vault queries, errors with stack traces
# - File location: logs/unshackle_debug_{service}_{timestamp}.jsonl
# - Also creates text log: logs/unshackle_root_{timestamp}.log

debug_keys: false  # Log decryption keys in debug logs (default: false)
# Set to true to include actual decryption keys in logs
# Useful for debugging key retrieval and decryption issues
# SECURITY NOTE: Passwords, tokens, cookies, and session tokens
# are ALWAYS redacted regardless of this setting
# Only affects: content_key, key fields (the actual CEKs)
# Never affects: kid, keys_count, key_id (metadata is always logged)
||||
|
||||
# Muxing configuration
|
||||
muxing:
|
||||
set_title: false
|
||||
|
||||
# Login credentials for each Service
|
||||
credentials:
|
||||
# Direct credentials (no profile support)
|
||||
EXAMPLE: email@example.com:password
|
||||
|
||||
# Per-profile credentials with default fallback
|
||||
SERVICE_NAME:
|
||||
default: default@email.com:password # Used when no -p/--profile is specified
|
||||
profile1: user1@email.com:password1
|
||||
profile2: user2@email.com:password2
|
||||
|
||||
# Per-profile credentials without default (requires -p/--profile)
|
||||
SERVICE_NAME2:
|
||||
john: john@example.com:johnspassword
|
||||
jane: jane@example.com:janespassword
|
||||
|
||||
# You can also use list format for passwords with special characters
|
||||
SERVICE_NAME3:
|
||||
default: ["user@email.com", ":PasswordWith:Colons"]
|
||||
|
||||
# Override default directories used across unshackle
|
||||
directories:
|
||||
cache: Cache
|
||||
cookies: Cookies
|
||||
dcsl: DCSL # Device Certificate Status List
|
||||
downloads: Downloads
|
||||
logs: Logs
|
||||
temp: Temp
|
||||
wvds: WVDs
|
||||
prds: PRDs
|
||||
# Additional directories that can be configured:
|
||||
# commands: Commands
|
||||
services:
|
||||
- /path/to/services
|
||||
- /other/path/to/services
|
||||
# vaults: Vaults
|
||||
# fonts: Fonts
|
||||
|
||||
# Pre-define which Widevine or PlayReady device to use for each Service
|
||||
cdm:
|
||||
# Global default CDM device (fallback for all services/profiles)
|
||||
default: WVD_1
|
||||
|
||||
# Direct service-specific CDM
|
||||
DIFFERENT_EXAMPLE: PRD_1
|
||||
|
||||
# Per-profile CDM configuration
|
||||
EXAMPLE:
|
||||
john_sd: chromecdm_903_l3 # Profile 'john_sd' uses Chrome CDM L3
|
||||
jane_uhd: nexus_5_l1 # Profile 'jane_uhd' uses Nexus 5 L1
|
||||
default: generic_android_l3 # Default CDM for this service
|
||||
|
||||
# NEW: Quality-based CDM selection
|
||||
# Use different CDMs based on video resolution
|
||||
# Supports operators: >=, >, <=, <, or exact match
|
||||
EXAMPLE_QUALITY:
|
||||
"<=1080": generic_android_l3 # Use L3 for 1080p and below
|
||||
">1080": nexus_5_l1 # Use L1 for above 1080p (1440p, 2160p)
|
||||
default: generic_android_l3 # Optional: fallback if no quality match
|
||||
|
||||
# You can mix profiles and quality thresholds in the same service
|
||||
NETFLIX:
|
||||
# Profile-based selection (existing functionality)
|
||||
john: netflix_l3_profile
|
||||
jane: netflix_l1_profile
|
||||
# Quality-based selection (new functionality)
|
||||
"<=720": netflix_mobile_l3
|
||||
"1080": netflix_standard_l3
|
||||
">=1440": netflix_premium_l1
|
||||
# Fallback
|
||||
default: netflix_standard_l3
|
||||
|
||||
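For illustration, the operator rules above can be resolved with logic along these lines; this is a sketch of the matching idea, not unshackle's actual resolver:

    from typing import Optional

    def match_quality_cdm(rules: dict[str, str], height: int) -> Optional[str]:
        # Exact match first (e.g. "1080"), then operator thresholds.
        if str(height) in rules:
            return rules[str(height)]
        for rule, device in rules.items():
            for op, test in (
                (">=", lambda n: height >= n), ("<=", lambda n: height <= n),
                (">", lambda n: height > n), ("<", lambda n: height < n),
            ):
                suffix = rule[len(op):]
                if rule.startswith(op) and suffix.isdigit() and test(int(suffix)):
                    return device
        return rules.get("default")

    match_quality_cdm({"<=1080": "generic_android_l3", ">1080": "nexus_5_l1"}, 2160)
    # -> "nexus_5_l1"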
# Use pywidevine Serve-compliant Remote CDMs
|
||||
|
||||
# Example: Custom CDM API Configuration
|
||||
# This demonstrates the highly configurable custom_api type that can adapt to any CDM API format
|
||||
# - name: "chrome"
|
||||
# type: "custom_api"
|
||||
# host: "http://remotecdm.test/"
|
||||
# timeout: 30
|
||||
# device:
|
||||
# name: "ChromeCDM"
|
||||
# type: "CHROME"
|
||||
# system_id: 34312
|
||||
# security_level: 3
|
||||
# auth:
|
||||
# type: "header"
|
||||
# header_name: "x-api-key"
|
||||
# key: "YOUR_API_KEY_HERE"
|
||||
# custom_headers:
|
||||
# User-Agent: "Unshackle/2.0.0"
|
||||
# endpoints:
|
||||
# get_request:
|
||||
# path: "/get-challenge"
|
||||
# method: "POST"
|
||||
# timeout: 30
|
||||
# decrypt_response:
|
||||
# path: "/get-keys"
|
||||
# method: "POST"
|
||||
# timeout: 30
|
||||
# request_mapping:
|
||||
# get_request:
|
||||
# param_names:
|
||||
# scheme: "device"
|
||||
# init_data: "init_data"
|
||||
# static_params:
|
||||
# scheme: "Widevine"
|
||||
# decrypt_response:
|
||||
# param_names:
|
||||
# scheme: "device"
|
||||
# license_request: "license_request"
|
||||
# license_response: "license_response"
|
||||
# static_params:
|
||||
# scheme: "Widevine"
|
||||
# response_mapping:
|
||||
# get_request:
|
||||
# fields:
|
||||
# challenge: "challenge"
|
||||
# session_id: "session_id"
|
||||
# message: "message"
|
||||
# message_type: "message_type"
|
||||
# response_types:
|
||||
# - condition: "message_type == 'license-request'"
|
||||
# type: "license_request"
|
||||
# success_conditions:
|
||||
# - "message == 'success'"
|
||||
# decrypt_response:
|
||||
# fields:
|
||||
# keys: "keys"
|
||||
# message: "message"
|
||||
# key_fields:
|
||||
# kid: "kid"
|
||||
# key: "key"
|
||||
# type: "type"
|
||||
# success_conditions:
|
||||
# - "message == 'success'"
|
||||
# caching:
|
||||
# enabled: true
|
||||
# use_vaults: true
|
||||
# check_cached_first: true
|
||||
|
||||
remote_cdm:
|
||||
- name: "chrome"
|
||||
device_name: chrome
|
||||
device_type: CHROME
|
||||
system_id: 27175
|
||||
security_level: 3
|
||||
host: https://domain.com/api
|
||||
secret: secret_key
|
||||
- name: "chrome-2"
|
||||
device_name: chrome
|
||||
device_type: CHROME
|
||||
system_id: 26830
|
||||
security_level: 3
|
||||
host: https://domain-2.com/api
|
||||
secret: secret_key
|
||||
|
||||
- name: "decrypt_labs_chrome"
|
||||
type: "decrypt_labs" # Required to identify as DecryptLabs CDM
|
||||
device_name: "ChromeCDM" # Scheme identifier - must match exactly
|
||||
device_type: CHROME
|
||||
system_id: 4464 # Doesn't matter
|
||||
security_level: 3
|
||||
host: "https://keyxtractor.decryptlabs.com"
|
||||
secret: "your_decrypt_labs_api_key_here" # Replace with your API key
|
||||
- name: "decrypt_labs_l1"
|
||||
type: "decrypt_labs"
|
||||
device_name: "L1" # Scheme identifier - must match exactly
|
||||
device_type: ANDROID
|
||||
system_id: 4464
|
||||
security_level: 1
|
||||
host: "https://keyxtractor.decryptlabs.com"
|
||||
secret: "your_decrypt_labs_api_key_here"
|
||||
|
||||
- name: "decrypt_labs_l2"
|
||||
type: "decrypt_labs"
|
||||
device_name: "L2" # Scheme identifier - must match exactly
|
||||
device_type: ANDROID
|
||||
system_id: 4464
|
||||
security_level: 2
|
||||
host: "https://keyxtractor.decryptlabs.com"
|
||||
secret: "your_decrypt_labs_api_key_here"
|
||||
|
||||
- name: "decrypt_labs_playready_sl2"
|
||||
type: "decrypt_labs"
|
||||
device_name: "SL2" # Scheme identifier - must match exactly
|
||||
device_type: PLAYREADY
|
||||
system_id: 0
|
||||
security_level: 2000
|
||||
host: "https://keyxtractor.decryptlabs.com"
|
||||
secret: "your_decrypt_labs_api_key_here"
|
||||
|
||||
- name: "decrypt_labs_playready_sl3"
|
||||
type: "decrypt_labs"
|
||||
device_name: "SL3" # Scheme identifier - must match exactly
|
||||
device_type: PLAYREADY
|
||||
system_id: 0
|
||||
security_level: 3000
|
||||
host: "https://keyxtractor.decryptlabs.com"
|
||||
secret: "your_decrypt_labs_api_key_here"
|
||||
|
||||
# Key Vaults store your obtained Content Encryption Keys (CEKs)
|
||||
# Use 'no_push: true' to prevent a vault from receiving pushed keys
|
||||
# while still allowing it to provide keys when requested
|
||||
key_vaults:
|
||||
- type: SQLite
|
||||
name: Local
|
||||
path: key_store.db
|
||||
# Additional vault types:
|
||||
# - type: API
|
||||
# name: "Remote Vault"
|
||||
# uri: "https://key-vault.example.com"
|
||||
# token: "secret_token"
|
||||
# no_push: true # This vault will only provide keys, not receive them
|
||||
# - type: MySQL
|
||||
# name: "MySQL Vault"
|
||||
# host: "127.0.0.1"
|
||||
# port: 3306
|
||||
# database: vault
|
||||
# username: user
|
||||
# password: pass
|
||||
# no_push: false # Default behavior - vault both provides and receives keys
|
||||
|
||||
# Choose what software to use to download data
|
||||
downloader: aria2c
|
||||
# Options: requests | aria2c | curl_impersonate | n_m3u8dl_re
|
||||
# Can also be a mapping:
|
||||
# downloader:
|
||||
# NF: requests
|
||||
# AMZN: n_m3u8dl_re
|
||||
# DSNP: n_m3u8dl_re
|
||||
# default: requests
|
||||
|
||||
# aria2c downloader configuration
|
||||
aria2c:
|
||||
max_concurrent_downloads: 4
|
||||
max_connection_per_server: 3
|
||||
split: 5
|
||||
file_allocation: falloc # none | prealloc | falloc | trunc
|
||||
|
||||
# N_m3u8DL-RE downloader configuration
|
||||
n_m3u8dl_re:
|
||||
thread_count: 16
|
||||
ad_keyword: "advertisement"
|
||||
use_proxy: true
|
||||
|
||||
# curl_impersonate downloader configuration
|
||||
curl_impersonate:
|
||||
browser: chrome120
|
||||
|
||||
# Pre-define default options and switches of the dl command
dl:
  sub_format: srt
  downloads: 4
  workers: 16
  lang:
    - en
    - fr
  EXAMPLE:
    bitrate: CBR

# Chapter Name to use when exporting a Chapter without a Name
chapter_fallback_name: "Chapter {j:02}"

# Case-Insensitive dictionary of headers for all Services
headers:
  Accept-Language: "en-US,en;q=0.8"
  User-Agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36"

# Override default filenames used across unshackle
filenames:
  debug_log: "unshackle_debug_{service}_{time}.jsonl" # JSON Lines debug log file
  config: "config.yaml"
  root_config: "unshackle.yaml"
  chapters: "Chapters_{title}_{random}.txt"
  subtitle: "Subtitle_{id}_{language}.srt"

# API key for The Movie Database (TMDB)
tmdb_api_key: ""

# Client ID for SIMKL API (optional, improves metadata matching)
# Get your free client ID at: https://simkl.com/settings/developer/
simkl_client_id: ""

# conversion_method:
# - auto (default): Smart routing - subby for WebVTT/SAMI, standard for others
# - subby: Always use subby with advanced processing
# - pycaption: Use only pycaption library (no SubtitleEdit, no subby)
# - subtitleedit: Prefer SubtitleEdit when available, fall back to pycaption
# - pysubs2: Use pysubs2 library (supports SRT/SSA/ASS/WebVTT/TTML/SAMI/MicroDVD/MPL2/TMP)
subtitle:
  conversion_method: auto
  sdh_method: auto

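A hedged sketch of the "auto" routing rule described above (illustrative only; the format labels and return values are assumptions for the example, not unshackle's internal identifiers):

def pick_conversion_method(source_format: str) -> str:
    # Per the comment above: WebVTT and SAMI go through subby,
    # everything else takes the standard conversion path.
    if source_format.lower() in {"vtt", "webvtt", "smi", "sami"}:
        return "subby"
    return "standard"
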
# Configuration for pywidevine's serve functionality
serve:
  api_secret: "your-secret-key-here"
  users:
    secret_key_for_user:
      devices:
        - generic_nexus_4464_l3
      username: user
  # devices:
  #   - '/path/to/device.wvd'

# Configuration data for each Service
services:
  # Service-specific configuration goes here
  # Profile-specific configurations can be nested under service names

  # You can override ANY global configuration option on a per-service basis
  # This allows fine-tuned control for services with special requirements
  # Supported overrides: dl, aria2c, n_m3u8dl_re, curl_impersonate, subtitle, muxing, headers, etc.

  # Example: Comprehensive service configuration showing all features
  EXAMPLE:
    # Standard service config
    api_key: "service_api_key"

    # Service certificate for Widevine L1/L2 (base64 encoded)
    # This certificate is automatically used when L1/L2 schemes are selected
    # Services obtain this from their DRM provider or license server
    certificate: |
      CAUSwwUKvQIIAxIQ5US6QAvBDzfTtjb4tU/7QxiH8c+TBSKOAjCCAQoCggEBAObzvlu2hZRsapAPx4Aa4GUZj4/GjxgXUtBH4THSkM40x63wQeyVxlEEo
      # ... (full base64 certificate here)

    # Profile-specific device configurations
    profiles:
      john_sd:
        device:
          app_name: "AIV"
          device_model: "SHIELD Android TV"
      jane_uhd:
        device:
          app_name: "AIV"
          device_model: "Fire TV Stick 4K"

    # NEW: Configuration overrides (can be combined with profiles and certificates)
    # Override dl command defaults for this service
    dl:
      downloads: 4 # Limit concurrent track downloads (global default: 6)
      workers: 8 # Reduce workers per track (global default: 16)
      lang: ["en", "es-419"] # Different language priority for this service
      sub_format: srt # Force SRT subtitle format

    # Override n_m3u8dl_re downloader settings
    n_m3u8dl_re:
      thread_count: 8 # Lower thread count for a rate-limited service (global default: 16)
      use_proxy: true # Force proxy usage for this service
      retry_count: 10 # More retries for unstable connections
      ad_keyword: "advertisement" # Service-specific ad filtering

    # Override aria2c downloader settings
    aria2c:
      max_concurrent_downloads: 2 # Limit concurrent downloads (global default: 4)
      max_connection_per_server: 1 # Single connection per server
      split: 3 # Fewer splits (global default: 5)
      file_allocation: none # Faster allocation for this service

    # Override subtitle processing for this service
    subtitle:
      conversion_method: pycaption # Use a specific subtitle converter
      sdh_method: auto

    # Service-specific headers
    headers:
      User-Agent: "Service-specific user agent string"
      Accept-Language: "en-US,en;q=0.9"

    # Override muxing options
    muxing:
      set_title: true

  # Example: Service with different regions per profile
  SERVICE_NAME:
    profiles:
      us_account:
        region: "US"
        api_endpoint: "https://api.us.service.com"
      uk_account:
        region: "GB"
        api_endpoint: "https://api.uk.service.com"

  # Notes on service-specific overrides (see the sketch below):
  # - Overrides are merged with global config, not replaced
  # - Only specified keys are overridden, others use global defaults
  # - Reserved keys (profiles, api_key, certificate, etc.) are NOT treated as overrides
  # - Any dict-type config option can be overridden (dl, aria2c, n_m3u8dl_re, etc.)
  # - Use --debug flag to see which overrides are applied during downloads

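A hedged sketch of the merge behaviour those notes describe (illustrative only; RESERVED_KEYS and the function name are assumptions for the example, not unshackle's actual code):

RESERVED_KEYS = {"profiles", "api_key", "certificate"}  # assumed, per the note above

def effective_option(global_cfg: dict, service_cfg: dict, option: str) -> dict:
    """Merge one dict-type option (e.g. "dl" or "aria2c") key-by-key over the global defaults."""
    merged = dict(global_cfg.get(option, {}))
    if option not in RESERVED_KEYS:
        merged.update(service_cfg.get(option, {}))
    return merged
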
# External proxy provider services
proxy_providers:
  nordvpn:
    username: username_from_service_credentials
    password: password_from_service_credentials
    server_map:
      us: 12 # force US server #12 for US proxies
  surfsharkvpn:
    username: your_surfshark_service_username # Service credentials from https://my.surfshark.com/vpn/manual-setup/main/openvpn
    password: your_surfshark_service_password # Service credentials (not your login password)
    server_map:
      us: 3844 # force US server #3844 for US proxies
      gb: 2697 # force GB server #2697 for GB proxies
      au: 4621 # force AU server #4621 for AU proxies
  windscribevpn:
    username: your_windscribe_username # Service credentials from https://windscribe.com/getconfig/openvpn
    password: your_windscribe_password # Service credentials (not your login password)
    server_map:
      us: "us-central-096.totallyacdn.com" # force US server
      gb: "uk-london-055.totallyacdn.com" # force GB server
  basic:
    GB:
      - "socks5://username:password@bhx.socks.ipvanish.com:1080" # 1 (Birmingham)
      - "socks5://username:password@gla.socks.ipvanish.com:1080" # 2 (Glasgow)
    AU:
      - "socks5://username:password@syd.socks.ipvanish.com:1080" # 1 (Sydney)
      - "https://username:password@au-syd.prod.surfshark.com" # 2 (Sydney)
      - "https://username:password@au-bne.prod.surfshark.com" # 3 (Brisbane)
    BG: "https://username:password@bg-sof.prod.surfshark.com"

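A hedged sketch of how a 'basic' entry might be resolved for a region (illustrative only; the selection rule is an assumption based on the numbered comments above, not unshackle's actual code):

def pick_basic_proxy(basic_cfg: dict, region: str, index: int = 0):
    entry = basic_cfg.get(region.upper())
    if entry is None:
        return None  # no proxies configured for this region
    if isinstance(entry, str):
        return entry  # a single URI applies directly
    return entry[index % len(entry)]  # a list offers numbered alternatives
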
@@ -1,168 +0,0 @@
# Group or Username to postfix to the end of all download filenames following a dash
tag: user_tag

# Set terminal background color (custom option not in CONFIG.md)
set_terminal_bg: false

# Muxing configuration
muxing:
  set_title: false

# Login credentials for each Service
credentials:
  EXAMPLE: email@example.com:password
  EXAMPLE2: username:password

# Override default directories used across unshackle
directories:
  cache: Cache
  cookies: Cookies
  dcsl: DCSL # Device Certificate Status List
  downloads: Downloads
  logs: Logs
  temp: Temp
  wvds: WVDs
  prds: PRDs
  # Additional directories that can be configured:
  # commands: Commands
  # services: Services
  # vaults: Vaults
  # fonts: Fonts

# Pre-define which Widevine or PlayReady device to use for each Service
cdm:
  default: WVD_1
  EXAMPLE: PRD_1

# Use pywidevine Serve-compliant Remote CDMs
remote_cdm:
  - name: "chrome"
    device_name: chrome
    device_type: CHROME
    system_id: 27175
    security_level: 3
    host: https://domain.com/api
    secret: secret_key
  - name: "chrome-2"
    device_name: chrome
    device_type: CHROME
    system_id: 26830
    security_level: 3
    host: https://domain-2.com/api
    secret: secret_key

# Key Vaults store your obtained Content Encryption Keys (CEKs)
key_vaults:
  - type: SQLite
    name: Local
    path: key_store.db
  # Additional vault types:
  # - type: API
  #   name: "Remote Vault"
  #   uri: "https://key-vault.example.com"
  #   token: "secret_token"
  # - type: MySQL
  #   name: "MySQL Vault"
  #   host: "127.0.0.1"
  #   port: 3306
  #   database: vault
  #   username: user
  #   password: pass

# Choose what software to use to download data
downloader: aria2c
# Options: requests | aria2c | curl_impersonate | n_m3u8dl_re
# Can also be a mapping:
# downloader:
#   NF: requests
#   AMZN: n_m3u8dl_re
#   DSNP: n_m3u8dl_re
#   default: requests

# aria2c downloader configuration
aria2c:
  max_concurrent_downloads: 4
  max_connection_per_server: 3
  split: 5
  file_allocation: falloc # none | prealloc | falloc | trunc

# N_m3u8DL-RE downloader configuration
n_m3u8dl_re:
  thread_count: 16
  ad_keyword: "advertisement"
  use_proxy: true

# curl_impersonate downloader configuration
curl_impersonate:
  browser: chrome120

# Pre-define default options and switches of the dl command
dl:
  best: true
  sub_format: srt
  downloads: 4
  workers: 16
  lang:
    - en
    - fr
  EXAMPLE:
    bitrate: CBR

# Chapter Name to use when exporting a Chapter without a Name
chapter_fallback_name: "Chapter {j:02}"

# Case-Insensitive dictionary of headers for all Services
headers:
  Accept-Language: "en-US,en;q=0.8"
  User-Agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.75 Safari/537.36"

# Override default filenames used across unshackle
filenames:
  log: "unshackle_{name}_{time}.log"
  config: "config.yaml"
  root_config: "unshackle.yaml"
  chapters: "Chapters_{title}_{random}.txt"
  subtitle: "Subtitle_{id}_{language}.srt"

# API key for The Movie Database (TMDB)
tmdb_api_key: ""

# Configuration for pywidevine's serve functionality
serve:
  users:
    secret_key_for_user:
      devices:
        - generic_nexus_4464_l3
      username: user
  # devices:
  #   - '/path/to/device.wvd'

# Configuration data for each Service
services:
  # Service-specific configuration goes here
  # EXAMPLE:
  #   api_key: "service_specific_key"

# Legacy NordVPN configuration (use proxy_providers instead)
nordvpn:
  username: ""
  password: ""
  servers:
    - us: 12

# External proxy provider services
proxy_providers:
  nordvpn:
    username: username_from_service_credentials
    password: password_from_service_credentials
    servers:
      - us: 12 # force US server #12 for US proxies
  basic:
    GB:
      - "socks5://username:password@bhx.socks.ipvanish.com:1080" # 1 (Birmingham)
      - "socks5://username:password@gla.socks.ipvanish.com:1080" # 2 (Glasgow)
    AU:
      - "socks5://username:password@syd.socks.ipvanish.com:1080" # 1 (Sydney)
      - "https://username:password@au-syd.prod.surfshark.com" # 2 (Sydney)
      - "https://username:password@au-bne.prod.surfshark.com" # 3 (Brisbane)
    BG: "https://username:password@bg-sof.prod.surfshark.com"

@@ -10,8 +10,8 @@ from unshackle.core.vault import Vault
 class API(Vault):
     """Key Vault using a simple RESTful HTTP API call."""
 
-    def __init__(self, name: str, uri: str, token: str):
-        super().__init__(name)
+    def __init__(self, name: str, uri: str, token: str, no_push: bool = False):
+        super().__init__(name, no_push)
         self.uri = uri.rstrip("/")
         self.session = Session()
         self.session.headers.update({"User-Agent": f"unshackle v{__version__}"})

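For context, a sketch of what the Vault base class signature presumably looks like after this change (the base class itself is not shown in this diff, so this is an assumption):

# Assumed shape of unshackle.core.vault.Vault after the no_push addition (not in the diff)
class Vault:
    def __init__(self, name: str, no_push: bool = False):
        self.name = name
        self.no_push = no_push  # when True, the vault only serves keys, never receives pushes
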
@@ -16,9 +16,25 @@ class InsertResult(Enum):
 
 
 class HTTP(Vault):
-    """Key Vault using HTTP API with support for both query parameters and JSON payloads."""
+    """
+    Key Vault using HTTP API with support for multiple API modes.
+
+    Supported modes:
+    - query: Uses GET requests with query parameters
+    - json: Uses POST requests with JSON payloads
+    - decrypt_labs: Uses DecryptLabs API format (read-only)
+    """
 
-    def __init__(self, name: str, host: str, password: str, username: Optional[str] = None, api_mode: str = "query"):
+    def __init__(
+        self,
+        name: str,
+        host: str,
+        password: Optional[str] = None,
+        api_key: Optional[str] = None,
+        username: Optional[str] = None,
+        api_mode: str = "query",
+        no_push: bool = False,
+    ):
         """
         Initialize HTTP Vault.
 
@@ -26,12 +42,17 @@ class HTTP(Vault):
             name: Vault name
             host: Host URL
             password: Password for query mode or API token for json mode
-            username: Username (required for query mode, ignored for json mode)
-            api_mode: "query" for query parameters or "json" for JSON API
+            api_key: API key (alternative to password, used for decrypt_labs mode)
+            username: Username (required for query mode, ignored for json/decrypt_labs mode)
+            api_mode: "query" for query parameters, "json" for JSON API, or "decrypt_labs" for DecryptLabs API
+            no_push: If True, this vault will not receive pushed keys
         """
-        super().__init__(name)
-        self.url = host.rstrip("/")
-        self.password = password
+        super().__init__(name, no_push)
+        self.url = host
+        self.password = api_key or password
+        if not self.password:
+            raise ValueError("Either password or api_key is required")
 
         self.username = username
         self.api_mode = api_mode.lower()
+        self.current_title = None
@@ -39,11 +60,15 @@ class HTTP(Vault):
         self.session.headers.update({"User-Agent": f"unshackle v{__version__}"})
         self.api_session_id = None
 
+        if self.api_mode == "decrypt_labs":
+            self.session.headers.update({"decrypt-labs-api-key": self.password})
+            self.no_push = True
+
         # Validate configuration based on mode
         if self.api_mode == "query" and not self.username:
             raise ValueError("Username is required for query mode")
-        elif self.api_mode not in ["query", "json"]:
-            raise ValueError("api_mode must be either 'query' or 'json'")
+        elif self.api_mode not in ["query", "json", "decrypt_labs"]:
+            raise ValueError("api_mode must be either 'query', 'json', or 'decrypt_labs'")
 
     def request(self, method: str, params: dict = None) -> dict:
         """Make a request to the JSON API vault."""
@@ -86,23 +111,69 @@ class HTTP(Vault):
         if isinstance(kid, UUID):
             kid = kid.hex
 
-        if self.api_mode == "json":
+        if self.api_mode == "decrypt_labs":
             try:
-                title = getattr(self, "current_title", None)
-                response = self.request(
-                    "GetKey",
-                    {
-                        "kid": kid,
-                        "service": service.lower(),
-                        "title": title,
-                    },
-                )
+                request_payload = {"service": service.lower(), "kid": kid}
+
+                response = self.session.post(self.url, json=request_payload)
+
+                if not response.ok:
+                    return None
+
+                data = response.json()
+
+                if data.get("message") != "success":
+                    return None
+
+                cached_keys = data.get("cached_keys")
+                if not cached_keys:
+                    return None
+
+                if isinstance(cached_keys, str):
+                    try:
+                        cached_keys = json.loads(cached_keys)
+                    except json.JSONDecodeError:
+                        return cached_keys
+
+                if isinstance(cached_keys, dict):
+                    if cached_keys.get("kid") == kid:
+                        return cached_keys.get("key")
+                    if kid in cached_keys:
+                        return cached_keys[kid]
+                elif isinstance(cached_keys, list):
+                    for entry in cached_keys:
+                        if isinstance(entry, dict):
+                            if entry.get("kid") == kid:
+                                return entry.get("key")
+                        elif isinstance(entry, str) and ":" in entry:
+                            entry_kid, entry_key = entry.split(":", 1)
+                            if entry_kid == kid:
+                                return entry_key
+
+            except Exception as e:
+                print(f"Failed to get key from DecryptLabs ({e.__class__.__name__}: {e})")
+                return None
+            return None
+
+        elif self.api_mode == "json":
+            try:
+                params = {
+                    "kid": kid,
+                    "service": service.lower(),
+                }
+
+                response = self.request("GetKey", params)
                 if response.get("status") == "not_found":
                     return None
                 keys = response.get("keys", [])
                 for key_entry in keys:
-                    if key_entry["kid"] == kid:
-                        return key_entry["key"]
+                    if isinstance(key_entry, str) and ":" in key_entry:
+                        entry_kid, entry_key = key_entry.split(":", 1)
+                        if entry_kid == kid:
+                            return entry_key
+                    elif isinstance(key_entry, dict):
+                        if key_entry.get("kid") == kid:
+                            return key_entry.get("key")
             except Exception as e:
                 print(f"Failed to get key ({e.__class__.__name__}: {e})")
                 return None
@@ -121,7 +192,9 @@ class HTTP(Vault):
         return data["keys"][0]["key"]
 
     def get_keys(self, service: str) -> Iterator[tuple[str, str]]:
-        if self.api_mode == "json":
+        if self.api_mode == "decrypt_labs":
+            return iter([])
+        elif self.api_mode == "json":
             # JSON API doesn't support getting all keys, so return empty iterator
             # This will cause the copy command to rely on the API's internal duplicate handling
             return iter([])
@@ -142,6 +215,9 @@ class HTTP(Vault):
         if not key or key.count("0") == len(key):
             raise ValueError("You cannot add a NULL Content Key to a Vault.")
 
+        if self.api_mode == "decrypt_labs":
+            return False
+
         if isinstance(kid, UUID):
             kid = kid.hex
 
@@ -181,6 +257,9 @@ class HTTP(Vault):
         return data.get("status_code") == 200
 
     def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str]) -> int:
+        if self.api_mode == "decrypt_labs":
+            return 0
+
         for kid, key in kid_keys.items():
             if not key or key.count("0") == len(key):
                 raise ValueError("You cannot add a NULL Content Key to a Vault.")
@@ -232,7 +311,9 @@ class HTTP(Vault):
         return inserted_count
 
     def get_services(self) -> Iterator[str]:
-        if self.api_mode == "json":
+        if self.api_mode == "decrypt_labs":
+            return iter([])
+        elif self.api_mode == "json":
             try:
                 response = self.request("GetServices")
                 services = response.get("services", [])
@@ -272,6 +353,9 @@ class HTTP(Vault):
         if not key or key.count("0") == len(key):
             raise ValueError("You cannot add a NULL Content Key to a Vault.")
 
+        if self.api_mode == "decrypt_labs":
+            return InsertResult.FAILURE
+
         if isinstance(kid, UUID):
             kid = kid.hex
 

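Going by the __init__ signature above, a DecryptLabs-backed vault would presumably be constructed along these lines (a hedged sketch; the host matches the config examples earlier, the key is a placeholder):

vault = HTTP(
    name="DecryptLabs",
    host="https://keyxtractor.decryptlabs.com",
    api_key="your_decrypt_labs_api_key_here",
    api_mode="decrypt_labs",  # forces no_push=True internally, per __init__ above
)
key = vault.get_key("0123456789abcdef0123456789abcdef", "EXAMPLE")  # lookups work; pushes are no-ops
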
@@ -12,12 +12,12 @@ from unshackle.core.vault import Vault
 class MySQL(Vault):
     """Key Vault using a remotely-accessed mysql database connection."""
 
-    def __init__(self, name: str, host: str, database: str, username: str, **kwargs):
+    def __init__(self, name: str, host: str, database: str, username: str, no_push: bool = False, **kwargs):
         """
         All extra arguments provided via **kwargs will be sent to pymysql.connect.
         This can be used to provide more specific connection information.
         """
-        super().__init__(name)
+        super().__init__(name, no_push)
         self.slug = f"{host}:{database}:{username}"
         self.conn_factory = ConnectionFactory(
             dict(host=host, db=database, user=username, cursorclass=DictCursor, **kwargs)
@@ -28,26 +28,33 @@ class MySQL(Vault):
             raise PermissionError(f"MySQL vault {self.slug} has no SELECT permission.")
 
     def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]:
-        if not self.has_table(service):
-            # no table, no key, simple
-            return None
-
         if isinstance(kid, UUID):
             kid = kid.hex
 
+        service_variants = [service]
+        if service != service.lower():
+            service_variants.append(service.lower())
+        if service != service.upper():
+            service_variants.append(service.upper())
+
         conn = self.conn_factory.get()
         cursor = conn.cursor()
 
         try:
-            cursor.execute(
-                # TODO: SQL injection risk
-                f"SELECT `id`, `key_` FROM `{service}` WHERE `kid`=%s AND `key_`!=%s",
-                (kid, "0" * 32),
-            )
-            cek = cursor.fetchone()
-            if not cek:
-                return None
-            return cek["key_"]
+            for service_name in service_variants:
+                if not self.has_table(service_name):
+                    continue
+
+                cursor.execute(
+                    # TODO: SQL injection risk
+                    f"SELECT `id`, `key_` FROM `{service_name}` WHERE `kid`=%s AND `key_`!=%s",
+                    (kid, "0" * 32),
+                )
+                cek = cursor.fetchone()
+                if cek:
+                    return cek["key_"]
+
+            return None
         finally:
             cursor.close()
 
@@ -131,16 +138,27 @@ class MySQL(Vault):
         if any(isinstance(kid, UUID) for kid, key_ in kid_keys.items()):
             kid_keys = {kid.hex if isinstance(kid, UUID) else kid: key_ for kid, key_ in kid_keys.items()}
 
+        if not kid_keys:
+            return 0
+
         conn = self.conn_factory.get()
         cursor = conn.cursor()
 
         try:
+            placeholders = ",".join(["%s"] * len(kid_keys))
+            cursor.execute(f"SELECT kid FROM `{service}` WHERE kid IN ({placeholders})", list(kid_keys.keys()))
+            existing_kids = {row["kid"] for row in cursor.fetchall()}
+
+            new_keys = {kid: key for kid, key in kid_keys.items() if kid not in existing_kids}
+
+            if not new_keys:
+                return 0
+
             cursor.executemany(
                 # TODO: SQL injection risk
-                f"INSERT IGNORE INTO `{service}` (kid, key_) VALUES (%s, %s)",
-                kid_keys.items(),
+                f"INSERT INTO `{service}` (kid, key_) VALUES (%s, %s)",
+                new_keys.items(),
             )
-            return cursor.rowcount
+            return len(new_keys)
         finally:
             conn.commit()
             cursor.close()

@@ -12,29 +12,37 @@ from unshackle.core.vault import Vault
 class SQLite(Vault):
     """Key Vault using a locally-accessed sqlite DB file."""
 
-    def __init__(self, name: str, path: Union[str, Path]):
-        super().__init__(name)
+    def __init__(self, name: str, path: Union[str, Path], no_push: bool = False):
+        super().__init__(name, no_push)
         self.path = Path(path).expanduser()
         # TODO: Use a DictCursor or such to get fetches as dict?
         self.conn_factory = ConnectionFactory(self.path)
 
     def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]:
-        if not self.has_table(service):
-            # no table, no key, simple
-            return None
-
         if isinstance(kid, UUID):
             kid = kid.hex
 
         conn = self.conn_factory.get()
         cursor = conn.cursor()
 
+        # Try both the original service name and lowercase version to handle case sensitivity issues
+        service_variants = [service]
+        if service != service.lower():
+            service_variants.append(service.lower())
+        if service != service.upper():
+            service_variants.append(service.upper())
+
         try:
-            cursor.execute(f"SELECT `id`, `key_` FROM `{service}` WHERE `kid`=? AND `key_`!=?", (kid, "0" * 32))
-            cek = cursor.fetchone()
-            if not cek:
-                return None
-            return cek[1]
+            for service_name in service_variants:
+                if not self.has_table(service_name):
+                    continue
+
+                cursor.execute(f"SELECT `id`, `key_` FROM `{service_name}` WHERE `kid`=? AND `key_`!=?", (kid, "0" * 32))
+                cek = cursor.fetchone()
+                if cek:
+                    return cek[1]
+
+            return None
         finally:
             cursor.close()
 
@@ -102,16 +110,27 @@ class SQLite(Vault):
         if any(isinstance(kid, UUID) for kid, key_ in kid_keys.items()):
             kid_keys = {kid.hex if isinstance(kid, UUID) else kid: key_ for kid, key_ in kid_keys.items()}
 
+        if not kid_keys:
+            return 0
+
         conn = self.conn_factory.get()
         cursor = conn.cursor()
 
         try:
+            placeholders = ",".join(["?"] * len(kid_keys))
+            cursor.execute(f"SELECT kid FROM `{service}` WHERE kid IN ({placeholders})", list(kid_keys.keys()))
+            existing_kids = {row[0] for row in cursor.fetchall()}
+
+            new_keys = {kid: key for kid, key in kid_keys.items() if kid not in existing_kids}
+
+            if not new_keys:
+                return 0
+
             cursor.executemany(
                 # TODO: SQL injection risk
-                f"INSERT OR IGNORE INTO `{service}` (kid, key_) VALUES (?, ?)",
-                kid_keys.items(),
+                f"INSERT INTO `{service}` (kid, key_) VALUES (?, ?)",
+                new_keys.items(),
            )
-            return cursor.rowcount
+            return len(new_keys)
         finally:
             conn.commit()
             cursor.close()

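Both the MySQL and SQLite changes replace driver-level "insert ignore" semantics with the same two-step pattern: select the kids that already exist, insert only the remainder, and return len(new_keys) so callers count genuinely new rows instead of trusting cursor.rowcount. A minimal standalone sketch of the pattern in plain sqlite3 (illustrative, outside the vault classes; table/column names match the queries above):

import sqlite3

def insert_new_keys(conn: sqlite3.Connection, table: str, kid_keys: dict[str, str]) -> int:
    """Insert only kids not already present; return how many rows were genuinely new."""
    if not kid_keys:
        return 0
    cursor = conn.cursor()
    placeholders = ",".join(["?"] * len(kid_keys))
    cursor.execute(f"SELECT kid FROM `{table}` WHERE kid IN ({placeholders})", list(kid_keys))
    existing = {row[0] for row in cursor.fetchall()}
    new_keys = {kid: key for kid, key in kid_keys.items() if kid not in existing}
    if new_keys:
        cursor.executemany(f"INSERT INTO `{table}` (kid, key_) VALUES (?, ?)", list(new_keys.items()))
        conn.commit()
    return len(new_keys)
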
uv.lock (generated, 697 changes)
@@ -13,7 +13,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.12.14"
|
||||
version = "3.12.15"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohappyeyeballs" },
|
||||
@@ -25,59 +25,75 @@ dependencies = [
|
||||
{ name = "propcache" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e6/0b/e39ad954107ebf213a2325038a3e7a506be3d98e1435e1f82086eec4cde2/aiohttp-3.12.14.tar.gz", hash = "sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2", size = 7822921, upload-time = "2025-07-10T13:05:33.968Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/88/f161f429f9de391eee6a5c2cffa54e2ecd5b7122ae99df247f7734dfefcb/aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248", size = 702641, upload-time = "2025-07-10T13:02:38.98Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/b5/24fa382a69a25d242e2baa3e56d5ea5227d1b68784521aaf3a1a8b34c9a4/aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb", size = 479005, upload-time = "2025-07-10T13:02:42.714Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/67/fda1bc34adbfaa950d98d934a23900918f9d63594928c70e55045838c943/aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd", size = 466781, upload-time = "2025-07-10T13:02:44.639Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/96/3ce1ea96d3cf6928b87cfb8cdd94650367f5c2f36e686a1f5568f0f13754/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c", size = 1648841, upload-time = "2025-07-10T13:02:46.356Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/04/ddea06cb4bc7d8db3745cf95e2c42f310aad485ca075bd685f0e4f0f6b65/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95", size = 1622896, upload-time = "2025-07-10T13:02:48.422Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/66/63942f104d33ce6ca7871ac6c1e2ebab48b88f78b2b7680c37de60f5e8cd/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663", size = 1695302, upload-time = "2025-07-10T13:02:50.078Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/00/aab615742b953f04b48cb378ee72ada88555b47b860b98c21c458c030a23/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1", size = 1737617, upload-time = "2025-07-10T13:02:52.123Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/4f/ef6d9f77225cf27747368c37b3d69fac1f8d6f9d3d5de2d410d155639524/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61", size = 1642282, upload-time = "2025-07-10T13:02:53.899Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/e1/e98a43c15aa52e9219a842f18c59cbae8bbe2d50c08d298f17e9e8bafa38/aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656", size = 1582406, upload-time = "2025-07-10T13:02:55.515Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/5c/29c6dfb49323bcdb0239bf3fc97ffcf0eaf86d3a60426a3287ec75d67721/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3", size = 1626255, upload-time = "2025-07-10T13:02:57.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/60/ec90782084090c4a6b459790cfd8d17be2c5662c9c4b2d21408b2f2dc36c/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288", size = 1637041, upload-time = "2025-07-10T13:02:59.008Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/89/205d3ad30865c32bc472ac13f94374210745b05bd0f2856996cb34d53396/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda", size = 1612494, upload-time = "2025-07-10T13:03:00.618Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/ae/2f66edaa8bd6db2a4cba0386881eb92002cdc70834e2a93d1d5607132c7e/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc", size = 1692081, upload-time = "2025-07-10T13:03:02.154Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/3a/fa73bfc6e21407ea57f7906a816f0dc73663d9549da703be05dbd76d2dc3/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8", size = 1715318, upload-time = "2025-07-10T13:03:04.322Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/b3/751124b8ceb0831c17960d06ee31a4732cb4a6a006fdbfa1153d07c52226/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3", size = 1643660, upload-time = "2025-07-10T13:03:06.406Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/3c/72477a1d34edb8ab8ce8013086a41526d48b64f77e381c8908d24e1c18f5/aiohttp-3.12.14-cp310-cp310-win32.whl", hash = "sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c", size = 428289, upload-time = "2025-07-10T13:03:08.274Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/c4/8aec4ccf1b822ec78e7982bd5cf971113ecce5f773f04039c76a083116fc/aiohttp-3.12.14-cp310-cp310-win_amd64.whl", hash = "sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db", size = 451328, upload-time = "2025-07-10T13:03:10.146Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/e1/8029b29316971c5fa89cec170274582619a01b3d82dd1036872acc9bc7e8/aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597", size = 709960, upload-time = "2025-07-10T13:03:11.936Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/bd/4f204cf1e282041f7b7e8155f846583b19149e0872752711d0da5e9cc023/aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393", size = 482235, upload-time = "2025-07-10T13:03:14.118Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/0f/2a580fcdd113fe2197a3b9df30230c7e85bb10bf56f7915457c60e9addd9/aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179", size = 470501, upload-time = "2025-07-10T13:03:16.153Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/78/2c1089f6adca90c3dd74915bafed6d6d8a87df5e3da74200f6b3a8b8906f/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb", size = 1740696, upload-time = "2025-07-10T13:03:18.4Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/c8/ce6c7a34d9c589f007cfe064da2d943b3dee5aabc64eaecd21faf927ab11/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245", size = 1689365, upload-time = "2025-07-10T13:03:20.629Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/10/431cd3d089de700756a56aa896faf3ea82bee39d22f89db7ddc957580308/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b", size = 1788157, upload-time = "2025-07-10T13:03:22.44Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b2/26f4524184e0f7ba46671c512d4b03022633bcf7d32fa0c6f1ef49d55800/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641", size = 1827203, upload-time = "2025-07-10T13:03:24.628Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/30/aadcdf71b510a718e3d98a7bfeaea2396ac847f218b7e8edb241b09bd99a/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe", size = 1729664, upload-time = "2025-07-10T13:03:26.412Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/7f/7ccf11756ae498fdedc3d689a0c36ace8fc82f9d52d3517da24adf6e9a74/aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7", size = 1666741, upload-time = "2025-07-10T13:03:28.167Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/4d/35ebc170b1856dd020c92376dbfe4297217625ef4004d56587024dc2289c/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635", size = 1715013, upload-time = "2025-07-10T13:03:30.018Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/24/46dc0380146f33e2e4aa088b92374b598f5bdcde1718c77e8d1a0094f1a4/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da", size = 1710172, upload-time = "2025-07-10T13:03:31.821Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/0a/46599d7d19b64f4d0fe1b57bdf96a9a40b5c125f0ae0d8899bc22e91fdce/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419", size = 1690355, upload-time = "2025-07-10T13:03:34.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/86/b21b682e33d5ca317ef96bd21294984f72379454e689d7da584df1512a19/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab", size = 1783958, upload-time = "2025-07-10T13:03:36.53Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/45/f639482530b1396c365f23c5e3b1ae51c9bc02ba2b2248ca0c855a730059/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0", size = 1804423, upload-time = "2025-07-10T13:03:38.504Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/e5/39635a9e06eed1d73671bd4079a3caf9cf09a49df08490686f45a710b80e/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28", size = 1717479, upload-time = "2025-07-10T13:03:40.158Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/e1/7f1c77515d369b7419c5b501196526dad3e72800946c0099594c1f0c20b4/aiohttp-3.12.14-cp311-cp311-win32.whl", hash = "sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b", size = 427907, upload-time = "2025-07-10T13:03:41.801Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/24/a6bf915c85b7a5b07beba3d42b3282936b51e4578b64a51e8e875643c276/aiohttp-3.12.14-cp311-cp311-win_amd64.whl", hash = "sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced", size = 452334, upload-time = "2025-07-10T13:03:43.485Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/0d/29026524e9336e33d9767a1e593ae2b24c2b8b09af7c2bd8193762f76b3e/aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22", size = 701055, upload-time = "2025-07-10T13:03:45.59Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/b8/a5e8e583e6c8c1056f4b012b50a03c77a669c2e9bf012b7cf33d6bc4b141/aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a", size = 475670, upload-time = "2025-07-10T13:03:47.249Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/e8/5202890c9e81a4ec2c2808dd90ffe024952e72c061729e1d49917677952f/aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff", size = 468513, upload-time = "2025-07-10T13:03:49.377Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/e5/d11db8c23d8923d3484a27468a40737d50f05b05eebbb6288bafcb467356/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d", size = 1715309, upload-time = "2025-07-10T13:03:51.556Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/44/af6879ca0eff7a16b1b650b7ea4a827301737a350a464239e58aa7c387ef/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869", size = 1697961, upload-time = "2025-07-10T13:03:53.511Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/94/18457f043399e1ec0e59ad8674c0372f925363059c276a45a1459e17f423/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c", size = 1753055, upload-time = "2025-07-10T13:03:55.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/d9/1d3744dc588fafb50ff8a6226d58f484a2242b5dd93d8038882f55474d41/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7", size = 1799211, upload-time = "2025-07-10T13:03:57.216Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/12/2530fb2b08773f717ab2d249ca7a982ac66e32187c62d49e2c86c9bba9b4/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660", size = 1718649, upload-time = "2025-07-10T13:03:59.469Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/34/8d6015a729f6571341a311061b578e8b8072ea3656b3d72329fa0faa2c7c/aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088", size = 1634452, upload-time = "2025-07-10T13:04:01.698Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/4b/08b83ea02595a582447aeb0c1986792d0de35fe7a22fb2125d65091cbaf3/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7", size = 1695511, upload-time = "2025-07-10T13:04:04.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/66/9c7c31037a063eec13ecf1976185c65d1394ded4a5120dd5965e3473cb21/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9", size = 1716967, upload-time = "2025-07-10T13:04:06.132Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/02/84406e0ad1acb0fb61fd617651ab6de760b2d6a31700904bc0b33bd0894d/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3", size = 1657620, upload-time = "2025-07-10T13:04:07.944Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/53/da018f4013a7a179017b9a274b46b9a12cbeb387570f116964f498a6f211/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb", size = 1737179, upload-time = "2025-07-10T13:04:10.182Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/e8/ca01c5ccfeaafb026d85fa4f43ceb23eb80ea9c1385688db0ef322c751e9/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425", size = 1765156, upload-time = "2025-07-10T13:04:12.029Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/32/5501ab525a47ba23c20613e568174d6c63aa09e2caa22cded5c6ea8e3ada/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0", size = 1724766, upload-time = "2025-07-10T13:04:13.961Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/af/28e24574801fcf1657945347ee10df3892311c2829b41232be6089e461e7/aiohttp-3.12.14-cp312-cp312-win32.whl", hash = "sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729", size = 422641, upload-time = "2025-07-10T13:04:16.018Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/d5/7ac2464aebd2eecac38dbe96148c9eb487679c512449ba5215d233755582/aiohttp-3.12.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338", size = 449316, upload-time = "2025-07-10T13:04:18.289Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/dc/ef9394bde9080128ad401ac7ede185267ed637df03b51f05d14d1c99ad67/aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc", size = 703921, upload-time = "2025-07-29T05:49:43.584Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/42/63fccfc3a7ed97eb6e1a71722396f409c46b60a0552d8a56d7aad74e0df5/aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af", size = 480288, upload-time = "2025-07-29T05:49:47.851Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/a2/7b8a020549f66ea2a68129db6960a762d2393248f1994499f8ba9728bbed/aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421", size = 468063, upload-time = "2025-07-29T05:49:49.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/f5/d11e088da9176e2ad8220338ae0000ed5429a15f3c9dfd983f39105399cd/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79", size = 1650122, upload-time = "2025-07-29T05:49:51.874Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/6b/b60ce2757e2faed3d70ed45dafee48cee7bfb878785a9423f7e883f0639c/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77", size = 1624176, upload-time = "2025-07-29T05:49:53.805Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/de/8c9fde2072a1b72c4fadecf4f7d4be7a85b1d9a4ab333d8245694057b4c6/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c", size = 1696583, upload-time = "2025-07-29T05:49:55.338Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/ad/07f863ca3d895a1ad958a54006c6dafb4f9310f8c2fdb5f961b8529029d3/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4", size = 1738896, upload-time = "2025-07-29T05:49:57.045Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/43/2bd482ebe2b126533e8755a49b128ec4e58f1a3af56879a3abdb7b42c54f/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6", size = 1643561, upload-time = "2025-07-29T05:49:58.762Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/40/2fa9f514c4cf4cbae8d7911927f81a1901838baf5e09a8b2c299de1acfe5/aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2", size = 1583685, upload-time = "2025-07-29T05:50:00.375Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/c3/94dc7357bc421f4fb978ca72a201a6c604ee90148f1181790c129396ceeb/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d", size = 1627533, upload-time = "2025-07-29T05:50:02.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/3f/1f8911fe1844a07001e26593b5c255a685318943864b27b4e0267e840f95/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb", size = 1638319, upload-time = "2025-07-29T05:50:04.282Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/46/27bf57a99168c4e145ffee6b63d0458b9c66e58bb70687c23ad3d2f0bd17/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5", size = 1613776, upload-time = "2025-07-29T05:50:05.863Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/7e/1d2d9061a574584bb4ad3dbdba0da90a27fdc795bc227def3a46186a8bc1/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b", size = 1693359, upload-time = "2025-07-29T05:50:07.563Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/98/bee429b52233c4a391980a5b3b196b060872a13eadd41c3a34be9b1469ed/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065", size = 1716598, upload-time = "2025-07-29T05:50:09.33Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/39/b0314c1ea774df3392751b686104a3938c63ece2b7ce0ba1ed7c0b4a934f/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1", size = 1644940, upload-time = "2025-07-29T05:50:11.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/83/3dacb8d3f8f512c8ca43e3fa8a68b20583bd25636ffa4e56ee841ffd79ae/aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a", size = 429239, upload-time = "2025-07-29T05:50:12.803Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/f9/470b5daba04d558c9673ca2034f28d067f3202a40e17804425f0c331c89f/aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830", size = 452297, upload-time = "2025-07-29T05:50:14.266Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" },
{ url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" },
{ url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" },
{ url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" },
{ url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" },
{ url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" },
{ url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" },
{ url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" },
{ url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" },
{ url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" },
{ url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" },
{ url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" },
]

[[package]]
name = "aiohttp-swagger3"
version = "0.10.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
{ name = "attrs" },
{ name = "fastjsonschema" },
{ name = "pyyaml" },
{ name = "rfc3339-validator" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/06/00ccb2c8afdde4ca7c3cac424d54715c7d90cdd4e13e1ca71d68f5b2e665/aiohttp_swagger3-0.10.0.tar.gz", hash = "sha256:a333c59328f64dd64587e5f276ee84dc256f587d09f2da6ddaae3812fa4d4f33", size = 1839028, upload-time = "2025-02-11T10:51:26.974Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/8f/db4cb843999a3088846d170f38eda2182b50b5733387be8102fed171c53f/aiohttp_swagger3-0.10.0-py3-none-any.whl", hash = "sha256:0ae2d2ba7dbd8ea8fe1cffe8f0197db5d0aa979eb9679bd699ecd87923912509", size = 1826491, upload-time = "2025-02-11T10:51:25.174Z" },
]

[[package]]
@@ -95,7 +111,7 @@ wheels = [

[[package]]
name = "anyio"
version = "4.9.0"
version = "4.10.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
@@ -103,9 +119,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" },
{ url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" },
]

[[package]]
@@ -208,11 +224,11 @@ wheels = [

[[package]]
name = "certifi"
version = "2025.7.14"
version = "2025.8.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" }
sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" },
{ url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" },
]

[[package]]
@@ -281,50 +297,44 @@ wheels = [

[[package]]
name = "charset-normalizer"
version = "3.4.2"
version = "3.4.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" }
sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" },
{ url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" },
{ url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" },
{ url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" },
{ url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" },
{ url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" },
{ url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" },
{ url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" },
{ url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" },
{ url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" },
{ url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" },
{ url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" },
{ url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" },
{ url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" },
{ url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" },
{ url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" },
{ url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" },
{ url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" },
{ url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" },
{ url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" },
{ url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" },
{ url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" },
{ url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" },
{ url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" },
{ url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" },
{ url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" },
{ url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" },
{ url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" },
{ url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" },
{ url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" },
{ url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" },
{ url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" },
{ url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" },
{ url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" },
{ url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" },
{ url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" },
{ url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" },
{ url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" },
{ url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" },
{ url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" },
{ url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" },
{ url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" },
{ url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" },
{ url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" },
{ url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" },
{ url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" },
{ url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" },
{ url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" },
{ url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" },
{ url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" },
{ url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" },
{ url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" },
{ url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" },
{ url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" },
{ url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" },
{ url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" },
{ url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" },
{ url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" },
{ url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" },
{ url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" },
{ url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" },
{ url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" },
{ url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" },
{ url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" },
{ url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" },
{ url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" },
{ url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" },
{ url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" },
{ url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" },
{ url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" },
{ url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" },
{ url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" },
{ url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" },
{ url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" },
]

[[package]]
@@ -365,49 +375,49 @@ wheels = [

[[package]]
name = "cryptography"
version = "45.0.5"
version = "45.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/1e/49527ac611af559665f71cbb8f92b332b5ec9c6fbc4e88b0f8e92f5e85df/cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a", size = 744903, upload-time = "2025-07-02T13:06:25.941Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949, upload-time = "2025-08-05T23:59:27.93Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f0/fb/09e28bc0c46d2c547085e60897fea96310574c70fb21cd58a730a45f3403/cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8", size = 7043092, upload-time = "2025-07-02T13:05:01.514Z" },
{ url = "https://files.pythonhosted.org/packages/b1/05/2194432935e29b91fb649f6149c1a4f9e6d3d9fc880919f4ad1bcc22641e/cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d", size = 4205926, upload-time = "2025-07-02T13:05:04.741Z" },
{ url = "https://files.pythonhosted.org/packages/07/8b/9ef5da82350175e32de245646b1884fc01124f53eb31164c77f95a08d682/cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5", size = 4429235, upload-time = "2025-07-02T13:05:07.084Z" },
{ url = "https://files.pythonhosted.org/packages/7c/e1/c809f398adde1994ee53438912192d92a1d0fc0f2d7582659d9ef4c28b0c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57", size = 4209785, upload-time = "2025-07-02T13:05:09.321Z" },
{ url = "https://files.pythonhosted.org/packages/d0/8b/07eb6bd5acff58406c5e806eff34a124936f41a4fb52909ffa4d00815f8c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0", size = 3893050, upload-time = "2025-07-02T13:05:11.069Z" },
{ url = "https://files.pythonhosted.org/packages/ec/ef/3333295ed58d900a13c92806b67e62f27876845a9a908c939f040887cca9/cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d", size = 4457379, upload-time = "2025-07-02T13:05:13.32Z" },
{ url = "https://files.pythonhosted.org/packages/d9/9d/44080674dee514dbb82b21d6fa5d1055368f208304e2ab1828d85c9de8f4/cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9", size = 4209355, upload-time = "2025-07-02T13:05:15.017Z" },
{ url = "https://files.pythonhosted.org/packages/c9/d8/0749f7d39f53f8258e5c18a93131919ac465ee1f9dccaf1b3f420235e0b5/cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27", size = 4456087, upload-time = "2025-07-02T13:05:16.945Z" },
{ url = "https://files.pythonhosted.org/packages/09/d7/92acac187387bf08902b0bf0699816f08553927bdd6ba3654da0010289b4/cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e", size = 4332873, upload-time = "2025-07-02T13:05:18.743Z" },
{ url = "https://files.pythonhosted.org/packages/03/c2/840e0710da5106a7c3d4153c7215b2736151bba60bf4491bdb421df5056d/cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174", size = 4564651, upload-time = "2025-07-02T13:05:21.382Z" },
{ url = "https://files.pythonhosted.org/packages/2e/92/cc723dd6d71e9747a887b94eb3827825c6c24b9e6ce2bb33b847d31d5eaa/cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9", size = 2929050, upload-time = "2025-07-02T13:05:23.39Z" },
{ url = "https://files.pythonhosted.org/packages/1f/10/197da38a5911a48dd5389c043de4aec4b3c94cb836299b01253940788d78/cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63", size = 3403224, upload-time = "2025-07-02T13:05:25.202Z" },
{ url = "https://files.pythonhosted.org/packages/fe/2b/160ce8c2765e7a481ce57d55eba1546148583e7b6f85514472b1d151711d/cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8", size = 7017143, upload-time = "2025-07-02T13:05:27.229Z" },
{ url = "https://files.pythonhosted.org/packages/c2/e7/2187be2f871c0221a81f55ee3105d3cf3e273c0a0853651d7011eada0d7e/cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd", size = 4197780, upload-time = "2025-07-02T13:05:29.299Z" },
{ url = "https://files.pythonhosted.org/packages/b9/cf/84210c447c06104e6be9122661159ad4ce7a8190011669afceeaea150524/cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e", size = 4420091, upload-time = "2025-07-02T13:05:31.221Z" },
{ url = "https://files.pythonhosted.org/packages/3e/6a/cb8b5c8bb82fafffa23aeff8d3a39822593cee6e2f16c5ca5c2ecca344f7/cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0", size = 4198711, upload-time = "2025-07-02T13:05:33.062Z" },
{ url = "https://files.pythonhosted.org/packages/04/f7/36d2d69df69c94cbb2473871926daf0f01ad8e00fe3986ac3c1e8c4ca4b3/cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135", size = 3883299, upload-time = "2025-07-02T13:05:34.94Z" },
{ url = "https://files.pythonhosted.org/packages/82/c7/f0ea40f016de72f81288e9fe8d1f6748036cb5ba6118774317a3ffc6022d/cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7", size = 4450558, upload-time = "2025-07-02T13:05:37.288Z" },
{ url = "https://files.pythonhosted.org/packages/06/ae/94b504dc1a3cdf642d710407c62e86296f7da9e66f27ab12a1ee6fdf005b/cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42", size = 4198020, upload-time = "2025-07-02T13:05:39.102Z" },
{ url = "https://files.pythonhosted.org/packages/05/2b/aaf0adb845d5dabb43480f18f7ca72e94f92c280aa983ddbd0bcd6ecd037/cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492", size = 4449759, upload-time = "2025-07-02T13:05:41.398Z" },
{ url = "https://files.pythonhosted.org/packages/91/e4/f17e02066de63e0100a3a01b56f8f1016973a1d67551beaf585157a86b3f/cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0", size = 4319991, upload-time = "2025-07-02T13:05:43.64Z" },
{ url = "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", size = 4554189, upload-time = "2025-07-02T13:05:46.045Z" },
{ url = "https://files.pythonhosted.org/packages/f8/ea/a78a0c38f4c8736287b71c2ea3799d173d5ce778c7d6e3c163a95a05ad2a/cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f", size = 2911769, upload-time = "2025-07-02T13:05:48.329Z" },
{ url = "https://files.pythonhosted.org/packages/79/b3/28ac139109d9005ad3f6b6f8976ffede6706a6478e21c889ce36c840918e/cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97", size = 3390016, upload-time = "2025-07-02T13:05:50.811Z" },
{ url = "https://files.pythonhosted.org/packages/f8/8b/34394337abe4566848a2bd49b26bcd4b07fd466afd3e8cce4cb79a390869/cryptography-45.0.5-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:206210d03c1193f4e1ff681d22885181d47efa1ab3018766a7b32a7b3d6e6afd", size = 3575762, upload-time = "2025-07-02T13:05:53.166Z" },
{ url = "https://files.pythonhosted.org/packages/8b/5d/a19441c1e89afb0f173ac13178606ca6fab0d3bd3ebc29e9ed1318b507fc/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c648025b6840fe62e57107e0a25f604db740e728bd67da4f6f060f03017d5097", size = 4140906, upload-time = "2025-07-02T13:05:55.914Z" },
{ url = "https://files.pythonhosted.org/packages/4b/db/daceb259982a3c2da4e619f45b5bfdec0e922a23de213b2636e78ef0919b/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b8fa8b0a35a9982a3c60ec79905ba5bb090fc0b9addcfd3dc2dd04267e45f25e", size = 4374411, upload-time = "2025-07-02T13:05:57.814Z" },
{ url = "https://files.pythonhosted.org/packages/6a/35/5d06ad06402fc522c8bf7eab73422d05e789b4e38fe3206a85e3d6966c11/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:14d96584701a887763384f3c47f0ca7c1cce322aa1c31172680eb596b890ec30", size = 4140942, upload-time = "2025-07-02T13:06:00.137Z" },
{ url = "https://files.pythonhosted.org/packages/65/79/020a5413347e44c382ef1f7f7e7a66817cd6273e3e6b5a72d18177b08b2f/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57c816dfbd1659a367831baca4b775b2a5b43c003daf52e9d57e1d30bc2e1b0e", size = 4374079, upload-time = "2025-07-02T13:06:02.043Z" },
{ url = "https://files.pythonhosted.org/packages/9b/c5/c0e07d84a9a2a8a0ed4f865e58f37c71af3eab7d5e094ff1b21f3f3af3bc/cryptography-45.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b9e38e0a83cd51e07f5a48ff9691cae95a79bea28fe4ded168a8e5c6c77e819d", size = 3321362, upload-time = "2025-07-02T13:06:04.463Z" },
{ url = "https://files.pythonhosted.org/packages/c0/71/9bdbcfd58d6ff5084687fe722c58ac718ebedbc98b9f8f93781354e6d286/cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e", size = 3587878, upload-time = "2025-07-02T13:06:06.339Z" },
{ url = "https://files.pythonhosted.org/packages/f0/63/83516cfb87f4a8756eaa4203f93b283fda23d210fc14e1e594bd5f20edb6/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6", size = 4152447, upload-time = "2025-07-02T13:06:08.345Z" },
{ url = "https://files.pythonhosted.org/packages/22/11/d2823d2a5a0bd5802b3565437add16f5c8ce1f0778bf3822f89ad2740a38/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18", size = 4386778, upload-time = "2025-07-02T13:06:10.263Z" },
{ url = "https://files.pythonhosted.org/packages/5f/38/6bf177ca6bce4fe14704ab3e93627c5b0ca05242261a2e43ef3168472540/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463", size = 4151627, upload-time = "2025-07-02T13:06:13.097Z" },
{ url = "https://files.pythonhosted.org/packages/38/6a/69fc67e5266bff68a91bcb81dff8fb0aba4d79a78521a08812048913e16f/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1", size = 4385593, upload-time = "2025-07-02T13:06:15.689Z" },
{ url = "https://files.pythonhosted.org/packages/f6/34/31a1604c9a9ade0fdab61eb48570e09a796f4d9836121266447b0eaf7feb/cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f", size = 3331106, upload-time = "2025-07-02T13:06:18.058Z" },
{ url = "https://files.pythonhosted.org/packages/8c/29/2793d178d0eda1ca4a09a7c4e09a5185e75738cc6d526433e8663b460ea6/cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74", size = 7042702, upload-time = "2025-08-05T23:58:23.464Z" },
{ url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483, upload-time = "2025-08-05T23:58:27.132Z" },
{ url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679, upload-time = "2025-08-05T23:58:29.152Z" },
{ url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553, upload-time = "2025-08-05T23:58:30.596Z" },
{ url = "https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499, upload-time = "2025-08-05T23:58:32.03Z" },
{ url = "https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484, upload-time = "2025-08-05T23:58:33.526Z" },
{ url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281, upload-time = "2025-08-05T23:58:35.445Z" },
{ url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890, upload-time = "2025-08-05T23:58:36.923Z" },
{ url = "https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247, upload-time = "2025-08-05T23:58:38.781Z" },
{ url = "https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045, upload-time = "2025-08-05T23:58:40.415Z" },
{ url = "https://files.pythonhosted.org/packages/31/c3/77722446b13fa71dddd820a5faab4ce6db49e7e0bf8312ef4192a3f78e2f/cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159", size = 2928923, upload-time = "2025-08-05T23:58:41.919Z" },
{ url = "https://files.pythonhosted.org/packages/38/63/a025c3225188a811b82932a4dcc8457a26c3729d81578ccecbcce2cb784e/cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec", size = 3403805, upload-time = "2025-08-05T23:58:43.792Z" },
{ url = "https://files.pythonhosted.org/packages/5b/af/bcfbea93a30809f126d51c074ee0fac5bd9d57d068edf56c2a73abedbea4/cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0", size = 7020111, upload-time = "2025-08-05T23:58:45.316Z" },
{ url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169, upload-time = "2025-08-05T23:58:47.121Z" },
{ url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273, upload-time = "2025-08-05T23:58:48.557Z" },
{ url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211, upload-time = "2025-08-05T23:58:50.139Z" },
{ url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732, upload-time = "2025-08-05T23:58:52.253Z" },
{ url = "https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655, upload-time = "2025-08-05T23:58:53.848Z" },
{ url = "https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956, upload-time = "2025-08-05T23:58:55.209Z" },
{ url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859, upload-time = "2025-08-05T23:58:56.639Z" },
{ url = "https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254, upload-time = "2025-08-05T23:58:58.833Z" },
{ url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815, upload-time = "2025-08-05T23:59:00.283Z" },
{ url = "https://files.pythonhosted.org/packages/7e/01/aa2f4940262d588a8fdf4edabe4cda45854d00ebc6eaac12568b3a491a16/cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02", size = 2912147, upload-time = "2025-08-05T23:59:01.716Z" },
{ url = "https://files.pythonhosted.org/packages/0a/bc/16e0276078c2de3ceef6b5a34b965f4436215efac45313df90d55f0ba2d2/cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b", size = 3390459, upload-time = "2025-08-05T23:59:03.358Z" },
{ url = "https://files.pythonhosted.org/packages/56/d2/4482d97c948c029be08cb29854a91bd2ae8da7eb9c4152461f1244dcea70/cryptography-45.0.6-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:705bb7c7ecc3d79a50f236adda12ca331c8e7ecfbea51edd931ce5a7a7c4f012", size = 3576812, upload-time = "2025-08-05T23:59:04.833Z" },
{ url = "https://files.pythonhosted.org/packages/ec/24/55fc238fcaa122855442604b8badb2d442367dfbd5a7ca4bb0bd346e263a/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d", size = 4141694, upload-time = "2025-08-05T23:59:06.66Z" },
{ url = "https://files.pythonhosted.org/packages/f9/7e/3ea4fa6fbe51baf3903806a0241c666b04c73d2358a3ecce09ebee8b9622/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d", size = 4375010, upload-time = "2025-08-05T23:59:08.14Z" },
{ url = "https://files.pythonhosted.org/packages/50/42/ec5a892d82d2a2c29f80fc19ced4ba669bca29f032faf6989609cff1f8dc/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da", size = 4141377, upload-time = "2025-08-05T23:59:09.584Z" },
{ url = "https://files.pythonhosted.org/packages/e7/d7/246c4c973a22b9c2931999da953a2c19cae7c66b9154c2d62ffed811225e/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db", size = 4374609, upload-time = "2025-08-05T23:59:11.923Z" },
{ url = "https://files.pythonhosted.org/packages/78/6d/c49ccf243f0a1b0781c2a8de8123ee552f0c8a417c6367a24d2ecb7c11b3/cryptography-45.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2384f2ab18d9be88a6e4f8972923405e2dbb8d3e16c6b43f15ca491d7831bd18", size = 3322156, upload-time = "2025-08-05T23:59:13.597Z" },
{ url = "https://files.pythonhosted.org/packages/61/69/c252de4ec047ba2f567ecb53149410219577d408c2aea9c989acae7eafce/cryptography-45.0.6-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fc022c1fa5acff6def2fc6d7819bbbd31ccddfe67d075331a65d9cfb28a20983", size = 3584669, upload-time = "2025-08-05T23:59:15.431Z" },
{ url = "https://files.pythonhosted.org/packages/e3/fe/deea71e9f310a31fe0a6bfee670955152128d309ea2d1c79e2a5ae0f0401/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427", size = 4153022, upload-time = "2025-08-05T23:59:16.954Z" },
{ url = "https://files.pythonhosted.org/packages/60/45/a77452f5e49cb580feedba6606d66ae7b82c128947aa754533b3d1bd44b0/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b", size = 4386802, upload-time = "2025-08-05T23:59:18.55Z" },
{ url = "https://files.pythonhosted.org/packages/a3/b9/a2f747d2acd5e3075fdf5c145c7c3568895daaa38b3b0c960ef830db6cdc/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c", size = 4152706, upload-time = "2025-08-05T23:59:20.044Z" },
{ url = "https://files.pythonhosted.org/packages/81/ec/381b3e8d0685a3f3f304a382aa3dfce36af2d76467da0fd4bb21ddccc7b2/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385", size = 4386740, upload-time = "2025-08-05T23:59:21.525Z" },
{ url = "https://files.pythonhosted.org/packages/0a/76/cf8d69da8d0b5ecb0db406f24a63a3f69ba5e791a11b782aeeefef27ccbb/cryptography-45.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:629127cfdcdc6806dfe234734d7cb8ac54edaf572148274fa377a7d3405b0043", size = 3331874, upload-time = "2025-08-05T23:59:23.017Z" },
]

[[package]]
@@ -475,12 +485,21 @@ wheels = [
]

[[package]]
-name = "filelock"
-version = "3.18.0"
+name = "fastjsonschema"
+version = "2.19.1"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/7f/cedf77ace50aa60c566deaca9066750f06e1fcf6ad24f254d255bb976dd6/fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d", size = 372732, upload-time = "2023-12-28T14:02:06.823Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/b9/79691036d4a8f9857e74d1728b23f34f583b81350a27492edda58d5604e1/fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0", size = 23388, upload-time = "2023-12-28T14:02:04.512Z" },
]

+[[package]]
+name = "filelock"
+version = "3.19.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" },
+]

[[package]]
@@ -582,11 +601,11 @@ wheels = [

[[package]]
name = "identify"
-version = "2.6.12"
+version = "2.6.13"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/82/ca/ffbabe3635bb839aa36b3a893c91a9b0d368cb4d8073e03a12896970af82/identify-2.6.13.tar.gz", hash = "sha256:da8d6c828e773620e13bfa86ea601c5a5310ba4bcd65edf378198b56a1f9fb32", size = 99243, upload-time = "2025-08-09T19:35:00.6Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" },
+    { url = "https://files.pythonhosted.org/packages/e7/ce/461b60a3ee109518c055953729bf9ed089a04db895d47e95444071dcdef2/identify-2.6.13-py2.py3-none-any.whl", hash = "sha256:60381139b3ae39447482ecc406944190f690d4a2997f2584062089848361b33b", size = 99153, upload-time = "2025-08-09T19:34:59.1Z" },
]

[[package]]
@@ -651,67 +670,72 @@ wheels = [

[[package]]
name = "lxml"
-version = "5.4.0"
+version = "6.0.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479, upload-time = "2025-04-23T01:50:29.322Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f5/1f/a3b6b74a451ceb84b471caa75c934d2430a4d84395d38ef201d539f38cd1/lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c", size = 8076838, upload-time = "2025-04-23T01:44:29.325Z" },
{ url = "https://files.pythonhosted.org/packages/36/af/a567a55b3e47135b4d1f05a1118c24529104c003f95851374b3748139dc1/lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7", size = 4381827, upload-time = "2025-04-23T01:44:33.345Z" },
{ url = "https://files.pythonhosted.org/packages/50/ba/4ee47d24c675932b3eb5b6de77d0f623c2db6dc466e7a1f199792c5e3e3a/lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf", size = 5204098, upload-time = "2025-04-23T01:44:35.809Z" },
{ url = "https://files.pythonhosted.org/packages/f2/0f/b4db6dfebfefe3abafe360f42a3d471881687fd449a0b86b70f1f2683438/lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28", size = 4930261, upload-time = "2025-04-23T01:44:38.271Z" },
{ url = "https://files.pythonhosted.org/packages/0b/1f/0bb1bae1ce056910f8db81c6aba80fec0e46c98d77c0f59298c70cd362a3/lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609", size = 5529621, upload-time = "2025-04-23T01:44:40.921Z" },
{ url = "https://files.pythonhosted.org/packages/21/f5/e7b66a533fc4a1e7fa63dd22a1ab2ec4d10319b909211181e1ab3e539295/lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4", size = 4983231, upload-time = "2025-04-23T01:44:43.871Z" },
{ url = "https://files.pythonhosted.org/packages/11/39/a38244b669c2d95a6a101a84d3c85ba921fea827e9e5483e93168bf1ccb2/lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7", size = 5084279, upload-time = "2025-04-23T01:44:46.632Z" },
{ url = "https://files.pythonhosted.org/packages/db/64/48cac242347a09a07740d6cee7b7fd4663d5c1abd65f2e3c60420e231b27/lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f", size = 4927405, upload-time = "2025-04-23T01:44:49.843Z" },
{ url = "https://files.pythonhosted.org/packages/98/89/97442835fbb01d80b72374f9594fe44f01817d203fa056e9906128a5d896/lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997", size = 5550169, upload-time = "2025-04-23T01:44:52.791Z" },
{ url = "https://files.pythonhosted.org/packages/f1/97/164ca398ee654eb21f29c6b582685c6c6b9d62d5213abc9b8380278e9c0a/lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c", size = 5062691, upload-time = "2025-04-23T01:44:56.108Z" },
{ url = "https://files.pythonhosted.org/packages/d0/bc/712b96823d7feb53482d2e4f59c090fb18ec7b0d0b476f353b3085893cda/lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b", size = 5133503, upload-time = "2025-04-23T01:44:59.222Z" },
{ url = "https://files.pythonhosted.org/packages/d4/55/a62a39e8f9da2a8b6002603475e3c57c870cd9c95fd4b94d4d9ac9036055/lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b", size = 4999346, upload-time = "2025-04-23T01:45:02.088Z" },
{ url = "https://files.pythonhosted.org/packages/ea/47/a393728ae001b92bb1a9e095e570bf71ec7f7fbae7688a4792222e56e5b9/lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563", size = 5627139, upload-time = "2025-04-23T01:45:04.582Z" },
{ url = "https://files.pythonhosted.org/packages/5e/5f/9dcaaad037c3e642a7ea64b479aa082968de46dd67a8293c541742b6c9db/lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5", size = 5465609, upload-time = "2025-04-23T01:45:07.649Z" },
{ url = "https://files.pythonhosted.org/packages/a7/0a/ebcae89edf27e61c45023005171d0ba95cb414ee41c045ae4caf1b8487fd/lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776", size = 5192285, upload-time = "2025-04-23T01:45:10.456Z" },
{ url = "https://files.pythonhosted.org/packages/42/ad/cc8140ca99add7d85c92db8b2354638ed6d5cc0e917b21d36039cb15a238/lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7", size = 3477507, upload-time = "2025-04-23T01:45:12.474Z" },
{ url = "https://files.pythonhosted.org/packages/e9/39/597ce090da1097d2aabd2f9ef42187a6c9c8546d67c419ce61b88b336c85/lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250", size = 3805104, upload-time = "2025-04-23T01:45:15.104Z" },
{ url = "https://files.pythonhosted.org/packages/81/2d/67693cc8a605a12e5975380d7ff83020dcc759351b5a066e1cced04f797b/lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9", size = 8083240, upload-time = "2025-04-23T01:45:18.566Z" },
{ url = "https://files.pythonhosted.org/packages/73/53/b5a05ab300a808b72e848efd152fe9c022c0181b0a70b8bca1199f1bed26/lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7", size = 4387685, upload-time = "2025-04-23T01:45:21.387Z" },
{ url = "https://files.pythonhosted.org/packages/d8/cb/1a3879c5f512bdcd32995c301886fe082b2edd83c87d41b6d42d89b4ea4d/lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa", size = 4991164, upload-time = "2025-04-23T01:45:23.849Z" },
{ url = "https://files.pythonhosted.org/packages/f9/94/bbc66e42559f9d04857071e3b3d0c9abd88579367fd2588a4042f641f57e/lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df", size = 4746206, upload-time = "2025-04-23T01:45:26.361Z" },
{ url = "https://files.pythonhosted.org/packages/66/95/34b0679bee435da2d7cae895731700e519a8dfcab499c21662ebe671603e/lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e", size = 5342144, upload-time = "2025-04-23T01:45:28.939Z" },
{ url = "https://files.pythonhosted.org/packages/e0/5d/abfcc6ab2fa0be72b2ba938abdae1f7cad4c632f8d552683ea295d55adfb/lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44", size = 4825124, upload-time = "2025-04-23T01:45:31.361Z" },
{ url = "https://files.pythonhosted.org/packages/5a/78/6bd33186c8863b36e084f294fc0a5e5eefe77af95f0663ef33809cc1c8aa/lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba", size = 4876520, upload-time = "2025-04-23T01:45:34.191Z" },
{ url = "https://files.pythonhosted.org/packages/3b/74/4d7ad4839bd0fc64e3d12da74fc9a193febb0fae0ba6ebd5149d4c23176a/lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba", size = 4765016, upload-time = "2025-04-23T01:45:36.7Z" },
{ url = "https://files.pythonhosted.org/packages/24/0d/0a98ed1f2471911dadfc541003ac6dd6879fc87b15e1143743ca20f3e973/lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c", size = 5362884, upload-time = "2025-04-23T01:45:39.291Z" },
{ url = "https://files.pythonhosted.org/packages/48/de/d4f7e4c39740a6610f0f6959052b547478107967362e8424e1163ec37ae8/lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8", size = 4902690, upload-time = "2025-04-23T01:45:42.386Z" },
{ url = "https://files.pythonhosted.org/packages/07/8c/61763abd242af84f355ca4ef1ee096d3c1b7514819564cce70fd18c22e9a/lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86", size = 4944418, upload-time = "2025-04-23T01:45:46.051Z" },
{ url = "https://files.pythonhosted.org/packages/f9/c5/6d7e3b63e7e282619193961a570c0a4c8a57fe820f07ca3fe2f6bd86608a/lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056", size = 4827092, upload-time = "2025-04-23T01:45:48.943Z" },
{ url = "https://files.pythonhosted.org/packages/71/4a/e60a306df54680b103348545706a98a7514a42c8b4fbfdcaa608567bb065/lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7", size = 5418231, upload-time = "2025-04-23T01:45:51.481Z" },
{ url = "https://files.pythonhosted.org/packages/27/f2/9754aacd6016c930875854f08ac4b192a47fe19565f776a64004aa167521/lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd", size = 5261798, upload-time = "2025-04-23T01:45:54.146Z" },
{ url = "https://files.pythonhosted.org/packages/38/a2/0c49ec6941428b1bd4f280650d7b11a0f91ace9db7de32eb7aa23bcb39ff/lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751", size = 4988195, upload-time = "2025-04-23T01:45:56.685Z" },
{ url = "https://files.pythonhosted.org/packages/7a/75/87a3963a08eafc46a86c1131c6e28a4de103ba30b5ae903114177352a3d7/lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4", size = 3474243, upload-time = "2025-04-23T01:45:58.863Z" },
{ url = "https://files.pythonhosted.org/packages/fa/f9/1f0964c4f6c2be861c50db380c554fb8befbea98c6404744ce243a3c87ef/lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539", size = 3815197, upload-time = "2025-04-23T01:46:01.096Z" },
{ url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392, upload-time = "2025-04-23T01:46:04.09Z" },
{ url = "https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103, upload-time = "2025-04-23T01:46:07.227Z" },
{ url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224, upload-time = "2025-04-23T01:46:10.237Z" },
{ url = "https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913, upload-time = "2025-04-23T01:46:12.757Z" },
{ url = "https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441, upload-time = "2025-04-23T01:46:16.037Z" },
{ url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165, upload-time = "2025-04-23T01:46:19.137Z" },
{ url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580, upload-time = "2025-04-23T01:46:21.963Z" },
{ url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size = 4759493, upload-time = "2025-04-23T01:46:24.316Z" },
{ url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679, upload-time = "2025-04-23T01:46:27.097Z" },
{ url = "https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691, upload-time = "2025-04-23T01:46:30.009Z" },
{ url = "https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075, upload-time = "2025-04-23T01:46:32.33Z" },
{ url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680, upload-time = "2025-04-23T01:46:34.852Z" },
{ url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253, upload-time = "2025-04-23T01:46:37.608Z" },
{ url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651, upload-time = "2025-04-23T01:46:40.183Z" },
{ url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315, upload-time = "2025-04-23T01:46:43.333Z" },
{ url = "https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149, upload-time = "2025-04-23T01:46:45.684Z" },
{ url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095, upload-time = "2025-04-23T01:46:48.521Z" },
{ url = "https://files.pythonhosted.org/packages/c6/b0/e4d1cbb8c078bc4ae44de9c6a79fec4e2b4151b1b4d50af71d799e76b177/lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55", size = 3892319, upload-time = "2025-04-23T01:49:22.069Z" },
{ url = "https://files.pythonhosted.org/packages/5b/aa/e2bdefba40d815059bcb60b371a36fbfcce970a935370e1b367ba1cc8f74/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740", size = 4211614, upload-time = "2025-04-23T01:49:24.599Z" },
{ url = "https://files.pythonhosted.org/packages/3c/5f/91ff89d1e092e7cfdd8453a939436ac116db0a665e7f4be0cd8e65c7dc5a/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5", size = 4306273, upload-time = "2025-04-23T01:49:27.355Z" },
{ url = "https://files.pythonhosted.org/packages/be/7c/8c3f15df2ca534589717bfd19d1e3482167801caedfa4d90a575facf68a6/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37", size = 4208552, upload-time = "2025-04-23T01:49:29.949Z" },
{ url = "https://files.pythonhosted.org/packages/7d/d8/9567afb1665f64d73fc54eb904e418d1138d7f011ed00647121b4dd60b38/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571", size = 4331091, upload-time = "2025-04-23T01:49:32.842Z" },
{ url = "https://files.pythonhosted.org/packages/f1/ab/fdbbd91d8d82bf1a723ba88ec3e3d76c022b53c391b0c13cad441cdb8f9e/lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4", size = 3487862, upload-time = "2025-04-23T01:49:36.296Z" },
{ url = "https://files.pythonhosted.org/packages/db/8a/f8192a08237ef2fb1b19733f709db88a4c43bc8ab8357f01cb41a27e7f6a/lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388", size = 8590589, upload-time = "2025-09-22T04:00:10.51Z" },
{ url = "https://files.pythonhosted.org/packages/12/64/27bcd07ae17ff5e5536e8d88f4c7d581b48963817a13de11f3ac3329bfa2/lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153", size = 4629671, upload-time = "2025-09-22T04:00:15.411Z" },
{ url = "https://files.pythonhosted.org/packages/02/5a/a7d53b3291c324e0b6e48f3c797be63836cc52156ddf8f33cd72aac78866/lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31", size = 4999961, upload-time = "2025-09-22T04:00:17.619Z" },
{ url = "https://files.pythonhosted.org/packages/f5/55/d465e9b89df1761674d8672bb3e4ae2c47033b01ec243964b6e334c6743f/lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9", size = 5157087, upload-time = "2025-09-22T04:00:19.868Z" },
{ url = "https://files.pythonhosted.org/packages/62/38/3073cd7e3e8dfc3ba3c3a139e33bee3a82de2bfb0925714351ad3d255c13/lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8", size = 5067620, upload-time = "2025-09-22T04:00:21.877Z" },
{ url = "https://files.pythonhosted.org/packages/4a/d3/1e001588c5e2205637b08985597827d3827dbaaece16348c8822bfe61c29/lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba", size = 5406664, upload-time = "2025-09-22T04:00:23.714Z" },
{ url = "https://files.pythonhosted.org/packages/20/cf/cab09478699b003857ed6ebfe95e9fb9fa3d3c25f1353b905c9b73cfb624/lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c", size = 5289397, upload-time = "2025-09-22T04:00:25.544Z" },
{ url = "https://files.pythonhosted.org/packages/a3/84/02a2d0c38ac9a8b9f9e5e1bbd3f24b3f426044ad618b552e9549ee91bd63/lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c", size = 4772178, upload-time = "2025-09-22T04:00:27.602Z" },
{ url = "https://files.pythonhosted.org/packages/56/87/e1ceadcc031ec4aa605fe95476892d0b0ba3b7f8c7dcdf88fdeff59a9c86/lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321", size = 5358148, upload-time = "2025-09-22T04:00:29.323Z" },
{ url = "https://files.pythonhosted.org/packages/fe/13/5bb6cf42bb228353fd4ac5f162c6a84fd68a4d6f67c1031c8cf97e131fc6/lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1", size = 5112035, upload-time = "2025-09-22T04:00:31.061Z" },
{ url = "https://files.pythonhosted.org/packages/e4/e2/ea0498552102e59834e297c5c6dff8d8ded3db72ed5e8aad77871476f073/lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34", size = 4799111, upload-time = "2025-09-22T04:00:33.11Z" },
{ url = "https://files.pythonhosted.org/packages/6a/9e/8de42b52a73abb8af86c66c969b3b4c2a96567b6ac74637c037d2e3baa60/lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a", size = 5351662, upload-time = "2025-09-22T04:00:35.237Z" },
{ url = "https://files.pythonhosted.org/packages/28/a2/de776a573dfb15114509a37351937c367530865edb10a90189d0b4b9b70a/lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c", size = 5314973, upload-time = "2025-09-22T04:00:37.086Z" },
{ url = "https://files.pythonhosted.org/packages/50/a0/3ae1b1f8964c271b5eec91db2043cf8c6c0bce101ebb2a633b51b044db6c/lxml-6.0.2-cp310-cp310-win32.whl", hash = "sha256:1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b", size = 3611953, upload-time = "2025-09-22T04:00:39.224Z" },
{ url = "https://files.pythonhosted.org/packages/d1/70/bd42491f0634aad41bdfc1e46f5cff98825fb6185688dc82baa35d509f1a/lxml-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0", size = 4032695, upload-time = "2025-09-22T04:00:41.402Z" },
{ url = "https://files.pythonhosted.org/packages/d2/d0/05c6a72299f54c2c561a6c6cbb2f512e047fca20ea97a05e57931f194ac4/lxml-6.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5", size = 3680051, upload-time = "2025-09-22T04:00:43.525Z" },
{ url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" },
{ url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" },
{ url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" },
{ url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" },
{ url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" },
{ url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" },
{ url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" },
{ url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" },
{ url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" },
{ url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" },
{ url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" },
{ url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" },
{ url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" },
{ url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" },
{ url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" },
{ url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" },
{ url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" },
{ url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" },
{ url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" },
{ url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" },
{ url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" },
{ url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" },
{ url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" },
{ url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" },
{ url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" },
{ url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" },
{ url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" },
{ url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" },
{ url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" },
{ url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" },
{ url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" },
{ url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" },
{ url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" },
{ url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" },
{ url = "https://files.pythonhosted.org/packages/e7/9c/780c9a8fce3f04690b374f72f41306866b0400b9d0fdf3e17aaa37887eed/lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6", size = 3939264, upload-time = "2025-09-22T04:04:32.892Z" },
{ url = "https://files.pythonhosted.org/packages/f5/5a/1ab260c00adf645d8bf7dec7f920f744b032f69130c681302821d5debea6/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba", size = 4216435, upload-time = "2025-09-22T04:04:34.907Z" },
{ url = "https://files.pythonhosted.org/packages/f2/37/565f3b3d7ffede22874b6d86be1a1763d00f4ea9fc5b9b6ccb11e4ec8612/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5", size = 4325913, upload-time = "2025-09-22T04:04:37.205Z" },
{ url = "https://files.pythonhosted.org/packages/22/ec/f3a1b169b2fb9d03467e2e3c0c752ea30e993be440a068b125fc7dd248b0/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4", size = 4269357, upload-time = "2025-09-22T04:04:39.322Z" },
{ url = "https://files.pythonhosted.org/packages/77/a2/585a28fe3e67daa1cf2f06f34490d556d121c25d500b10082a7db96e3bcd/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d", size = 4412295, upload-time = "2025-09-22T04:04:41.647Z" },
{ url = "https://files.pythonhosted.org/packages/7b/d9/a57dd8bcebd7c69386c20263830d4fa72d27e6b72a229ef7a48e88952d9a/lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d", size = 3516913, upload-time = "2025-09-22T04:04:43.602Z" },
{ url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" },
{ url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" },
{ url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" },
{ url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" },
{ url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" },
{ url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" },
]

[[package]]
@@ -760,14 +784,14 @@ wheels = [

[[package]]
name = "markdown-it-py"
-version = "3.0.0"
+version = "4.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mdurl" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" },
+    { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
]

[[package]]
@@ -790,73 +814,73 @@ wheels = [

[[package]]
name = "multidict"
-version = "6.6.3"
+version = "6.6.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/67/414933982bce2efce7cbcb3169eaaf901e0f25baec69432b4874dfb1f297/multidict-6.6.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a2be5b7b35271f7fff1397204ba6708365e3d773579fe2a30625e16c4b4ce817", size = 77017, upload-time = "2025-06-30T15:50:58.931Z" },
{ url = "https://files.pythonhosted.org/packages/8a/fe/d8a3ee1fad37dc2ef4f75488b0d9d4f25bf204aad8306cbab63d97bff64a/multidict-6.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12f4581d2930840295c461764b9a65732ec01250b46c6b2c510d7ee68872b140", size = 44897, upload-time = "2025-06-30T15:51:00.999Z" },
{ url = "https://files.pythonhosted.org/packages/1f/e0/265d89af8c98240265d82b8cbcf35897f83b76cd59ee3ab3879050fd8c45/multidict-6.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dd7793bab517e706c9ed9d7310b06c8672fd0aeee5781bfad612f56b8e0f7d14", size = 44574, upload-time = "2025-06-30T15:51:02.449Z" },
{ url = "https://files.pythonhosted.org/packages/e6/05/6b759379f7e8e04ccc97cfb2a5dcc5cdbd44a97f072b2272dc51281e6a40/multidict-6.6.3-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:72d8815f2cd3cf3df0f83cac3f3ef801d908b2d90409ae28102e0553af85545a", size = 225729, upload-time = "2025-06-30T15:51:03.794Z" },
{ url = "https://files.pythonhosted.org/packages/4e/f5/8d5a15488edd9a91fa4aad97228d785df208ed6298580883aa3d9def1959/multidict-6.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:531e331a2ee53543ab32b16334e2deb26f4e6b9b28e41f8e0c87e99a6c8e2d69", size = 242515, upload-time = "2025-06-30T15:51:05.002Z" },
{ url = "https://files.pythonhosted.org/packages/6e/b5/a8f317d47d0ac5bb746d6d8325885c8967c2a8ce0bb57be5399e3642cccb/multidict-6.6.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:42ca5aa9329a63be8dc49040f63817d1ac980e02eeddba763a9ae5b4027b9c9c", size = 222224, upload-time = "2025-06-30T15:51:06.148Z" },
{ url = "https://files.pythonhosted.org/packages/76/88/18b2a0d5e80515fa22716556061189c2853ecf2aa2133081ebbe85ebea38/multidict-6.6.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:208b9b9757060b9faa6f11ab4bc52846e4f3c2fb8b14d5680c8aac80af3dc751", size = 253124, upload-time = "2025-06-30T15:51:07.375Z" },
{ url = "https://files.pythonhosted.org/packages/62/bf/ebfcfd6b55a1b05ef16d0775ae34c0fe15e8dab570d69ca9941073b969e7/multidict-6.6.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:acf6b97bd0884891af6a8b43d0f586ab2fcf8e717cbd47ab4bdddc09e20652d8", size = 251529, upload-time = "2025-06-30T15:51:08.691Z" },
{ url = "https://files.pythonhosted.org/packages/44/11/780615a98fd3775fc309d0234d563941af69ade2df0bb82c91dda6ddaea1/multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:68e9e12ed00e2089725669bdc88602b0b6f8d23c0c95e52b95f0bc69f7fe9b55", size = 241627, upload-time = "2025-06-30T15:51:10.605Z" },
{ url = "https://files.pythonhosted.org/packages/28/3d/35f33045e21034b388686213752cabc3a1b9d03e20969e6fa8f1b1d82db1/multidict-6.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:05db2f66c9addb10cfa226e1acb363450fab2ff8a6df73c622fefe2f5af6d4e7", size = 239351, upload-time = "2025-06-30T15:51:12.18Z" },
{ url = "https://files.pythonhosted.org/packages/6e/cc/ff84c03b95b430015d2166d9aae775a3985d757b94f6635010d0038d9241/multidict-6.6.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:0db58da8eafb514db832a1b44f8fa7906fdd102f7d982025f816a93ba45e3dcb", size = 233429, upload-time = "2025-06-30T15:51:13.533Z" },
{ url = "https://files.pythonhosted.org/packages/2e/f0/8cd49a0b37bdea673a4b793c2093f2f4ba8e7c9d6d7c9bd672fd6d38cd11/multidict-6.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:14117a41c8fdb3ee19c743b1c027da0736fdb79584d61a766da53d399b71176c", size = 243094, upload-time = "2025-06-30T15:51:14.815Z" },
{ url = "https://files.pythonhosted.org/packages/96/19/5d9a0cfdafe65d82b616a45ae950975820289069f885328e8185e64283c2/multidict-6.6.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:877443eaaabcd0b74ff32ebeed6f6176c71850feb7d6a1d2db65945256ea535c", size = 248957, upload-time = "2025-06-30T15:51:16.076Z" },
{ url = "https://files.pythonhosted.org/packages/e6/dc/c90066151da87d1e489f147b9b4327927241e65f1876702fafec6729c014/multidict-6.6.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:70b72e749a4f6e7ed8fb334fa8d8496384840319512746a5f42fa0aec79f4d61", size = 243590, upload-time = "2025-06-30T15:51:17.413Z" },
{ url = "https://files.pythonhosted.org/packages/ec/39/458afb0cccbb0ee9164365273be3e039efddcfcb94ef35924b7dbdb05db0/multidict-6.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43571f785b86afd02b3855c5ac8e86ec921b760298d6f82ff2a61daf5a35330b", size = 237487, upload-time = "2025-06-30T15:51:19.039Z" },
{ url = "https://files.pythonhosted.org/packages/35/38/0016adac3990426610a081787011177e661875546b434f50a26319dc8372/multidict-6.6.3-cp310-cp310-win32.whl", hash = "sha256:20c5a0c3c13a15fd5ea86c42311859f970070e4e24de5a550e99d7c271d76318", size = 41390, upload-time = "2025-06-30T15:51:20.362Z" },
{ url = "https://files.pythonhosted.org/packages/f3/d2/17897a8f3f2c5363d969b4c635aa40375fe1f09168dc09a7826780bfb2a4/multidict-6.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab0a34a007704c625e25a9116c6770b4d3617a071c8a7c30cd338dfbadfe6485", size = 45954, upload-time = "2025-06-30T15:51:21.383Z" },
{ url = "https://files.pythonhosted.org/packages/2d/5f/d4a717c1e457fe44072e33fa400d2b93eb0f2819c4d669381f925b7cba1f/multidict-6.6.3-cp310-cp310-win_arm64.whl", hash = "sha256:769841d70ca8bdd140a715746199fc6473414bd02efd678d75681d2d6a8986c5", size = 42981, upload-time = "2025-06-30T15:51:22.809Z" },
{ url = "https://files.pythonhosted.org/packages/08/f0/1a39863ced51f639c81a5463fbfa9eb4df59c20d1a8769ab9ef4ca57ae04/multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c", size = 76445, upload-time = "2025-06-30T15:51:24.01Z" },
{ url = "https://files.pythonhosted.org/packages/c9/0e/a7cfa451c7b0365cd844e90b41e21fab32edaa1e42fc0c9f68461ce44ed7/multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df", size = 44610, upload-time = "2025-06-30T15:51:25.158Z" },
{ url = "https://files.pythonhosted.org/packages/c6/bb/a14a4efc5ee748cc1904b0748be278c31b9295ce5f4d2ef66526f410b94d/multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d", size = 44267, upload-time = "2025-06-30T15:51:26.326Z" },
{ url = "https://files.pythonhosted.org/packages/c2/f8/410677d563c2d55e063ef74fe578f9d53fe6b0a51649597a5861f83ffa15/multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539", size = 230004, upload-time = "2025-06-30T15:51:27.491Z" },
{ url = "https://files.pythonhosted.org/packages/fd/df/2b787f80059314a98e1ec6a4cc7576244986df3e56b3c755e6fc7c99e038/multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462", size = 247196, upload-time = "2025-06-30T15:51:28.762Z" },
{ url = "https://files.pythonhosted.org/packages/05/f2/f9117089151b9a8ab39f9019620d10d9718eec2ac89e7ca9d30f3ec78e96/multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9", size = 225337, upload-time = "2025-06-30T15:51:30.025Z" },
{ url = "https://files.pythonhosted.org/packages/93/2d/7115300ec5b699faa152c56799b089a53ed69e399c3c2d528251f0aeda1a/multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7", size = 257079, upload-time = "2025-06-30T15:51:31.716Z" },
{ url = "https://files.pythonhosted.org/packages/15/ea/ff4bab367623e39c20d3b07637225c7688d79e4f3cc1f3b9f89867677f9a/multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9", size = 255461, upload-time = "2025-06-30T15:51:33.029Z" },
{ url = "https://files.pythonhosted.org/packages/74/07/2c9246cda322dfe08be85f1b8739646f2c4c5113a1422d7a407763422ec4/multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821", size = 246611, upload-time = "2025-06-30T15:51:34.47Z" },
{ url = "https://files.pythonhosted.org/packages/a8/62/279c13d584207d5697a752a66ffc9bb19355a95f7659140cb1b3cf82180e/multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d", size = 243102, upload-time = "2025-06-30T15:51:36.525Z" },
{ url = "https://files.pythonhosted.org/packages/69/cc/e06636f48c6d51e724a8bc8d9e1db5f136fe1df066d7cafe37ef4000f86a/multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6", size = 238693, upload-time = "2025-06-30T15:51:38.278Z" },
{ url = "https://files.pythonhosted.org/packages/89/a4/66c9d8fb9acf3b226cdd468ed009537ac65b520aebdc1703dd6908b19d33/multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430", size = 246582, upload-time = "2025-06-30T15:51:39.709Z" },
{ url = "https://files.pythonhosted.org/packages/cf/01/c69e0317be556e46257826d5449feb4e6aa0d18573e567a48a2c14156f1f/multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b", size = 253355, upload-time = "2025-06-30T15:51:41.013Z" },
{ url = "https://files.pythonhosted.org/packages/c0/da/9cc1da0299762d20e626fe0042e71b5694f9f72d7d3f9678397cbaa71b2b/multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56", size = 247774, upload-time = "2025-06-30T15:51:42.291Z" },
{ url = "https://files.pythonhosted.org/packages/e6/91/b22756afec99cc31105ddd4a52f95ab32b1a4a58f4d417979c570c4a922e/multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183", size = 242275, upload-time = "2025-06-30T15:51:43.642Z" },
{ url = "https://files.pythonhosted.org/packages/be/f1/adcc185b878036a20399d5be5228f3cbe7f823d78985d101d425af35c800/multidict-6.6.3-cp311-cp311-win32.whl", hash = "sha256:9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5", size = 41290, upload-time = "2025-06-30T15:51:45.264Z" },
{ url = "https://files.pythonhosted.org/packages/e0/d4/27652c1c6526ea6b4f5ddd397e93f4232ff5de42bea71d339bc6a6cc497f/multidict-6.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2", size = 45942, upload-time = "2025-06-30T15:51:46.377Z" },
{ url = "https://files.pythonhosted.org/packages/16/18/23f4932019804e56d3c2413e237f866444b774b0263bcb81df2fdecaf593/multidict-6.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb", size = 42880, upload-time = "2025-06-30T15:51:47.561Z" },
{ url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = "2025-06-30T15:51:48.728Z" },
{ url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" },
{ url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, upload-time = "2025-06-30T15:51:51.331Z" },
{ url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" },
{ url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" },
{ url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = "2025-06-30T15:51:55.672Z" },
{ url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" },
{ url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" },
{ url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" },
{ url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" },
{ url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" },
{ url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, upload-time = "2025-06-30T15:52:06.002Z" },
{ url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" },
{ url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603, upload-time = "2025-06-30T15:52:09.58Z" },
{ url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" },
{ url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" },
{ url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" },
{ url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" },
{ url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" },
{ url = "https://files.pythonhosted.org/packages/eb/6b/86f353088c1358e76fd30b0146947fddecee812703b604ee901e85cd2a80/multidict-6.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f", size = 77054, upload-time = "2025-08-11T12:06:02.99Z" },
{ url = "https://files.pythonhosted.org/packages/19/5d/c01dc3d3788bb877bd7f5753ea6eb23c1beeca8044902a8f5bfb54430f63/multidict-6.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb", size = 44914, upload-time = "2025-08-11T12:06:05.264Z" },
{ url = "https://files.pythonhosted.org/packages/46/44/964dae19ea42f7d3e166474d8205f14bb811020e28bc423d46123ddda763/multidict-6.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495", size = 44601, upload-time = "2025-08-11T12:06:06.627Z" },
{ url = "https://files.pythonhosted.org/packages/31/20/0616348a1dfb36cb2ab33fc9521de1f27235a397bf3f59338e583afadd17/multidict-6.6.4-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8", size = 224821, upload-time = "2025-08-11T12:06:08.06Z" },
{ url = "https://files.pythonhosted.org/packages/14/26/5d8923c69c110ff51861af05bd27ca6783011b96725d59ccae6d9daeb627/multidict-6.6.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7", size = 242608, upload-time = "2025-08-11T12:06:09.697Z" },
{ url = "https://files.pythonhosted.org/packages/5c/cc/e2ad3ba9459aa34fa65cf1f82a5c4a820a2ce615aacfb5143b8817f76504/multidict-6.6.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796", size = 222324, upload-time = "2025-08-11T12:06:10.905Z" },
{ url = "https://files.pythonhosted.org/packages/19/db/4ed0f65701afbc2cb0c140d2d02928bb0fe38dd044af76e58ad7c54fd21f/multidict-6.6.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db", size = 253234, upload-time = "2025-08-11T12:06:12.658Z" },
{ url = "https://files.pythonhosted.org/packages/94/c1/5160c9813269e39ae14b73debb907bfaaa1beee1762da8c4fb95df4764ed/multidict-6.6.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0", size = 251613, upload-time = "2025-08-11T12:06:13.97Z" },
{ url = "https://files.pythonhosted.org/packages/05/a9/48d1bd111fc2f8fb98b2ed7f9a115c55a9355358432a19f53c0b74d8425d/multidict-6.6.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877", size = 241649, upload-time = "2025-08-11T12:06:15.204Z" },
{ url = "https://files.pythonhosted.org/packages/85/2a/f7d743df0019408768af8a70d2037546a2be7b81fbb65f040d76caafd4c5/multidict-6.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace", size = 239238, upload-time = "2025-08-11T12:06:16.467Z" },
{ url = "https://files.pythonhosted.org/packages/cb/b8/4f4bb13323c2d647323f7919201493cf48ebe7ded971717bfb0f1a79b6bf/multidict-6.6.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6", size = 233517, upload-time = "2025-08-11T12:06:18.107Z" },
{ url = "https://files.pythonhosted.org/packages/33/29/4293c26029ebfbba4f574febd2ed01b6f619cfa0d2e344217d53eef34192/multidict-6.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb", size = 243122, upload-time = "2025-08-11T12:06:19.361Z" },
{ url = "https://files.pythonhosted.org/packages/20/60/a1c53628168aa22447bfde3a8730096ac28086704a0d8c590f3b63388d0c/multidict-6.6.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb", size = 248992, upload-time = "2025-08-11T12:06:20.661Z" },
{ url = "https://files.pythonhosted.org/packages/a3/3b/55443a0c372f33cae5d9ec37a6a973802884fa0ab3586659b197cf8cc5e9/multidict-6.6.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987", size = 243708, upload-time = "2025-08-11T12:06:21.891Z" },
{ url = "https://files.pythonhosted.org/packages/7c/60/a18c6900086769312560b2626b18e8cca22d9e85b1186ba77f4755b11266/multidict-6.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f", size = 237498, upload-time = "2025-08-11T12:06:23.206Z" },
{ url = "https://files.pythonhosted.org/packages/11/3d/8bdd8bcaff2951ce2affccca107a404925a2beafedd5aef0b5e4a71120a6/multidict-6.6.4-cp310-cp310-win32.whl", hash = "sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f", size = 41415, upload-time = "2025-08-11T12:06:24.77Z" },
{ url = "https://files.pythonhosted.org/packages/c0/53/cab1ad80356a4cd1b685a254b680167059b433b573e53872fab245e9fc95/multidict-6.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0", size = 46046, upload-time = "2025-08-11T12:06:25.893Z" },
{ url = "https://files.pythonhosted.org/packages/cf/9a/874212b6f5c1c2d870d0a7adc5bb4cfe9b0624fa15cdf5cf757c0f5087ae/multidict-6.6.4-cp310-cp310-win_arm64.whl", hash = "sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729", size = 43147, upload-time = "2025-08-11T12:06:27.534Z" },
{ url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" },
{ url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" },
{ url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" },
{ url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" },
{ url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" },
{ url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" },
{ url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" },
{ url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" },
{ url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" },
{ url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" },
{ url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" },
{ url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" },
{ url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" },
{ url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" },
{ url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" },
{ url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" },
{ url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" },
{ url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" },
{ url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" },
{ url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" },
{ url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" },
{ url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" },
{ url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" },
{ url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" },
{ url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" },
{ url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" },
{ url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" },
{ url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" },
{ url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" },
{ url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" },
{ url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" },
{ url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" },
{ url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" },
{ url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" },
{ url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" },
{ url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" },
{ url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" },
]

[[package]]
name = "mypy"
version = "1.17.0"
version = "1.17.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mypy-extensions" },
@@ -864,27 +888,27 @@ dependencies = [
{ name = "tomli", marker = "python_full_version < '3.11'" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1e/e3/034322d5a779685218ed69286c32faa505247f1f096251ef66c8fd203b08/mypy-1.17.0.tar.gz", hash = "sha256:e5d7ccc08ba089c06e2f5629c660388ef1fee708444f1dee0b9203fa031dee03", size = 3352114, upload-time = "2025-07-14T20:34:30.181Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/31/e762baa3b73905c856d45ab77b4af850e8159dffffd86a52879539a08c6b/mypy-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8e08de6138043108b3b18f09d3f817a4783912e48828ab397ecf183135d84d6", size = 10998313, upload-time = "2025-07-14T20:33:24.519Z" },
{ url = "https://files.pythonhosted.org/packages/1c/c1/25b2f0d46fb7e0b5e2bee61ec3a47fe13eff9e3c2f2234f144858bbe6485/mypy-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce4a17920ec144647d448fc43725b5873548b1aae6c603225626747ededf582d", size = 10128922, upload-time = "2025-07-14T20:34:06.414Z" },
{ url = "https://files.pythonhosted.org/packages/02/78/6d646603a57aa8a2886df1b8881fe777ea60f28098790c1089230cd9c61d/mypy-1.17.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ff25d151cc057fdddb1cb1881ef36e9c41fa2a5e78d8dd71bee6e4dcd2bc05b", size = 11913524, upload-time = "2025-07-14T20:33:19.109Z" },
{ url = "https://files.pythonhosted.org/packages/4f/19/dae6c55e87ee426fb76980f7e78484450cad1c01c55a1dc4e91c930bea01/mypy-1.17.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93468cf29aa9a132bceb103bd8475f78cacde2b1b9a94fd978d50d4bdf616c9a", size = 12650527, upload-time = "2025-07-14T20:32:44.095Z" },
{ url = "https://files.pythonhosted.org/packages/86/e1/f916845a235235a6c1e4d4d065a3930113767001d491b8b2e1b61ca56647/mypy-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:98189382b310f16343151f65dd7e6867386d3e35f7878c45cfa11383d175d91f", size = 12897284, upload-time = "2025-07-14T20:33:38.168Z" },
{ url = "https://files.pythonhosted.org/packages/ae/dc/414760708a4ea1b096bd214d26a24e30ac5e917ef293bc33cdb6fe22d2da/mypy-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:c004135a300ab06a045c1c0d8e3f10215e71d7b4f5bb9a42ab80236364429937", size = 9506493, upload-time = "2025-07-14T20:34:01.093Z" },
{ url = "https://files.pythonhosted.org/packages/d4/24/82efb502b0b0f661c49aa21cfe3e1999ddf64bf5500fc03b5a1536a39d39/mypy-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d4fe5c72fd262d9c2c91c1117d16aac555e05f5beb2bae6a755274c6eec42be", size = 10914150, upload-time = "2025-07-14T20:31:51.985Z" },
{ url = "https://files.pythonhosted.org/packages/03/96/8ef9a6ff8cedadff4400e2254689ca1dc4b420b92c55255b44573de10c54/mypy-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d96b196e5c16f41b4f7736840e8455958e832871990c7ba26bf58175e357ed61", size = 10039845, upload-time = "2025-07-14T20:32:30.527Z" },
{ url = "https://files.pythonhosted.org/packages/df/32/7ce359a56be779d38021d07941cfbb099b41411d72d827230a36203dbb81/mypy-1.17.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:73a0ff2dd10337ceb521c080d4147755ee302dcde6e1a913babd59473904615f", size = 11837246, upload-time = "2025-07-14T20:32:01.28Z" },
{ url = "https://files.pythonhosted.org/packages/82/16/b775047054de4d8dbd668df9137707e54b07fe18c7923839cd1e524bf756/mypy-1.17.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24cfcc1179c4447854e9e406d3af0f77736d631ec87d31c6281ecd5025df625d", size = 12571106, upload-time = "2025-07-14T20:34:26.942Z" },
{ url = "https://files.pythonhosted.org/packages/a1/cf/fa33eaf29a606102c8d9ffa45a386a04c2203d9ad18bf4eef3e20c43ebc8/mypy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c56f180ff6430e6373db7a1d569317675b0a451caf5fef6ce4ab365f5f2f6c3", size = 12759960, upload-time = "2025-07-14T20:33:42.882Z" },
{ url = "https://files.pythonhosted.org/packages/94/75/3f5a29209f27e739ca57e6350bc6b783a38c7621bdf9cac3ab8a08665801/mypy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:eafaf8b9252734400f9b77df98b4eee3d2eecab16104680d51341c75702cad70", size = 9503888, upload-time = "2025-07-14T20:32:34.392Z" },
{ url = "https://files.pythonhosted.org/packages/12/e9/e6824ed620bbf51d3bf4d6cbbe4953e83eaf31a448d1b3cfb3620ccb641c/mypy-1.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f986f1cab8dbec39ba6e0eaa42d4d3ac6686516a5d3dccd64be095db05ebc6bb", size = 11086395, upload-time = "2025-07-14T20:34:11.452Z" },
{ url = "https://files.pythonhosted.org/packages/ba/51/a4afd1ae279707953be175d303f04a5a7bd7e28dc62463ad29c1c857927e/mypy-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:51e455a54d199dd6e931cd7ea987d061c2afbaf0960f7f66deef47c90d1b304d", size = 10120052, upload-time = "2025-07-14T20:33:09.897Z" },
{ url = "https://files.pythonhosted.org/packages/8a/71/19adfeac926ba8205f1d1466d0d360d07b46486bf64360c54cb5a2bd86a8/mypy-1.17.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3204d773bab5ff4ebbd1f8efa11b498027cd57017c003ae970f310e5b96be8d8", size = 11861806, upload-time = "2025-07-14T20:32:16.028Z" },
{ url = "https://files.pythonhosted.org/packages/0b/64/d6120eca3835baf7179e6797a0b61d6c47e0bc2324b1f6819d8428d5b9ba/mypy-1.17.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1051df7ec0886fa246a530ae917c473491e9a0ba6938cfd0ec2abc1076495c3e", size = 12744371, upload-time = "2025-07-14T20:33:33.503Z" },
{ url = "https://files.pythonhosted.org/packages/1f/dc/56f53b5255a166f5bd0f137eed960e5065f2744509dfe69474ff0ba772a5/mypy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f773c6d14dcc108a5b141b4456b0871df638eb411a89cd1c0c001fc4a9d08fc8", size = 12914558, upload-time = "2025-07-14T20:33:56.961Z" },
{ url = "https://files.pythonhosted.org/packages/69/ac/070bad311171badc9add2910e7f89271695a25c136de24bbafc7eded56d5/mypy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:1619a485fd0e9c959b943c7b519ed26b712de3002d7de43154a489a2d0fd817d", size = 9585447, upload-time = "2025-07-14T20:32:20.594Z" },
{ url = "https://files.pythonhosted.org/packages/e3/fc/ee058cc4316f219078464555873e99d170bde1d9569abd833300dbeb484a/mypy-1.17.0-py3-none-any.whl", hash = "sha256:15d9d0018237ab058e5de3d8fce61b6fa72cc59cc78fd91f1b474bce12abf496", size = 2283195, upload-time = "2025-07-14T20:31:54.753Z" },
{ url = "https://files.pythonhosted.org/packages/77/a9/3d7aa83955617cdf02f94e50aab5c830d205cfa4320cf124ff64acce3a8e/mypy-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972", size = 11003299, upload-time = "2025-07-31T07:54:06.425Z" },
{ url = "https://files.pythonhosted.org/packages/83/e8/72e62ff837dd5caaac2b4a5c07ce769c8e808a00a65e5d8f94ea9c6f20ab/mypy-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7", size = 10125451, upload-time = "2025-07-31T07:53:52.974Z" },
{ url = "https://files.pythonhosted.org/packages/7d/10/f3f3543f6448db11881776f26a0ed079865926b0c841818ee22de2c6bbab/mypy-1.17.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df", size = 11916211, upload-time = "2025-07-31T07:53:18.879Z" },
{ url = "https://files.pythonhosted.org/packages/06/bf/63e83ed551282d67bb3f7fea2cd5561b08d2bb6eb287c096539feb5ddbc5/mypy-1.17.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390", size = 12652687, upload-time = "2025-07-31T07:53:30.544Z" },
{ url = "https://files.pythonhosted.org/packages/69/66/68f2eeef11facf597143e85b694a161868b3b006a5fbad50e09ea117ef24/mypy-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94", size = 12896322, upload-time = "2025-07-31T07:53:50.74Z" },
{ url = "https://files.pythonhosted.org/packages/a3/87/8e3e9c2c8bd0d7e071a89c71be28ad088aaecbadf0454f46a540bda7bca6/mypy-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b", size = 9507962, upload-time = "2025-07-31T07:53:08.431Z" },
{ url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009, upload-time = "2025-07-31T07:53:23.037Z" },
{ url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482, upload-time = "2025-07-31T07:53:26.151Z" },
{ url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883, upload-time = "2025-07-31T07:53:47.948Z" },
{ url = "https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215, upload-time = "2025-07-31T07:54:04.031Z" },
{ url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956, upload-time = "2025-07-31T07:53:36.263Z" },
{ url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307, upload-time = "2025-07-31T07:53:59.734Z" },
{ url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" },
{ url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" },
{ url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" },
{ url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" },
{ url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" },
{ url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" },
{ url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" },
]

[[package]]
@@ -1102,6 +1126,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f2/5f/af7da8e6f1e42b52f44a24d08b8e4c726207434e2593732d39e7af5e7256/pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe", size = 1806478, upload-time = "2025-05-17T17:23:26.066Z" },
]

[[package]]
name = "pyexecjs"
version = "1.5.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/8e/aedef81641c8dca6fd0fb7294de5bed9c45f3397d67fddf755c1042c2642/PyExecJS-1.5.1.tar.gz", hash = "sha256:34cc1d070976918183ff7bdc0ad71f8157a891c92708c00c5fbbff7a769f505c", size = 13344, upload-time = "2018-01-18T04:33:55.126Z" }

[[package]]
name = "pygments"
version = "2.19.2"
@@ -1154,21 +1187,22 @@ wheels = [

[[package]]
name = "pyplayready"
version = "0.6.0"
version = "0.6.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
{ name = "click" },
{ name = "construct" },
{ name = "cryptography" },
{ name = "ecpy" },
{ name = "lxml" },
{ name = "pycryptodome" },
{ name = "pyyaml" },
{ name = "requests" },
{ name = "xmltodict" },
]
sdist = { url = "https://files.pythonhosted.org/packages/39/5f/aba36faf0f7feafa4b82bb9e38a0d8c70048e068416a931ee54a565ee3db/pyplayready-0.6.0.tar.gz", hash = "sha256:2b874596a8532efa5d7f2380e8de2cdb611a96cd69b0da5182ab1902083566e9", size = 99157, upload-time = "2025-02-06T13:16:02.763Z" }
sdist = { url = "https://files.pythonhosted.org/packages/53/f2/6d75b6d10a8361b53a2acbe959d51aa586418e9af497381a9f5c436ca488/pyplayready-0.6.3.tar.gz", hash = "sha256:b9b82a32c2cced9c43f910eb1fb891545f1491dc063c1eb9c20634e2417eda76", size = 58019, upload-time = "2025-08-20T19:32:43.642Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/eb/85/a5b7dba7d5420c8f5d133123376a135fda69973f3e8d7c05c58a516a54e5/pyplayready-0.6.0-py3-none-any.whl", hash = "sha256:7f85ba94f2ae0d0c964d2c84e3a4f99bfa947fb120069c70af6c17f83ed6a7f3", size = 114232, upload-time = "2025-02-06T13:16:01.448Z" },
{ url = "https://files.pythonhosted.org/packages/5b/7f/64d5ff5d765f9f2138ee7cc196fd9401f9eae0fb514c66660ad4e56584fa/pyplayready-0.6.3-py3-none-any.whl", hash = "sha256:82f35434e790a7da21df57ec053a2924ceb63622c5a6c5ff9f0fa03db0531c57", size = 66162, upload-time = "2025-08-20T19:32:42.62Z" },
]

[[package]]
@@ -1180,6 +1214,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload-time = "2019-09-20T02:06:22.938Z" },
]

[[package]]
name = "pysubs2"
version = "1.8.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/31/4a/becf78d9d3df56e6c4a9c50b83794e5436b6c5ab6dd8a3f934e94c89338c/pysubs2-1.8.0.tar.gz", hash = "sha256:3397bb58a4a15b1325ba2ae3fd4d7c214e2c0ddb9f33190d6280d783bb433b20", size = 1130048, upload-time = "2024-12-24T12:39:47.769Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/09/0fc0719162e5ad723f71d41cf336f18b6b5054d70dc0fe42ace6b4d2bdc9/pysubs2-1.8.0-py3-none-any.whl", hash = "sha256:05716f5039a9ebe32cd4d7673f923cf36204f3a3e99987f823ab83610b7035a0", size = 43516, upload-time = "2024-12-24T12:39:44.469Z" },
]

[[package]]
name = "pywidevine"
version = "1.8.0"
@@ -1258,6 +1301,18 @@ socks = [
{ name = "pysocks" },
]

[[package]]
name = "rfc3339-validator"
version = "0.1.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" },
]

[[package]]
name = "rich"
version = "13.9.4"
@@ -1364,6 +1419,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" },
]

[[package]]
name = "six"
version = "1.17.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]

[[package]]
name = "sniffio"
version = "1.3.1"
@@ -1391,6 +1455,26 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" },
]

[[package]]
name = "srt"
version = "3.5.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/66/b7/4a1bc231e0681ebf339337b0cd05b91dc6a0d701fa852bb812e244b7a030/srt-3.5.3.tar.gz", hash = "sha256:4884315043a4f0740fd1f878ed6caa376ac06d70e135f306a6dc44632eed0cc0", size = 28296, upload-time = "2023-03-28T02:35:44.007Z" }

[[package]]
name = "subby"
version = "0.3.23"
source = { git = "https://github.com/vevv/subby.git?rev=5a925c367ffb3f5e53fd114ae222d3be1fdff35d#5a925c367ffb3f5e53fd114ae222d3be1fdff35d" }
dependencies = [
{ name = "beautifulsoup4" },
{ name = "click" },
{ name = "langcodes" },
{ name = "lxml" },
{ name = "pymp4" },
{ name = "srt" },
{ name = "tinycss" },
]

[[package]]
name = "subtitle-filter"
version = "1.5.0"
@@ -1400,6 +1484,12 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/10/40/c5d138e1f302b25240678943422a646feea52bab1f594c669c101c5e5070/subtitle_filter-1.5.0-py3-none-any.whl", hash = "sha256:6b506315be64870fba2e6894a70d76389407ce58c325fdf05129e0530f0a0f5b", size = 8346, upload-time = "2024-08-01T22:42:47.787Z" },
]

[[package]]
name = "tinycss"
version = "0.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/05/59/af583fff6236c7d2f94f8175c40ce501dcefb8d1b42e4bb7a2622dff689e/tinycss-0.4.tar.gz", hash = "sha256:12306fb50e5e9e7eaeef84b802ed877488ba80e35c672867f548c0924a76716e", size = 87759, upload-time = "2016-09-23T16:30:14.894Z" }

[[package]]
name = "tomli"
version = "2.2.1"
@@ -1449,14 +1539,14 @@ wheels = [

[[package]]
name = "types-requests"
version = "2.32.4.20250611"
version = "2.32.4.20250809"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6d/7f/73b3a04a53b0fd2a911d4ec517940ecd6600630b559e4505cc7b68beb5a0/types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826", size = 23118, upload-time = "2025-06-11T03:11:41.272Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3d/ea/0be9258c5a4fa1ba2300111aa5a0767ee6d18eb3fd20e91616c12082284d/types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072", size = 20643, upload-time = "2025-06-11T03:11:40.186Z" },
{ url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" },
]

[[package]]
@@ -1479,9 +1569,10 @@ wheels = [

[[package]]
name = "unshackle"
version = "1.0.1"
version = "2.0.0"
source = { editable = "." }
dependencies = [
{ name = "aiohttp-swagger3" },
{ name = "appdirs" },
{ name = "brotli" },
{ name = "chardet" },
@@ -1503,6 +1594,7 @@ dependencies = [
{ name = "pymp4" },
{ name = "pymysql" },
{ name = "pyplayready" },
{ name = "pysubs2" },
{ name = "pywidevine", extra = ["serve"] },
{ name = "pyyaml" },
{ name = "requests", extra = ["socks"] },
@@ -1510,6 +1602,7 @@ dependencies = [
{ name = "rlaphoenix-m3u8" },
{ name = "ruamel-yaml" },
{ name = "sortedcontainers" },
{ name = "subby" },
{ name = "subtitle-filter" },
{ name = "unidecode" },
{ name = "urllib3" },
@@ -1530,6 +1623,7 @@ dev = [

[package.metadata]
requires-dist = [
{ name = "aiohttp-swagger3", specifier = ">=0.9.0,<1" },
{ name = "appdirs", specifier = ">=1.4.4,<2" },
{ name = "brotli", specifier = ">=1.1.0,<2" },
{ name = "chardet", specifier = ">=5.2.0,<6" },
@@ -1540,8 +1634,8 @@ requires-dist = [
{ name = "curl-cffi", specifier = ">=0.7.0b4,<0.8" },
{ name = "httpx", specifier = ">=0.28.1,<0.29" },
{ name = "jsonpickle", specifier = ">=3.0.4,<4" },
{ name = "langcodes", extras = ["data"], specifier = ">=3.4.0,<4" },
{ name = "lxml", specifier = ">=5.2.1,<6" },
{ name = "langcodes", specifier = ">=3.4.0,<4" },
{ name = "lxml", specifier = ">=5.2.1,<7" },
{ name = "pproxy", specifier = ">=2.7.9,<3" },
{ name = "protobuf", specifier = ">=4.25.3,<5" },
{ name = "pycaption", specifier = ">=2.2.6,<3" },
@@ -1550,7 +1644,8 @@ requires-dist = [
{ name = "pymediainfo", specifier = ">=6.1.0,<7" },
{ name = "pymp4", specifier = ">=1.4.0,<2" },
{ name = "pymysql", specifier = ">=1.1.0,<2" },
{ name = "pyplayready", specifier = ">=0.6.0,<0.7" },
{ name = "pyplayready", specifier = ">=0.6.3,<0.7" },
{ name = "pysubs2", specifier = ">=1.7.0,<2" },
{ name = "pywidevine", extras = ["serve"], specifier = ">=1.8.0,<2" },
{ name = "pyyaml", specifier = ">=6.0.1,<7" },
{ name = "requests", extras = ["socks"], specifier = ">=2.31.0,<3" },
@@ -1558,6 +1653,7 @@ requires-dist = [
{ name = "rlaphoenix-m3u8", specifier = ">=3.4.0,<4" },
{ name = "ruamel-yaml", specifier = ">=0.18.6,<0.19" },
{ name = "sortedcontainers", specifier = ">=2.4.0,<3" },
{ name = "subby", git = "https://github.com/vevv/subby.git?rev=5a925c367ffb3f5e53fd114ae222d3be1fdff35d" },
{ name = "subtitle-filter", specifier = ">=1.4.9,<2" },
{ name = "unidecode", specifier = ">=1.3.8,<2" },
{ name = "urllib3", specifier = ">=2.2.1,<3" },
@@ -1587,16 +1683,17 @@ wheels = [

[[package]]
name = "virtualenv"
version = "20.31.2"
version = "20.34.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "distlib" },
{ name = "filelock" },
{ name = "platformdirs" },
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/56/2c/444f465fb2c65f40c3a104fd0c495184c4f2336d65baf398e3c75d72ea94/virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af", size = 6076316, upload-time = "2025-05-08T17:58:23.811Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11", size = 6057982, upload-time = "2025-05-08T17:58:21.15Z" },
{ url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" },
]

[[package]]