Mirror of https://github.com/unshackle-dl/unshackle.git (synced 2025-10-23 15:11:08 +00:00)
feat(update_checker): Enhance update checking logic and cache handling
@@ -10,11 +10,22 @@ import requests


 class UpdateChecker:
-    """Check for available updates from the GitHub repository."""
+    """
+    Check for available updates from the GitHub repository.
+
+    This class provides functionality to check for newer versions of the application
+    by querying the GitHub releases API. It includes rate limiting, caching, and
+    both synchronous and asynchronous interfaces.
+
+    Attributes:
+        REPO_URL: GitHub API URL for latest release
+        TIMEOUT: Request timeout in seconds
+        DEFAULT_CHECK_INTERVAL: Default time between checks in seconds (24 hours)
+    """

     REPO_URL = "https://api.github.com/repos/unshackle-dl/unshackle/releases/latest"
     TIMEOUT = 5
-    DEFAULT_CHECK_INTERVAL = 24 * 60 * 60  # 24 hours in seconds
+    DEFAULT_CHECK_INTERVAL = 24 * 60 * 60

     @classmethod
     def _get_cache_file(cls) -> Path:
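Note: REPO_URL is GitHub's "latest release" endpoint, which returns JSON whose "tag_name" field carries the release tag, conventionally prefixed with "v" (e.g. "v1.2.3"). A minimal illustration of what the helpers below build on (not part of the commit):

    import requests

    REPO_URL = "https://api.github.com/repos/unshackle-dl/unshackle/releases/latest"

    response = requests.get(REPO_URL, timeout=5)
    if response.status_code == 200:
        # Response is JSON, e.g. {"tag_name": "v1.2.3", ...}
        tag = response.json().get("tag_name", "")
        print(tag.lstrip("v"))  # -> "1.2.3" after stripping the conventional "v" prefix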
@@ -23,6 +34,86 @@ class UpdateChecker:

         return config.directories.cache / "update_check.json"

+    @classmethod
+    def _load_cache_data(cls) -> dict:
+        """
+        Load cache data from file.
+
+        Returns:
+            Cache data dictionary or empty dict if loading fails
+        """
+        cache_file = cls._get_cache_file()
+
+        if not cache_file.exists():
+            return {}
+
+        try:
+            with open(cache_file, "r") as f:
+                return json.load(f)
+        except (json.JSONDecodeError, OSError):
+            return {}
+
+    @staticmethod
+    def _parse_version(version_string: str) -> str:
+        """
+        Parse and normalize version string by removing 'v' prefix.
+
+        Args:
+            version_string: Raw version string from API
+
+        Returns:
+            Cleaned version string
+        """
+        return version_string.lstrip("v")
+
+    @staticmethod
+    def _is_valid_version(version: str) -> bool:
+        """
+        Validate version string format.
+
+        Args:
+            version: Version string to validate
+
+        Returns:
+            True if version string is valid semantic version, False otherwise
+        """
+        if not version or not isinstance(version, str):
+            return False
+
+        try:
+            parts = version.split(".")
+            if len(parts) < 2:
+                return False
+
+            for part in parts:
+                int(part)
+
+            return True
+        except (ValueError, AttributeError):
+            return False
+
+    @classmethod
+    def _fetch_latest_version(cls) -> Optional[str]:
+        """
+        Fetch the latest version from GitHub API.
+
+        Returns:
+            Latest version string if successful, None otherwise
+        """
+        try:
+            response = requests.get(cls.REPO_URL, timeout=cls.TIMEOUT)
+
+            if response.status_code != 200:
+                return None
+
+            data = response.json()
+            latest_version = cls._parse_version(data.get("tag_name", ""))
+
+            return latest_version if cls._is_valid_version(latest_version) else None
+
+        except Exception:
+            return None
+
     @classmethod
     def _should_check_for_updates(cls, check_interval: int = DEFAULT_CHECK_INTERVAL) -> bool:
         """
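The two static helpers above only accept dotted, all-numeric versions with at least two components; anything else is rejected rather than guessed at. A standalone sketch of the same rules (illustrative re-implementation, not the module itself):

    def parse_version(version_string: str) -> str:
        # Mirrors _parse_version: drop the leading "v" (lstrip removes any leading "v" characters)
        return version_string.lstrip("v")

    def is_valid_version(version: str) -> bool:
        # Mirrors _is_valid_version: at least two dot-separated parts, all integers
        if not version or not isinstance(version, str):
            return False
        parts = version.split(".")
        if len(parts) < 2:
            return False
        try:
            for part in parts:
                int(part)
            return True
        except ValueError:
            return False

    assert parse_version("v1.2.3") == "1.2.3"
    assert is_valid_version("1.2") is True
    assert is_valid_version("1") is False          # too few parts
    assert is_valid_version("1.2.3-rc1") is False  # non-numeric part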
@@ -34,45 +125,40 @@ class UpdateChecker:
         Returns:
             True if we should check for updates, False otherwise
         """
-        cache_file = cls._get_cache_file()
+        cache_data = cls._load_cache_data()

-        if not cache_file.exists():
+        if not cache_data:
             return True

-        try:
-            with open(cache_file, "r") as f:
-                cache_data = json.load(f)
-
-            last_check = cache_data.get("last_check", 0)
-            current_time = time.time()
-
-            return (current_time - last_check) >= check_interval
-
-        except (json.JSONDecodeError, KeyError, OSError):
-            # If cache is corrupted or unreadable, allow check
-            return True
+        last_check = cache_data.get("last_check", 0)
+        current_time = time.time()
+
+        return (current_time - last_check) >= check_interval

     @classmethod
-    def _update_cache(cls, latest_version: Optional[str] = None) -> None:
+    def _update_cache(cls, latest_version: Optional[str] = None, current_version: Optional[str] = None) -> None:
         """
-        Update the cache file with the current timestamp and latest version.
+        Update the cache file with the current timestamp and version info.

         Args:
             latest_version: The latest version found, if any
+            current_version: The current version being used
         """
         cache_file = cls._get_cache_file()

         try:
             # Ensure cache directory exists
             cache_file.parent.mkdir(parents=True, exist_ok=True)

-            cache_data = {"last_check": time.time(), "latest_version": latest_version}
+            cache_data = {
+                "last_check": time.time(),
+                "latest_version": latest_version,
+                "current_version": current_version,
+            }

             with open(cache_file, "w") as f:
-                json.dump(cache_data, f)
+                json.dump(cache_data, f, indent=2)

         except (OSError, json.JSONEncodeError):
             # Silently fail if we can't write cache
             pass

     @staticmethod
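The cache is a small JSON file, and _should_check_for_updates simply compares its last_check timestamp against the interval. A sketch of the file contents and the decision (path and values are illustrative, not the real config-derived location):

    import json
    import time
    from pathlib import Path

    cache_file = Path("update_check.json")  # the real path comes from config.directories.cache

    # Shape written by _update_cache:
    cache_data = {
        "last_check": time.time(),      # UNIX timestamp of the last API call
        "latest_version": "1.2.3",      # may be None if the fetch failed
        "current_version": "1.2.0",     # version that performed the check
    }
    cache_file.write_text(json.dumps(cache_data, indent=2))

    # Decision made by _should_check_for_updates:
    check_interval = 24 * 60 * 60  # seconds
    elapsed = time.time() - json.loads(cache_file.read_text()).get("last_check", 0)
    print("check again" if elapsed >= check_interval else "use cached result")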
@@ -87,6 +173,9 @@ class UpdateChecker:
         Returns:
             True if latest > current, False otherwise
         """
+        if not UpdateChecker._is_valid_version(current) or not UpdateChecker._is_valid_version(latest):
+            return False
+
         try:
             current_parts = [int(x) for x in current.split(".")]
             latest_parts = [int(x) for x in latest.split(".")]
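The comparison loop itself is unchanged and not shown in this hunk. One common way to compare such integer part lists, padding the shorter one with zeros, looks like this (a sketch, not necessarily the exact existing code):

    from itertools import zip_longest

    def is_newer(current: str, latest: str) -> bool:
        # True if latest > current, comparing dotted numeric parts left to right
        current_parts = [int(x) for x in current.split(".")]
        latest_parts = [int(x) for x in latest.split(".")]
        for cur, new in zip_longest(current_parts, latest_parts, fillvalue=0):
            if new > cur:
                return True
            if new < cur:
                return False
        return False  # equal versions are not "newer"

    assert is_newer("1.2.3", "1.3.0") is True
    assert is_newer("1.2.3", "1.2.3") is False
    assert is_newer("1.10.0", "1.9.9") is False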
@@ -116,20 +205,14 @@ class UpdateChecker:
         Returns:
             The latest version string if an update is available, None otherwise
         """
+        if not cls._is_valid_version(current_version):
+            return None
+
         try:
             loop = asyncio.get_event_loop()
-            response = await loop.run_in_executor(None, lambda: requests.get(cls.REPO_URL, timeout=cls.TIMEOUT))
-
-            if response.status_code != 200:
-                return None
-
-            data = response.json()
-            latest_version = data.get("tag_name", "").lstrip("v")
-
-            if not latest_version:
-                return None
-
-            if cls._compare_versions(current_version, latest_version):
+            latest_version = await loop.run_in_executor(None, cls._fetch_latest_version)
+
+            if latest_version and cls._compare_versions(current_version, latest_version):
                 return latest_version

         except Exception:
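The async path now reuses _fetch_latest_version and only offloads it to a worker thread so the blocking requests call does not stall the event loop. A self-contained sketch of that pattern, with a dummy blocking function standing in for the real fetch:

    import asyncio
    import time

    def blocking_fetch() -> str:
        # Stand-in for the requests-based _fetch_latest_version
        time.sleep(0.1)
        return "1.2.3"

    async def check_async() -> str:
        loop = asyncio.get_running_loop()
        # Hand the blocking call to the default thread-pool executor
        return await loop.run_in_executor(None, blocking_fetch)

    print(asyncio.run(check_async()))  # "1.2.3"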
@@ -137,6 +220,31 @@ class UpdateChecker:

         return None

+    @classmethod
+    def _get_cached_update_info(cls, current_version: str) -> Optional[str]:
+        """
+        Check if there's a cached update available for the current version.
+
+        Args:
+            current_version: The current version string
+
+        Returns:
+            The latest version string if an update is available from cache, None otherwise
+        """
+        cache_data = cls._load_cache_data()
+
+        if not cache_data:
+            return None
+
+        cached_current = cache_data.get("current_version")
+        cached_latest = cache_data.get("latest_version")
+
+        if cached_current == current_version and cached_latest:
+            if cls._compare_versions(current_version, cached_latest):
+                return cached_latest
+
+        return None
+
     @classmethod
     def check_for_updates_sync(cls, current_version: str, check_interval: Optional[int] = None) -> Optional[str]:
         """
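Storing current_version alongside latest_version is what makes the cached answer safe to reuse: if the application has since been upgraded, the stored current_version no longer matches the running one and the cache is ignored. Roughly, with illustrative values:

    cache_data = {"latest_version": "1.3.0", "current_version": "1.2.0"}  # illustrative values
    running_version = "1.2.0"

    cached_current = cache_data.get("current_version")
    cached_latest = cache_data.get("latest_version")

    # Reuse the cached answer only if it was produced by the same running version;
    # the real method additionally re-checks _compare_versions before returning it.
    if cached_current == running_version and cached_latest:
        print(f"update available: {cached_latest}")
    else:
        print("cache not applicable; wait for the next full check")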
@@ -149,40 +257,20 @@ class UpdateChecker:
         Returns:
             The latest version string if an update is available, None otherwise
         """
-        # Use config value if not specified
+        if not cls._is_valid_version(current_version):
+            return None
+
         if check_interval is None:
             from unshackle.core.config import config

-            check_interval = config.update_check_interval * 60 * 60  # Convert hours to seconds
+            check_interval = config.update_check_interval * 60 * 60

         # Check if we should skip this check due to rate limiting
         if not cls._should_check_for_updates(check_interval):
-            return None
+            return cls._get_cached_update_info(current_version)

-        try:
-            response = requests.get(cls.REPO_URL, timeout=cls.TIMEOUT)
-
-            if response.status_code != 200:
-                # Update cache even on failure to prevent rapid retries
-                cls._update_cache()
-                return None
-
-            data = response.json()
-            latest_version = data.get("tag_name", "").lstrip("v")
-
-            if not latest_version:
-                cls._update_cache()
-                return None
-
-            # Update cache with the latest version info
-            cls._update_cache(latest_version)
-
-            if cls._compare_versions(current_version, latest_version):
-                return latest_version
-
-        except Exception:
-            # Update cache even on exception to prevent rapid retries
-            cls._update_cache()
-            pass
+        latest_version = cls._fetch_latest_version()
+        cls._update_cache(latest_version, current_version)
+
+        if latest_version and cls._compare_versions(current_version, latest_version):
+            return latest_version

         return None
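Taken together, the sync entry point now always records the check in the cache and, while rate-limited, falls back to the cached answer instead of returning nothing. A minimal usage sketch (the import path unshackle.core.update_checker is assumed, not confirmed by this diff):

    # Hypothetical import path -- adjust to wherever UpdateChecker lives in the package
    from unshackle.core.update_checker import UpdateChecker

    current = "1.2.0"
    latest = UpdateChecker.check_for_updates_sync(current)  # check_interval falls back to config

    if latest:
        print(f"unshackle {latest} is available (running {current})")
    else:
        print("up to date, invalid version string, or rate-limited with no cached update")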