Mirror of https://github.com/Omni-guides/Jackify.git (synced 2026-01-17 19:47:00 +01:00)
Commit: Sync from development - prepare for v0.2.0
@@ -29,9 +29,10 @@ class AutomatedPrefixService:
and direct Proton wrapper integration.
"""

def __init__(self):
def __init__(self, system_info=None):
self.scripts_dir = Path.home() / "Jackify/scripts"
self.scripts_dir.mkdir(parents=True, exist_ok=True)
self.system_info = system_info
# Use shared timing for consistency across services

def _get_progress_timestamp(self):
@@ -546,13 +547,15 @@ exit"""
def restart_steam(self) -> bool:
"""
Restart Steam using the robust service method.

Returns:
True if successful, False otherwise
"""
try:
from .steam_restart_service import robust_steam_restart
return robust_steam_restart(progress_callback=None, timeout=60)
# Use system_info if available (backward compatibility)
system_info = getattr(self, 'system_info', None)
return robust_steam_restart(progress_callback=None, timeout=60, system_info=system_info)
except Exception as e:
logger.error(f"Error restarting Steam: {e}")
return False
@@ -929,22 +932,35 @@ echo Prefix creation complete.
# Get or create CompatToolMapping
if 'CompatToolMapping' not in config_data['Software']['Valve']['Steam']:
config_data['Software']['Valve']['Steam']['CompatToolMapping'] = {}

# Set the Proton version for this AppID
config_data['Software']['Valve']['Steam']['CompatToolMapping'][str(appid)] = proton_version

# Set the Proton version for this AppID using Steam's expected format
# Steam requires a dict with 'name', 'config', and 'priority' keys
config_data['Software']['Valve']['Steam']['CompatToolMapping'][str(appid)] = {
'name': proton_version,
'config': '',
'priority': '250'
}

# Write back to file (text format)
with open(config_path, 'w') as f:
vdf.dump(config_data, f)

# Ensure file is fully written to disk before Steam restart
import os
os.fsync(f.fileno()) if hasattr(f, 'fileno') else None

logger.info(f"Set Proton version {proton_version} for AppID {appid}")
debug_print(f"[DEBUG] Set Proton version {proton_version} for AppID {appid} in config.vdf")

# Small delay to ensure filesystem write completes
import time
time.sleep(0.5)

# Verify it was set correctly
with open(config_path, 'r') as f:
verify_data = vdf.load(f)
actual_value = verify_data.get('Software', {}).get('Valve', {}).get('Steam', {}).get('CompatToolMapping', {}).get(str(appid))
debug_print(f"[DEBUG] Verification: AppID {appid} -> {actual_value}")
compat_mapping = verify_data.get('Software', {}).get('Valve', {}).get('Steam', {}).get('CompatToolMapping', {}).get(str(appid))
debug_print(f"[DEBUG] Verification: AppID {appid} -> {compat_mapping}")

return True
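For reference, a minimal sketch of the CompatToolMapping structure this hunk writes, with an illustrative AppID and Proton name and assuming the same vdf package used above:

import vdf

config_data = {'Software': {'Valve': {'Steam': {'CompatToolMapping': {}}}}}
config_data['Software']['Valve']['Steam']['CompatToolMapping']['123456'] = {
    'name': 'GE-Proton9-20',  # illustrative Proton version
    'config': '',
    'priority': '250'
}
print(vdf.dumps(config_data, pretty=True))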
@@ -1045,7 +1061,18 @@ echo Prefix creation complete.
env = os.environ.copy()
env['STEAM_COMPAT_DATA_PATH'] = str(prefix_path)
env['STEAM_COMPAT_APP_ID'] = str(positive_appid) # Use positive AppID for environment
env['STEAM_COMPAT_CLIENT_INSTALL_PATH'] = str(Path.home() / ".local/share/Steam")

# Determine correct Steam root based on installation type
from ..handlers.path_handler import PathHandler
path_handler = PathHandler()
steam_library = path_handler.find_steam_library()
if steam_library and steam_library.name == "common":
# Extract Steam root from library path: .../Steam/steamapps/common -> .../Steam
steam_root = steam_library.parent.parent
env['STEAM_COMPAT_CLIENT_INSTALL_PATH'] = str(steam_root)
else:
# Fallback to legacy path if detection fails
env['STEAM_COMPAT_CLIENT_INSTALL_PATH'] = str(Path.home() / ".local/share/Steam")

# Build the command
cmd = [
@@ -1109,7 +1136,10 @@ echo Prefix creation complete.

def _get_compatdata_path_for_appid(self, appid: int) -> Optional[Path]:
"""
Get the compatdata path for a given AppID using existing Jackify functions.
Get the compatdata path for a given AppID.

First tries to find existing compatdata, then constructs path from libraryfolders.vdf
for creating new prefixes.

Args:
appid: The AppID to get the path for
@@ -1117,22 +1147,32 @@ echo Prefix creation complete.
Returns:
Path to the compatdata directory, or None if not found
"""
# Use existing Jackify path detection
from ..handlers.path_handler import PathHandler

# First, try to find existing compatdata
compatdata_path = PathHandler.find_compat_data(str(appid))
if compatdata_path:
return compatdata_path

# Fallback: construct the path manually
possible_bases = [
# Prefix doesn't exist yet - determine where to create it from libraryfolders.vdf
library_paths = PathHandler.get_all_steam_library_paths()
if library_paths:
# Use the first library (typically the default library)
# Construct compatdata path: library_path/steamapps/compatdata/appid
first_library = library_paths[0]
compatdata_base = first_library / "steamapps" / "compatdata"
return compatdata_base / str(appid)

# Only fallback if VDF parsing completely fails
logger.warning("Could not get library paths from libraryfolders.vdf, using fallback locations")
fallback_bases = [
Path.home() / ".var/app/com.valvesoftware.Steam/data/Steam/steamapps/compatdata",
Path.home() / ".var/app/com.valvesoftware.Steam/.local/share/Steam/steamapps/compatdata",
Path.home() / ".steam/steam/steamapps/compatdata",
Path.home() / ".local/share/Steam/steamapps/compatdata",
Path.home() / ".var/app/com.valvesoftware.Steam/home/.steam/steam/steamapps/compatdata",
Path.home() / ".var/app/com.valvesoftware.Steam/home/.local/share/Steam/steamapps/compatdata",
]

for base_path in possible_bases:
for base_path in fallback_bases:
if base_path.is_dir():
return base_path / str(appid)
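As a concrete illustration of the path construction above, a short sketch with a hypothetical default library:

from pathlib import Path

first_library = Path.home() / ".local/share/Steam"  # hypothetical first entry from libraryfolders.vdf
appid = 123456  # illustrative
compatdata_path = first_library / "steamapps" / "compatdata" / str(appid)
# -> ~/.local/share/Steam/steamapps/compatdata/123456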
@@ -2666,9 +2706,40 @@ echo Prefix creation complete.
True if successful, False otherwise
"""
try:
steam_root = Path.home() / ".steam/steam"
compatdata_dir = steam_root / "steamapps/compatdata"
proton_common_dir = steam_root / "steamapps/common"
# Determine Steam locations based on installation type
from ..handlers.path_handler import PathHandler
path_handler = PathHandler()
all_libraries = path_handler.get_all_steam_library_paths()

# Check if we have Flatpak Steam by looking for .var/app/com.valvesoftware.Steam in library paths
is_flatpak_steam = any('.var/app/com.valvesoftware.Steam' in str(lib) for lib in all_libraries)

if is_flatpak_steam and all_libraries:
# Flatpak Steam: Use the actual library root from libraryfolders.vdf
# Compatdata should be in the library root, not the client root
flatpak_library_root = all_libraries[0] # Use first library (typically the default)
flatpak_client_root = flatpak_library_root.parent.parent / ".steam/steam"

if not flatpak_library_root.is_dir():
logger.error(
f"Flatpak Steam library root does not exist: {flatpak_library_root}"
)
return False

steam_root = flatpak_client_root if flatpak_client_root.is_dir() else flatpak_library_root
# CRITICAL: compatdata must be in the library root, not client root
compatdata_dir = flatpak_library_root / "steamapps/compatdata"
proton_common_dir = flatpak_library_root / "steamapps/common"
else:
# Native Steam (or unknown): fall back to legacy ~/.steam/steam layout
steam_root = Path.home() / ".steam/steam"
compatdata_dir = steam_root / "steamapps/compatdata"
proton_common_dir = steam_root / "steamapps/common"

# Ensure compatdata root exists and is a directory we actually want to use
if not compatdata_dir.is_dir():
logger.error(f"Compatdata root does not exist: {compatdata_dir}. Aborting prefix creation.")
return False

# Find a Proton wrapper to use
proton_path = self._find_proton_binary(proton_common_dir)
@@ -2686,9 +2757,9 @@ echo Prefix creation complete.
env['WINEDEBUG'] = '-all'
env['WINEDLLOVERRIDES'] = 'msdia80.dll=n;conhost.exe=d;cmd.exe=d'

# Create the compatdata directory
# Create the compatdata directory for this AppID (but never the whole tree)
compat_dir = compatdata_dir / str(abs(appid))
compat_dir.mkdir(parents=True, exist_ok=True)
compat_dir.mkdir(exist_ok=True)

logger.info(f"Creating Proton prefix for AppID {appid}")
logger.info(f"STEAM_COMPAT_CLIENT_INSTALL_PATH={env['STEAM_COMPAT_CLIENT_INSTALL_PATH']}")
474 jackify/backend/services/modlist_gallery_service.py (new file)
@@ -0,0 +1,474 @@
"""
|
||||
Service for fetching and managing modlist metadata for the gallery view.
|
||||
|
||||
Handles jackify-engine integration, caching, and image management.
|
||||
"""
|
||||
import json
|
||||
import subprocess
|
||||
import time
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Dict
|
||||
from datetime import datetime, timedelta
|
||||
import urllib.request
|
||||
|
||||
from jackify.backend.models.modlist_metadata import (
|
||||
ModlistMetadataResponse,
|
||||
ModlistMetadata,
|
||||
parse_modlist_metadata_response
|
||||
)
|
||||
from jackify.backend.core.modlist_operations import get_jackify_engine_path
|
||||
from jackify.backend.handlers.config_handler import ConfigHandler
|
||||
from jackify.shared.paths import get_jackify_data_dir
|
||||
|
||||
|
||||
class ModlistGalleryService:
|
||||
"""Service for fetching and caching modlist metadata from jackify-engine"""
|
||||
|
||||
CACHE_VALIDITY_DAYS = 7 # Refresh cache after 7 days
|
||||
# CRITICAL: Thread lock to prevent concurrent engine calls that could cause recursive spawning
|
||||
_engine_call_lock = threading.Lock()
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the gallery service"""
|
||||
self.config_handler = ConfigHandler()
|
||||
# Cache directories in Jackify Data Directory
|
||||
jackify_data_dir = get_jackify_data_dir()
|
||||
self.CACHE_DIR = jackify_data_dir / "modlist-cache" / "metadata"
|
||||
self.IMAGE_CACHE_DIR = jackify_data_dir / "modlist-cache" / "images"
|
||||
self.METADATA_CACHE_FILE = self.CACHE_DIR / "modlist_metadata.json"
|
||||
self._ensure_cache_dirs()
|
||||
# Tag metadata caches (avoid refetching per render)
|
||||
self._tag_mappings_cache: Optional[Dict[str, str]] = None
|
||||
self._tag_mapping_lookup: Optional[Dict[str, str]] = None
|
||||
self._allowed_tags_cache: Optional[set] = None
|
||||
self._allowed_tags_lookup: Optional[Dict[str, str]] = None
|
||||
|
||||
def _ensure_cache_dirs(self):
|
||||
"""Create cache directories if they don't exist"""
|
||||
self.CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
self.IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def fetch_modlist_metadata(
self,
include_validation: bool = True,
include_search_index: bool = False,
sort_by: str = "title",
force_refresh: bool = False
) -> Optional[ModlistMetadataResponse]:
"""
Fetch modlist metadata from jackify-engine.

Args:
include_validation: Include validation status (slower)
include_search_index: Include mod search index (slower)
sort_by: Sort order (title, size, date)
force_refresh: Force refresh even if cache is valid

Returns:
ModlistMetadataResponse or None if fetch fails
"""
# Check cache first unless force refresh
# If include_search_index is True, check if cache has mods before using it
if not force_refresh:
cached = self._load_from_cache()
if cached and self._is_cache_valid():
# If we need search index, check if cached data has mods
if include_search_index:
# Check if at least one modlist has mods (indicates cache was built with search index)
has_mods = any(hasattr(m, 'mods') and m.mods for m in cached.modlists)
if has_mods:
return cached # Cache has mods, use it
# Cache doesn't have mods, need to fetch fresh
else:
return cached # Don't need search index, use cache

# Fetch fresh data from jackify-engine
try:
metadata = self._fetch_from_engine(
include_validation=include_validation,
include_search_index=include_search_index,
sort_by=sort_by
)

if metadata:
self._save_to_cache(metadata)

return metadata

except Exception as e:
print(f"Error fetching modlist metadata: {e}")
# Fall back to cache if available
return self._load_from_cache()
def _fetch_from_engine(
self,
include_validation: bool,
include_search_index: bool,
sort_by: str
) -> Optional[ModlistMetadataResponse]:
"""Call jackify-engine to fetch modlist metadata"""
# CRITICAL: Use thread lock to prevent concurrent engine calls
# Multiple simultaneous calls could cause recursive spawning issues
with self._engine_call_lock:
# CRITICAL: Get engine path BEFORE cleaning environment
# get_jackify_engine_path() may need APPDIR to locate the engine
engine_path = get_jackify_engine_path()
if not engine_path:
raise FileNotFoundError("jackify-engine not found")

# Build command
cmd = [str(engine_path), "list-modlists", "--json", "--sort-by", sort_by]

if include_validation:
cmd.append("--include-validation-status")

if include_search_index:
cmd.append("--include-search-index")

# Execute command
# CRITICAL: Use centralized clean environment to prevent AppImage recursive spawning
# This must happen AFTER engine path resolution
from jackify.backend.handlers.subprocess_utils import get_clean_subprocess_env
clean_env = get_clean_subprocess_env()

result = subprocess.run(
cmd,
capture_output=True,
text=True,
timeout=300, # 5 minute timeout for large data
env=clean_env
)

if result.returncode != 0:
raise RuntimeError(f"jackify-engine failed: {result.stderr}")

# Parse JSON response - skip progress messages and extract JSON
# jackify-engine prints progress to stdout before the JSON
stdout = result.stdout.strip()

# Find the start of JSON (first '{' on its own line)
lines = stdout.split('\n')
json_start = 0
for i, line in enumerate(lines):
if line.strip().startswith('{'):
json_start = i
break

json_text = '\n'.join(lines[json_start:])
data = json.loads(json_text)
return parse_modlist_metadata_response(data)
def _load_from_cache(self) -> Optional[ModlistMetadataResponse]:
"""Load metadata from cache file"""
if not self.METADATA_CACHE_FILE.exists():
return None

try:
with open(self.METADATA_CACHE_FILE, 'r', encoding='utf-8') as f:
data = json.load(f)
return parse_modlist_metadata_response(data)
except Exception as e:
print(f"Error loading cache: {e}")
return None

def _save_to_cache(self, metadata: ModlistMetadataResponse):
"""Save metadata to cache file"""
try:
# Convert to dict for JSON serialization
data = {
'metadataVersion': metadata.metadataVersion,
'timestamp': metadata.timestamp,
'count': metadata.count,
'modlists': [self._metadata_to_dict(m) for m in metadata.modlists]
}

with open(self.METADATA_CACHE_FILE, 'w', encoding='utf-8') as f:
json.dump(data, f, indent=2)

except Exception as e:
print(f"Error saving cache: {e}")
def _metadata_to_dict(self, metadata: ModlistMetadata) -> dict:
"""Convert ModlistMetadata to dict for JSON serialization"""
result = {
'title': metadata.title,
'description': metadata.description,
'author': metadata.author,
'maintainers': metadata.maintainers,
'namespacedName': metadata.namespacedName,
'repositoryName': metadata.repositoryName,
'machineURL': metadata.machineURL,
'game': metadata.game,
'gameHumanFriendly': metadata.gameHumanFriendly,
'official': metadata.official,
'nsfw': metadata.nsfw,
'utilityList': metadata.utilityList,
'forceDown': metadata.forceDown,
'imageContainsTitle': metadata.imageContainsTitle,
'version': metadata.version,
'displayVersionOnlyInInstallerView': metadata.displayVersionOnlyInInstallerView,
'dateCreated': metadata.dateCreated,
'dateUpdated': metadata.dateUpdated,
'tags': metadata.tags,
'mods': metadata.mods
}

if metadata.images:
result['images'] = {
'small': metadata.images.small,
'large': metadata.images.large
}

if metadata.links:
result['links'] = {
'image': metadata.links.image,
'readme': metadata.links.readme,
'download': metadata.links.download,
'discordURL': metadata.links.discordURL,
'websiteURL': metadata.links.websiteURL
}

if metadata.sizes:
result['sizes'] = {
'downloadSize': metadata.sizes.downloadSize,
'downloadSizeFormatted': metadata.sizes.downloadSizeFormatted,
'installSize': metadata.sizes.installSize,
'installSizeFormatted': metadata.sizes.installSizeFormatted,
'totalSize': metadata.sizes.totalSize,
'totalSizeFormatted': metadata.sizes.totalSizeFormatted,
'numberOfArchives': metadata.sizes.numberOfArchives,
'numberOfInstalledFiles': metadata.sizes.numberOfInstalledFiles
}

if metadata.validation:
result['validation'] = {
'failed': metadata.validation.failed,
'passed': metadata.validation.passed,
'updating': metadata.validation.updating,
'mirrored': metadata.validation.mirrored,
'modListIsMissing': metadata.validation.modListIsMissing,
'hasFailures': metadata.validation.hasFailures
}

return result
def _is_cache_valid(self) -> bool:
"""Check if cache is still valid based on age"""
if not self.METADATA_CACHE_FILE.exists():
return False

# Check file modification time
mtime = datetime.fromtimestamp(self.METADATA_CACHE_FILE.stat().st_mtime)
age = datetime.now() - mtime

return age < timedelta(days=self.CACHE_VALIDITY_DAYS)
def download_images(
self,
game_filter: Optional[str] = None,
size: str = "both",
overwrite: bool = False
) -> bool:
"""
Download modlist images to cache using jackify-engine.

Args:
game_filter: Filter by game name (None = all games)
size: Image size to download (small, large, both)
overwrite: Overwrite existing images

Returns:
True if successful, False otherwise
"""
# Build command (engine path will be resolved inside lock)
cmd = [
"placeholder", # Will be replaced with actual engine path
"download-modlist-images",
"--output", str(self.IMAGE_CACHE_DIR),
"--size", size
]

if game_filter:
cmd.extend(["--game", game_filter])

if overwrite:
cmd.append("--overwrite")

# Execute command
try:
# CRITICAL: Use thread lock to prevent concurrent engine calls
with self._engine_call_lock:
# CRITICAL: Get engine path BEFORE cleaning environment
# get_jackify_engine_path() may need APPDIR to locate the engine
engine_path = get_jackify_engine_path()
if not engine_path:
return False

# Update cmd with resolved engine path
cmd[0] = str(engine_path)

# CRITICAL: Use centralized clean environment to prevent AppImage recursive spawning
# This must happen AFTER engine path resolution
from jackify.backend.handlers.subprocess_utils import get_clean_subprocess_env
clean_env = get_clean_subprocess_env()

result = subprocess.run(
cmd,
capture_output=True,
text=True,
timeout=3600, # 1 hour timeout for downloads
env=clean_env
)
return result.returncode == 0
except Exception as e:
print(f"Error downloading images: {e}")
return False
def get_cached_image_path(self, metadata: ModlistMetadata, size: str = "large") -> Optional[Path]:
"""
Get path to cached image for a modlist (only if it exists).

Args:
metadata: Modlist metadata
size: Image size (small or large)

Returns:
Path to cached image or None if not cached
"""
filename = f"{metadata.machineURL}_{size}.webp"
image_path = self.IMAGE_CACHE_DIR / metadata.repositoryName / filename

if image_path.exists():
return image_path
return None

def get_image_cache_path(self, metadata: ModlistMetadata, size: str = "large") -> Path:
"""
Get path where image should be cached (always returns path, even if file doesn't exist).

Args:
metadata: Modlist metadata
size: Image size (small or large)

Returns:
Path where image should be cached
"""
filename = f"{metadata.machineURL}_{size}.webp"
return self.IMAGE_CACHE_DIR / metadata.repositoryName / filename

def get_image_url(self, metadata: ModlistMetadata, size: str = "large") -> Optional[str]:
"""
Get image URL for a modlist.

Args:
metadata: Modlist metadata
size: Image size (small or large)

Returns:
Image URL or None if images not available
"""
if not metadata.images:
return None

return metadata.images.large if size == "large" else metadata.images.small
def clear_cache(self):
"""Clear all cached metadata and images"""
if self.METADATA_CACHE_FILE.exists():
self.METADATA_CACHE_FILE.unlink()

# Clear image cache
if self.IMAGE_CACHE_DIR.exists():
import shutil
shutil.rmtree(self.IMAGE_CACHE_DIR)
self.IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True)

def get_installed_modlists(self) -> List[str]:
"""
Get list of installed modlist machine URLs.

Returns:
List of machine URLs for installed modlists
"""
# TODO: Integrate with existing modlist database/config
# For now, return empty list - will be implemented when integrated with existing modlist tracking
return []

def is_modlist_installed(self, machine_url: str) -> bool:
"""Check if a modlist is installed"""
return machine_url in self.get_installed_modlists()
def load_tag_mappings(self) -> Dict[str, str]:
"""
Load tag mappings from Wabbajack GitHub repository.
Maps variant tag names to canonical tag names.

Returns:
Dictionary mapping variant tags to canonical tags
"""
url = "https://raw.githubusercontent.com/wabbajack-tools/mod-lists/master/tag_mappings.json"
try:
with urllib.request.urlopen(url, timeout=10) as response:
data = json.loads(response.read().decode('utf-8'))
return data
except Exception as e:
print(f"Warning: Could not load tag mappings: {e}")
return {}

def load_allowed_tags(self) -> set:
"""
Load allowed tags from Wabbajack GitHub repository.

Returns:
Set of allowed tag names (preserving original case)
"""
url = "https://raw.githubusercontent.com/wabbajack-tools/mod-lists/master/allowed_tags.json"
try:
with urllib.request.urlopen(url, timeout=10) as response:
data = json.loads(response.read().decode('utf-8'))
return set(data) # Return as set preserving original case
except Exception as e:
print(f"Warning: Could not load allowed tags: {e}")
return set()
def _ensure_tag_metadata(self):
"""Ensure tag mappings/allowed tags (and lookups) are cached."""
if self._tag_mappings_cache is None:
self._tag_mappings_cache = self.load_tag_mappings()
if self._tag_mapping_lookup is None:
self._tag_mapping_lookup = {k.lower(): v for k, v in self._tag_mappings_cache.items()}
if self._allowed_tags_cache is None:
self._allowed_tags_cache = self.load_allowed_tags()
if self._allowed_tags_lookup is None:
self._allowed_tags_lookup = {tag.lower(): tag for tag in self._allowed_tags_cache}

def normalize_tag_value(self, tag: str) -> str:
"""
Normalize a tag to its canonical display form using Wabbajack mappings.
Returns the normalized tag (original casing preserved when possible).
"""
if not tag:
return ""
self._ensure_tag_metadata()
tag_key = tag.strip().lower()
if not tag_key:
return ""
canonical = self._tag_mapping_lookup.get(tag_key, tag.strip())
# Prefer allowed tag casing if available
return self._allowed_tags_lookup.get(canonical.lower(), canonical)

def normalize_tags_for_display(self, tags: Optional[List[str]]) -> List[str]:
"""Normalize a list of tags for UI display (deduped, canonical casing)."""
if not tags:
return []
self._ensure_tag_metadata()
normalized = []
seen = set()
for tag in tags:
normalized_tag = self.normalize_tag_value(tag)
key = normalized_tag.lower()
if key and key not in seen:
normalized.append(normalized_tag)
seen.add(key)
return normalized
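A minimal usage sketch of the new gallery service, assuming it is importable from the module added above:

from jackify.backend.services.modlist_gallery_service import ModlistGalleryService

service = ModlistGalleryService()
response = service.fetch_modlist_metadata(include_validation=True)
if response:
    for modlist in response.modlists:
        # Tags are normalized against the Wabbajack tag mappings for display
        print(modlist.title, service.normalize_tags_for_display(modlist.tags))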
@@ -285,8 +285,18 @@ class ModlistService:
output_callback(f"Jackify Install Engine not found or not executable at: {engine_path}")
return False

# Build command (copied from working code)
cmd = [engine_path, 'install']
# Build command (copied from working code)
cmd = [engine_path, 'install', '--show-file-progress']

# Check GPU setting
from jackify.backend.handlers.config_handler import ConfigHandler
config_handler = ConfigHandler()
gpu_enabled = config_handler.get('enable_gpu_texture_conversion', True)
logger.info(f"GPU texture conversion setting: {gpu_enabled}")
if not gpu_enabled:
cmd.append('--no-gpu')
logger.info("Added --no-gpu flag to jackify-engine command")

modlist_value = context.get('modlist_value')
if modlist_value and modlist_value.endswith('.wabbajack') and os.path.isfile(modlist_value):
cmd += ['-w', modlist_value]
@@ -326,8 +336,10 @@ class ModlistService:
else:
output_callback(f"File descriptor limit warning: {message}")

# Subprocess call (copied from working code)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=False, env=None, cwd=engine_dir)
# Subprocess call with cleaned environment to prevent AppImage variable inheritance
from jackify.backend.handlers.subprocess_utils import get_clean_subprocess_env
clean_env = get_clean_subprocess_env()
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=False, env=clean_env, cwd=engine_dir)

# Output processing (copied from working code)
buffer = b''
@@ -481,14 +481,34 @@ class NativeSteamService:
Returns:
(success, app_id) - Success status and the AppID
"""
# Auto-detect best Proton version if none provided
# Use Game Proton from settings for shortcut creation (not Install Proton)
if proton_version is None:
try:
from jackify.backend.core.modlist_operations import _get_user_proton_version
proton_version = _get_user_proton_version()
logger.info(f"Auto-detected Proton version: {proton_version}")
from jackify.backend.handlers.config_handler import ConfigHandler
config_handler = ConfigHandler()
game_proton_path = config_handler.get_game_proton_path()

if game_proton_path and game_proton_path != 'auto':
# User has selected Game Proton - use it
proton_version = os.path.basename(game_proton_path)
# Convert to Steam format
if not proton_version.startswith('GE-Proton'):
proton_version = proton_version.lower().replace(' - ', '_').replace(' ', '_').replace('-', '_')
if not proton_version.startswith('proton'):
proton_version = f"proton_{proton_version}"
logger.info(f"Using Game Proton from settings: {proton_version}")
else:
# Fallback to auto-detect if Game Proton not set
from jackify.backend.handlers.wine_utils import WineUtils
best_proton = WineUtils.select_best_proton()
if best_proton:
proton_version = best_proton['name']
logger.info(f"Auto-detected Game Proton: {proton_version}")
else:
proton_version = "proton_experimental"
logger.warning("Failed to auto-detect Game Proton, falling back to experimental")
except Exception as e:
logger.warning(f"Failed to auto-detect Proton, falling back to experimental: {e}")
logger.warning(f"Failed to get Game Proton, falling back to experimental: {e}")
proton_version = "proton_experimental"

logger.info(f"Creating shortcut with Proton: '{app_name}' -> '{proton_version}'")
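To make the name conversion above concrete, a standalone sketch of the same string transformation (example inputs are hypothetical):

def to_steam_format(name: str) -> str:
    # Mirrors the hunk above: GE-Proton names pass through unchanged,
    # other Proton directory names are lowercased and underscored.
    if name.startswith('GE-Proton'):
        return name
    name = name.lower().replace(' - ', '_').replace(' ', '_').replace('-', '_')
    return name if name.startswith('proton') else f"proton_{name}"

print(to_steam_format('Proton - Experimental'))  # proton_experimental
print(to_steam_format('GE-Proton9-20'))          # GE-Proton9-20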
258 jackify/backend/services/nexus_auth_service.py (new file)
@@ -0,0 +1,258 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Nexus Authentication Service
Unified service for Nexus authentication using OAuth or API key fallback
"""

import logging
from typing import Optional, Tuple
from .nexus_oauth_service import NexusOAuthService
from ..handlers.oauth_token_handler import OAuthTokenHandler
from .api_key_service import APIKeyService

logger = logging.getLogger(__name__)


class NexusAuthService:
"""
Unified authentication service for Nexus Mods
Handles OAuth 2.0 (preferred) with API key fallback (legacy)
"""

def __init__(self):
"""Initialize authentication service"""
self.oauth_service = NexusOAuthService()
self.token_handler = OAuthTokenHandler()
self.api_key_service = APIKeyService()
logger.debug("NexusAuthService initialized")
def get_auth_token(self) -> Optional[str]:
"""
Get authentication token, preferring OAuth over API key

Returns:
Access token or API key, or None if no authentication available
"""
# Try OAuth first
oauth_token = self._get_oauth_token()
if oauth_token:
logger.debug("Using OAuth token for authentication")
return oauth_token

# Fall back to API key
api_key = self.api_key_service.get_saved_api_key()
if api_key:
logger.debug("Using API key for authentication (OAuth not available)")
return api_key

logger.warning("No authentication available (neither OAuth nor API key)")
return None
def _get_oauth_token(self) -> Optional[str]:
"""
Get OAuth access token, refreshing if needed

Returns:
Valid access token or None
"""
# Check if we have a stored token
if not self.token_handler.has_token():
logger.debug("No OAuth token stored")
return None

# Check if token is expired (15 minute buffer for long installs)
if self.token_handler.is_token_expired(buffer_minutes=15):
logger.info("OAuth token expiring soon, attempting refresh")

# Try to refresh
refresh_token = self.token_handler.get_refresh_token()
if refresh_token:
new_token_data = self.oauth_service.refresh_token(refresh_token)

if new_token_data:
# Save refreshed token
self.token_handler.save_token({'oauth': new_token_data})
logger.info("OAuth token refreshed successfully")
return new_token_data.get('access_token')
else:
logger.warning("Token refresh failed, OAuth token invalid")
# Delete invalid token
self.token_handler.delete_token()
return None
else:
logger.warning("No refresh token available")
return None

# Token is valid, return it
return self.token_handler.get_access_token()
def is_authenticated(self) -> bool:
"""
Check if user is authenticated via OAuth or API key

Returns:
True if authenticated
"""
return self.get_auth_token() is not None

def get_auth_method(self) -> Optional[str]:
"""
Get current authentication method

Returns:
'oauth', 'api_key', or None
"""
# Check OAuth first
oauth_token = self._get_oauth_token()
if oauth_token:
return 'oauth'

# Check API key
api_key = self.api_key_service.get_saved_api_key()
if api_key:
return 'api_key'

return None
def get_auth_status(self) -> Tuple[bool, str, Optional[str]]:
"""
Get detailed authentication status

Returns:
Tuple of (authenticated, method, username)
- authenticated: True if authenticated
- method: 'oauth', 'oauth_expired', 'api_key', or 'none'
- username: Username if available (OAuth only), or None
"""
# Check if OAuth token exists
if self.token_handler.has_token():
# Check if refresh token is likely expired (hasn't been refreshed in 30+ days)
token_info = self.token_handler.get_token_info()
if token_info.get('refresh_token_likely_expired'):
logger.warning("Refresh token likely expired (30+ days old), user should re-authorize")
return False, 'oauth_expired', None

# Try OAuth
oauth_token = self._get_oauth_token()
if oauth_token:
# Try to get username from userinfo
user_info = self.oauth_service.get_user_info(oauth_token)
username = user_info.get('name') if user_info else None
return True, 'oauth', username
elif self.token_handler.has_token():
# Had token but couldn't get valid access token (refresh failed)
logger.warning("OAuth token refresh failed, token may be invalid")
return False, 'oauth_expired', None

# Try API key
api_key = self.api_key_service.get_saved_api_key()
if api_key:
return True, 'api_key', None

return False, 'none', None
def authorize_oauth(self, show_browser_message_callback=None) -> bool:
"""
Perform OAuth authorization flow

Args:
show_browser_message_callback: Optional callback for browser messages

Returns:
True if authorization successful
"""
logger.info("Starting OAuth authorization")

token_data = self.oauth_service.authorize(show_browser_message_callback)

if token_data:
# Save token
success = self.token_handler.save_token({'oauth': token_data})
if success:
logger.info("OAuth authorization completed successfully")
return True
else:
logger.error("Failed to save OAuth token")
return False
else:
logger.error("OAuth authorization failed")
return False

def revoke_oauth(self) -> bool:
"""
Revoke OAuth authorization by deleting stored token

Returns:
True if revoked successfully
"""
logger.info("Revoking OAuth authorization")
return self.token_handler.delete_token()
def save_api_key(self, api_key: str) -> bool:
"""
Save API key (legacy fallback)

Args:
api_key: Nexus API key

Returns:
True if saved successfully
"""
return self.api_key_service.save_api_key(api_key)

def validate_api_key(self, api_key: Optional[str] = None) -> Tuple[bool, Optional[str]]:
"""
Validate API key against Nexus API

Args:
api_key: Optional API key to validate (uses stored if not provided)

Returns:
Tuple of (valid, username_or_error)
"""
return self.api_key_service.validate_api_key(api_key)
def ensure_valid_auth(self) -> Optional[str]:
"""
Ensure we have valid authentication, refreshing if needed
This should be called before any Nexus operation

Returns:
Valid auth token (OAuth access token or API key), or None
"""
auth_token = self.get_auth_token()

if not auth_token:
logger.warning("No authentication available for Nexus operation")

return auth_token

def get_auth_for_engine(self) -> Optional[str]:
"""
Get authentication token for jackify-engine
Same as ensure_valid_auth() - engine uses NEXUS_API_KEY env var for both OAuth and API keys
(This matches upstream Wabbajack behavior)

Returns:
Valid auth token to pass via NEXUS_API_KEY environment variable, or None
"""
return self.ensure_valid_auth()

def clear_all_auth(self) -> bool:
"""
Clear all authentication (both OAuth and API key)
Useful for testing or switching accounts

Returns:
True if any auth was cleared
"""
oauth_cleared = self.token_handler.delete_token()
api_key_cleared = self.api_key_service.clear_api_key()

if oauth_cleared or api_key_cleared:
logger.info("Cleared all Nexus authentication")
return True
else:
logger.debug("No authentication to clear")
return False
759 jackify/backend/services/nexus_oauth_service.py (new file)
@@ -0,0 +1,759 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Nexus OAuth Service
Handles OAuth 2.0 authentication flow with Nexus Mods using PKCE
"""

import os
import base64
import hashlib
import secrets
import webbrowser
import urllib.parse
from http.server import HTTPServer, BaseHTTPRequestHandler
import requests
import json
import threading
import ssl
import tempfile
import logging
import time
import subprocess
from typing import Optional, Tuple, Dict

logger = logging.getLogger(__name__)


class NexusOAuthService:
"""
Handles OAuth 2.0 authentication with Nexus Mods
Uses PKCE flow with system browser and localhost callback
"""

# OAuth Configuration
CLIENT_ID = "jackify"
AUTH_URL = "https://users.nexusmods.com/oauth/authorize"
TOKEN_URL = "https://users.nexusmods.com/oauth/token"
USERINFO_URL = "https://users.nexusmods.com/oauth/userinfo"
SCOPES = "public openid profile"

# Redirect configuration (custom protocol scheme - no SSL cert needed!)
# Requires jackify:// protocol handler to be registered with OS
REDIRECT_URI = "jackify://oauth/callback"

# Callback timeout (5 minutes)
CALLBACK_TIMEOUT = 300

def __init__(self):
"""Initialize OAuth service"""
self._auth_code = None
self._auth_state = None
self._auth_error = None
self._server_done = threading.Event()

# Ensure jackify:// protocol is registered on first use
self._ensure_protocol_registered()
def _generate_pkce_params(self) -> Tuple[str, str, str]:
"""
Generate PKCE code verifier, challenge, and state

Returns:
Tuple of (code_verifier, code_challenge, state)
"""
# Generate code verifier (43-128 characters, base64url encoded)
code_verifier = base64.urlsafe_b64encode(
os.urandom(32)
).decode('utf-8').rstrip('=')

# Generate code challenge (SHA256 hash of verifier, base64url encoded)
code_challenge = base64.urlsafe_b64encode(
hashlib.sha256(code_verifier.encode('utf-8')).digest()
).decode('utf-8').rstrip('=')

# Generate state for CSRF protection
state = secrets.token_urlsafe(32)

return code_verifier, code_challenge, state
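For context, the verifier and challenge generated above are linked by a single SHA-256 step; a minimal self-check sketch of that relationship:

import base64, hashlib, os

code_verifier = base64.urlsafe_b64encode(os.urandom(32)).decode().rstrip('=')
code_challenge = base64.urlsafe_b64encode(
    hashlib.sha256(code_verifier.encode()).digest()
).decode().rstrip('=')

# The authorization server later recomputes the challenge from the verifier
# sent to the token endpoint and compares it with the one sent during authorize.
recomputed = base64.urlsafe_b64encode(
    hashlib.sha256(code_verifier.encode()).digest()
).decode().rstrip('=')
assert recomputed == code_challenge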
def _ensure_protocol_registered(self) -> bool:
"""
Ensure jackify:// protocol is registered with the OS

Returns:
True if registration successful or already registered
"""
import subprocess
import sys
from pathlib import Path

if not sys.platform.startswith('linux'):
logger.debug("Protocol registration only needed on Linux")
return True

try:
# Ensure desktop file exists and has correct Exec path
desktop_file = Path.home() / ".local" / "share" / "applications" / "com.jackify.app.desktop"

# Get environment for AppImage detection
env = os.environ

# Determine executable path (DEV mode vs AppImage)
# Check multiple indicators for AppImage execution
is_appimage = (
getattr(sys, 'frozen', False) or # PyInstaller frozen
'APPIMAGE' in env or # AppImage environment variable
'APPDIR' in env or # AppImage directory variable
(sys.argv[0] and sys.argv[0].endswith('.AppImage')) # Executable name
)

if is_appimage:
# Running from AppImage - use the AppImage path directly
# CRITICAL: Never use -m flag in AppImage mode - it causes __main__.py windows
if 'APPIMAGE' in env:
# APPIMAGE env var gives us the exact path to the AppImage
exec_path = env['APPIMAGE']
logger.info(f"Using APPIMAGE env var: {exec_path}")
elif sys.argv[0] and Path(sys.argv[0]).exists():
# Use sys.argv[0] if it's a valid path
exec_path = str(Path(sys.argv[0]).resolve())
logger.info(f"Using resolved sys.argv[0]: {exec_path}")
else:
# Fallback to sys.argv[0] as-is
exec_path = sys.argv[0]
logger.warning(f"Using sys.argv[0] as fallback: {exec_path}")
else:
# Running from source (DEV mode)
# Need to ensure we run from the correct directory
src_dir = Path(__file__).parent.parent.parent.parent # Go up to src/
exec_path = f"cd {src_dir} && {sys.executable} -m jackify.frontends.gui"
logger.info(f"DEV mode exec path: {exec_path}")
logger.info(f"Source directory: {src_dir}")

# Check if desktop file needs creation or update
needs_update = False
if not desktop_file.exists():
needs_update = True
logger.info("Creating desktop file for protocol handler")
else:
# Check if Exec path matches current mode
current_content = desktop_file.read_text()
if f"Exec={exec_path} %u" not in current_content:
needs_update = True
logger.info(f"Updating desktop file with new Exec path: {exec_path}")

if needs_update:
desktop_file.parent.mkdir(parents=True, exist_ok=True)

# Build desktop file content with proper working directory
if is_appimage:
# AppImage doesn't need working directory
desktop_content = f"""[Desktop Entry]
Type=Application
Name=Jackify
Comment=Wabbajack modlist manager for Linux
Exec={exec_path} %u
Icon=com.jackify.app
Terminal=false
Categories=Game;Utility;
MimeType=x-scheme-handler/jackify;
"""
else:
# DEV mode needs working directory set to src/
# exec_path already contains the correct format: "cd {src_dir} && {sys.executable} -m jackify.frontends.gui"
src_dir = Path(__file__).parent.parent.parent.parent # Go up to src/
desktop_content = f"""[Desktop Entry]
Type=Application
Name=Jackify
Comment=Wabbajack modlist manager for Linux
Exec={exec_path} %u
Icon=com.jackify.app
Terminal=false
Categories=Game;Utility;
MimeType=x-scheme-handler/jackify;
Path={src_dir}
"""

desktop_file.write_text(desktop_content)
logger.info(f"Desktop file written: {desktop_file}")
logger.info(f"Exec path: {exec_path}")
logger.info(f"AppImage mode: {is_appimage}")
# Always ensure full registration (don't trust xdg-settings alone)
# PopOS/Ubuntu need mimeapps.list even if xdg-settings says registered
logger.info("Registering jackify:// protocol handler")

# Update MIME cache (required for Firefox dialog)
apps_dir = Path.home() / ".local" / "share" / "applications"
subprocess.run(
['update-desktop-database', str(apps_dir)],
capture_output=True,
timeout=10
)

# Set as default handler using xdg-mime (Firefox compatibility)
subprocess.run(
['xdg-mime', 'default', 'com.jackify.app.desktop', 'x-scheme-handler/jackify'],
capture_output=True,
timeout=10
)

# Also use xdg-settings as backup (some systems need both)
subprocess.run(
['xdg-settings', 'set', 'default-url-scheme-handler', 'jackify', 'com.jackify.app.desktop'],
capture_output=True,
timeout=10
)

# Manually ensure entry in mimeapps.list (PopOS/Ubuntu require this for GIO)
mimeapps_path = Path.home() / ".config" / "mimeapps.list"
try:
# Read existing content
if mimeapps_path.exists():
content = mimeapps_path.read_text()
else:
mimeapps_path.parent.mkdir(parents=True, exist_ok=True)
content = "[Default Applications]\n"

# Add jackify handler if not present
if 'x-scheme-handler/jackify=' not in content:
if '[Default Applications]' not in content:
content = "[Default Applications]\n" + content

# Insert after [Default Applications] line
lines = content.split('\n')
for i, line in enumerate(lines):
if line.strip() == '[Default Applications]':
lines.insert(i + 1, 'x-scheme-handler/jackify=com.jackify.app.desktop')
break

content = '\n'.join(lines)
mimeapps_path.write_text(content)
logger.info("Added jackify handler to mimeapps.list")
except Exception as e:
logger.warning(f"Failed to update mimeapps.list: {e}")

logger.info("jackify:// protocol registered successfully")
return True

except Exception as e:
logger.warning(f"Failed to register jackify:// protocol: {e}")
return False
def _generate_self_signed_cert(self) -> Tuple[Optional[str], Optional[str]]:
"""
Generate self-signed certificate for HTTPS localhost

Returns:
Tuple of (cert_file_path, key_file_path) or (None, None) on failure
"""
try:
from cryptography import x509
from cryptography.x509.oid import NameOID
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
import datetime
import ipaddress

logger.info("Generating self-signed certificate for OAuth callback")

# Generate private key
private_key = rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
)

# Create certificate
subject = issuer = x509.Name([
x509.NameAttribute(NameOID.COUNTRY_NAME, "US"),
x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Jackify"),
x509.NameAttribute(NameOID.COMMON_NAME, self.REDIRECT_HOST),
])

cert = x509.CertificateBuilder().subject_name(
subject
).issuer_name(
issuer
).public_key(
private_key.public_key()
).serial_number(
x509.random_serial_number()
).not_valid_before(
datetime.datetime.now(datetime.UTC)
).not_valid_after(
datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=365)
).add_extension(
x509.SubjectAlternativeName([
x509.IPAddress(ipaddress.IPv4Address(self.REDIRECT_HOST)),
]),
critical=False,
).sign(private_key, hashes.SHA256())

# Save to temp files
temp_dir = tempfile.mkdtemp()
cert_file = os.path.join(temp_dir, "oauth_cert.pem")
key_file = os.path.join(temp_dir, "oauth_key.pem")

with open(cert_file, "wb") as f:
f.write(cert.public_bytes(serialization.Encoding.PEM))

with open(key_file, "wb") as f:
f.write(private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()
))

return cert_file, key_file

except ImportError:
logger.error("cryptography package not installed - required for OAuth")
return None, None
except Exception as e:
logger.error(f"Failed to generate SSL certificate: {e}")
return None, None
def _build_authorization_url(self, code_challenge: str, state: str) -> str:
"""
Build OAuth authorization URL

Args:
code_challenge: PKCE code challenge
state: CSRF protection state

Returns:
Authorization URL
"""
params = {
'response_type': 'code',
'client_id': self.CLIENT_ID,
'redirect_uri': self.REDIRECT_URI,
'scope': self.SCOPES,
'code_challenge': code_challenge,
'code_challenge_method': 'S256',
'state': state
}

return f"{self.AUTH_URL}?{urllib.parse.urlencode(params)}"
def _create_callback_handler(self):
"""Create HTTP request handler class for OAuth callback"""
service = self

class OAuthCallbackHandler(BaseHTTPRequestHandler):
"""HTTP request handler for OAuth callback"""

def log_message(self, format, *args):
"""Log OAuth callback requests"""
logger.debug(f"OAuth callback: {format % args}")

def do_GET(self):
"""Handle GET request from OAuth redirect"""
logger.info(f"OAuth callback received: {self.path}")

# Parse query parameters
parsed = urllib.parse.urlparse(self.path)
params = urllib.parse.parse_qs(parsed.query)

# Ignore favicon and other non-OAuth requests
if parsed.path == '/favicon.ico':
self.send_response(404)
self.end_headers()
return

if 'code' in params:
service._auth_code = params['code'][0]
service._auth_state = params.get('state', [None])[0]
logger.info(f"OAuth authorization code received: {service._auth_code[:10]}...")

# Send success response
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()

html = """
<html>
<head><title>Authorization Successful</title></head>
<body style="font-family: Arial, sans-serif; text-align: center; padding: 50px;">
<h1>Authorization Successful!</h1>
<p>You can close this window and return to Jackify.</p>
<script>setTimeout(function() { window.close(); }, 3000);</script>
</body>
</html>
"""
self.wfile.write(html.encode())

elif 'error' in params:
service._auth_error = params['error'][0]
error_desc = params.get('error_description', ['Unknown error'])[0]

# Send error response
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()

html = f"""
<html>
<head><title>Authorization Failed</title></head>
<body style="font-family: Arial, sans-serif; text-align: center; padding: 50px;">
<h1>Authorization Failed</h1>
<p>Error: {service._auth_error}</p>
<p>{error_desc}</p>
<p>You can close this window and try again in Jackify.</p>
</body>
</html>
"""
self.wfile.write(html.encode())
else:
# Unexpected callback format
logger.warning(f"OAuth callback with no code or error: {params}")
self.send_response(400)
self.send_header('Content-type', 'text/html')
self.end_headers()
html = """
<html>
<head><title>Invalid Request</title></head>
<body style="font-family: Arial, sans-serif; text-align: center; padding: 50px;">
<h1>Invalid OAuth Callback</h1>
<p>You can close this window.</p>
</body>
</html>
"""
self.wfile.write(html.encode())

# Signal server to shut down
service._server_done.set()
logger.debug("OAuth callback handler signaled server to shut down")

return OAuthCallbackHandler
def _wait_for_callback(self) -> bool:
|
||||
"""
|
||||
Wait for OAuth callback via jackify:// protocol handler
|
||||
|
||||
Returns:
|
||||
True if callback received, False on timeout
|
||||
"""
|
||||
from pathlib import Path
|
||||
import time
|
||||
|
||||
callback_file = Path.home() / ".config" / "jackify" / "oauth_callback.tmp"
|
||||
|
||||
# Delete any old callback file
|
||||
if callback_file.exists():
|
||||
callback_file.unlink()
|
||||
|
||||
logger.info("Waiting for OAuth callback via jackify:// protocol")
|
||||
|
||||
# Poll for callback file with periodic user feedback
|
||||
start_time = time.time()
|
||||
last_reminder = 0
|
||||
while (time.time() - start_time) < self.CALLBACK_TIMEOUT:
|
||||
if callback_file.exists():
|
||||
try:
|
||||
# Read callback data
|
||||
lines = callback_file.read_text().strip().split('\n')
|
||||
if len(lines) >= 2:
|
||||
self._auth_code = lines[0]
|
||||
self._auth_state = lines[1]
|
||||
logger.info(f"OAuth callback received: code={self._auth_code[:10]}...")
|
||||
|
||||
# Clean up
|
||||
callback_file.unlink()
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to read callback file: {e}")
|
||||
return False
|
||||
|
||||
# Show periodic reminder about protocol handler
|
||||
elapsed = time.time() - start_time
|
||||
if elapsed - last_reminder > 30: # Every 30 seconds
|
||||
logger.info(f"Still waiting for OAuth callback... ({int(elapsed)}s elapsed)")
|
||||
if elapsed > 60:
|
||||
logger.warning(
|
||||
"If you see a blank browser tab or popup blocker, "
|
||||
"check for browser notifications asking to 'Open Jackify'"
|
||||
)
|
||||
last_reminder = elapsed
|
||||
|
||||
time.sleep(0.5) # Poll every 500ms
|
||||
|
||||
logger.error(f"OAuth callback timeout after {self.CALLBACK_TIMEOUT} seconds")
|
||||
logger.error(
|
||||
"Protocol handler may not be working. Check:\n"
|
||||
" 1. Browser asked 'Open Jackify?' and you clicked Allow\n"
|
||||
" 2. No popup blocker notifications\n"
|
||||
" 3. Desktop file exists: ~/.local/share/applications/com.jackify.app.desktop"
|
||||
)
|
||||
return False
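
    # Hedged sketch of the producer side that _wait_for_callback() polls for,
    # assuming the jackify:// protocol handler is invoked with a URI such as
    # "jackify://oauth?code=...&state=...". The method name and URI layout are
    # illustrative assumptions, not part of the shipped service.
    @staticmethod
    def example_write_callback_file(uri: str) -> None:
        from pathlib import Path
        from urllib.parse import urlparse, parse_qs

        params = parse_qs(urlparse(uri).query)
        callback_file = Path.home() / ".config" / "jackify" / "oauth_callback.tmp"
        callback_file.parent.mkdir(parents=True, exist_ok=True)
        # Two lines, auth code first and state second, matching the reader above
        callback_file.write_text(
            f"{params.get('code', [''])[0]}\n{params.get('state', [''])[0]}"
        )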

    def _send_desktop_notification(self, title: str, message: str):
        """
        Send desktop notification if available

        Args:
            title: Notification title
            message: Notification message
        """
        try:
            # Try notify-send (Linux)
            subprocess.run(
                ['notify-send', title, message],
                check=False,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                timeout=2
            )
        except (FileNotFoundError, subprocess.TimeoutExpired):
            pass

    def _exchange_code_for_token(
        self,
        auth_code: str,
        code_verifier: str
    ) -> Optional[Dict]:
        """
        Exchange authorization code for access token

        Args:
            auth_code: Authorization code from callback
            code_verifier: PKCE code verifier

        Returns:
            Token response dict or None on failure
        """
        data = {
            'grant_type': 'authorization_code',
            'client_id': self.CLIENT_ID,
            'redirect_uri': self.REDIRECT_URI,
            'code': auth_code,
            'code_verifier': code_verifier
        }

        try:
            response = requests.post(self.TOKEN_URL, data=data, timeout=10)

            if response.status_code == 200:
                token_data = response.json()
                logger.info("Successfully exchanged authorization code for token")
                return token_data
            else:
                logger.error(f"Token exchange failed: {response.status_code} - {response.text}")
                return None

        except requests.RequestException as e:
            logger.error(f"Token exchange request failed: {e}")
            return None
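
    # Hedged sketch of RFC 7636 (PKCE, S256) parameter generation consistent with
    # the code_verifier/code_challenge pair consumed by _exchange_code_for_token().
    # The real _generate_pkce_params() is not shown in this diff, so this is an
    # assumption about its behaviour, not a copy of it.
    @staticmethod
    def example_generate_pkce_params() -> tuple:
        import base64
        import hashlib
        import secrets

        # URL-safe verifier without padding; challenge = BASE64URL(SHA256(verifier))
        code_verifier = base64.urlsafe_b64encode(secrets.token_bytes(32)).rstrip(b'=').decode()
        digest = hashlib.sha256(code_verifier.encode('ascii')).digest()
        code_challenge = base64.urlsafe_b64encode(digest).rstrip(b'=').decode()
        state = secrets.token_urlsafe(16)
        return code_verifier, code_challenge, state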

    def refresh_token(self, refresh_token: str) -> Optional[Dict]:
        """
        Refresh an access token using refresh token

        Args:
            refresh_token: Refresh token from previous authentication

        Returns:
            New token response dict or None on failure
        """
        data = {
            'grant_type': 'refresh_token',
            'client_id': self.CLIENT_ID,
            'refresh_token': refresh_token
        }

        try:
            response = requests.post(self.TOKEN_URL, data=data, timeout=10)

            if response.status_code == 200:
                token_data = response.json()
                logger.info("Successfully refreshed access token")
                return token_data
            else:
                logger.error(f"Token refresh failed: {response.status_code} - {response.text}")
                return None

        except requests.RequestException as e:
            logger.error(f"Token refresh request failed: {e}")
            return None
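
    # Hedged usage sketch: refresh an expiring token and fall back to a full
    # re-authorisation if the refresh is rejected. The 'refresh_token' key name
    # is an assumption based on a standard OAuth token response; it is not
    # confirmed by this diff.
    def example_ensure_fresh_token(self, token_data: Dict) -> Optional[Dict]:
        refreshed = self.refresh_token(token_data.get('refresh_token', ''))
        if refreshed:
            return refreshed
        logger.info("Token refresh failed - falling back to full authorisation flow")
        return self.authorize()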

    def get_user_info(self, access_token: str) -> Optional[Dict]:
        """
        Get user information using access token

        Args:
            access_token: OAuth access token

        Returns:
            User info dict or None on failure
        """
        headers = {
            'Authorization': f'Bearer {access_token}'
        }

        try:
            response = requests.get(self.USERINFO_URL, headers=headers, timeout=10)

            if response.status_code == 200:
                user_info = response.json()
                logger.info(f"Retrieved user info for: {user_info.get('name', 'unknown')}")
                return user_info
            else:
                logger.error(f"User info request failed: {response.status_code}")
                return None

        except requests.RequestException as e:
            logger.error(f"User info request failed: {e}")
            return None

    def authorize(self, show_browser_message_callback=None) -> Optional[Dict]:
        """
        Perform full OAuth authorization flow

        Args:
            show_browser_message_callback: Optional callback to display message about browser opening

        Returns:
            Token response dict or None on failure
        """
        logger.info("Starting Nexus OAuth authorization flow")

        # Reset state
        self._auth_code = None
        self._auth_state = None
        self._auth_error = None
        self._server_done.clear()

        # Generate PKCE parameters
        code_verifier, code_challenge, state = self._generate_pkce_params()
        logger.debug(f"Generated PKCE parameters (state: {state[:10]}...)")

        # Build authorization URL
        auth_url = self._build_authorization_url(code_challenge, state)

        # Open browser
        logger.info("Opening browser for authorisation")

        try:
            # When running from AppImage, we need to clean the environment to avoid
            # library conflicts with system tools (xdg-open, kde-open, etc.)
            import os
            import subprocess

            env = os.environ.copy()

            # Remove AppImage-specific environment variables that can cause conflicts
            # These variables inject AppImage's bundled libraries into child processes
            appimage_vars = [
                'LD_LIBRARY_PATH',
                'PYTHONPATH',
                'PYTHONHOME',
                'QT_PLUGIN_PATH',
                'QML2_IMPORT_PATH',
            ]

            # Check if we're running from AppImage
            if 'APPIMAGE' in env or 'APPDIR' in env:
                logger.debug("Running from AppImage - cleaning environment for browser launch")
                for var in appimage_vars:
                    if var in env:
                        del env[var]
                        logger.debug(f"Removed {var} from browser environment")

            # Use Popen instead of run to avoid waiting for browser to close
            # xdg-open may not return until the browser closes, which could be never
            try:
                process = subprocess.Popen(
                    ['xdg-open', auth_url],
                    env=env,
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL,
                    start_new_session=True  # Detach from parent process
                )
                # Give it a moment to fail if it's going to fail
                import time
                time.sleep(0.5)

                # Check if process is still running or has exited successfully
                poll_result = process.poll()
                if poll_result is None:
                    # Process still running - browser is opening/open
                    logger.info("Browser opened successfully via xdg-open (process running)")
                    browser_opened = True
                elif poll_result == 0:
                    # Process exited successfully
                    logger.info("Browser opened successfully via xdg-open (exit code 0)")
                    browser_opened = True
                else:
                    # Process exited with error
                    logger.warning(f"xdg-open exited with code {poll_result}, trying webbrowser module")
                    if webbrowser.open(auth_url):
                        logger.info("Browser opened successfully via webbrowser module")
                        browser_opened = True
                    else:
                        logger.warning("webbrowser.open returned False")
                        browser_opened = False
            except FileNotFoundError:
                # xdg-open not found - try webbrowser module
                logger.warning("xdg-open not found, trying webbrowser module")
                if webbrowser.open(auth_url):
                    logger.info("Browser opened successfully via webbrowser module")
                    browser_opened = True
                else:
                    logger.warning("webbrowser.open returned False")
                    browser_opened = False
        except Exception as e:
            logger.error(f"Error opening browser: {e}")
            browser_opened = False

        # Send desktop notification
        self._send_desktop_notification(
            "Jackify - Nexus Authorisation",
            "Please check your browser to authorise Jackify"
        )

        # Show message via callback if provided (AFTER browser opens)
        if show_browser_message_callback:
            if browser_opened:
                show_browser_message_callback(
                    "Browser opened for Nexus authorisation.\n\n"
                    "After clicking 'Authorize', your browser may ask to\n"
                    "open Jackify or show a popup blocker notification.\n\n"
                    "Please click 'Open' or 'Allow' to complete authorization."
                )
            else:
                show_browser_message_callback(
                    f"Could not open browser automatically.\n\n"
                    f"Please open this URL manually:\n{auth_url}"
                )

        # Wait for callback via jackify:// protocol
        if not self._wait_for_callback():
            return None

        # Check for errors
        if self._auth_error:
            logger.error(f"Authorization failed: {self._auth_error}")
            return None

        if not self._auth_code:
            logger.error("No authorization code received")
            return None

        # Verify state matches
        if self._auth_state != state:
            logger.error("State mismatch - possible CSRF attack")
            return None

        logger.info("Authorization code received, exchanging for token")

        # Exchange code for token
        token_data = self._exchange_code_for_token(self._auth_code, code_verifier)

        if token_data:
            logger.info("OAuth authorization flow completed successfully")
        else:
            logger.error("Failed to exchange authorization code for token")

        return token_data
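
    # Hedged usage sketch, not part of the shipped service: one plausible way a
    # caller could drive the flow end to end. The 'access_token' key name is an
    # assumption based on a standard OAuth token response.
    def example_login_and_fetch_profile(self) -> Optional[Dict]:
        token_data = self.authorize()
        if not token_data:
            return None
        user_info = self.get_user_info(token_data.get('access_token', ''))
        if user_info:
            logger.info(f"Authenticated to Nexus as {user_info.get('name', 'unknown')}")
        return user_info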

@@ -6,8 +6,11 @@ Centralized service for detecting and managing protontricks installation across
"""

import logging
import os
import shutil
import subprocess
import sys
import importlib.util
from typing import Optional, Tuple
from ..handlers.protontricks_handler import ProtontricksHandler
from ..handlers.config_handler import ConfigHandler
@@ -44,11 +47,11 @@ class ProtontricksDetectionService:

    def detect_protontricks(self, use_cache: bool = True) -> Tuple[bool, str, str]:
        """
        Detect if protontricks is installed and get installation details
        Detect if system protontricks is installed and get installation details

        Args:
            use_cache (bool): Whether to use cached detection result

        Returns:
            Tuple[bool, str, str]: (is_installed, installation_type, details_message)
            - is_installed: True if protontricks is available
@@ -82,7 +85,7 @@ class ProtontricksDetectionService:
                details_message = "Protontricks is installed (unknown type)"
            else:
                installation_type = 'none'
                details_message = "Protontricks not found - required for Jackify functionality"
                details_message = "Protontricks not found - install via flatpak or package manager"

        # Cache the result
        self._last_detection_result = (is_installed, installation_type, details_message)
@@ -93,55 +96,22 @@ class ProtontricksDetectionService:

    def _detect_without_prompts(self, handler: ProtontricksHandler) -> bool:
        """
        Detect protontricks without user prompts or installation attempts
        Detect system protontricks (flatpak or native) without user prompts.

        Args:
            handler (ProtontricksHandler): Handler instance to use

        Returns:
            bool: True if protontricks is found
            bool: True if system protontricks is found
        """
        # Use the handler's silent detection method
        return handler.detect_protontricks()

    def is_bundled_mode(self) -> bool:
        """
        DEPRECATED: Bundled protontricks no longer supported.
        Always returns False for backwards compatibility.
        """
        import shutil

        # Check if protontricks exists as a command
        protontricks_path_which = shutil.which("protontricks")

        if protontricks_path_which:
            # Check if it's a flatpak wrapper
            try:
                with open(protontricks_path_which, 'r') as f:
                    content = f.read()
                if "flatpak run" in content:
                    logger.debug(f"Detected Protontricks is a Flatpak wrapper at {protontricks_path_which}")
                    handler.which_protontricks = 'flatpak'
                    # Continue to check flatpak list just to be sure
                else:
                    logger.info(f"Native Protontricks found at {protontricks_path_which}")
                    handler.which_protontricks = 'native'
                    handler.protontricks_path = protontricks_path_which
                    return True
            except Exception as e:
                logger.error(f"Error reading protontricks executable: {e}")

        # Check if flatpak protontricks is installed
        try:
            env = handler._get_clean_subprocess_env()
            result = subprocess.run(
                ["flatpak", "list"],
                stdout=subprocess.PIPE,
                stderr=subprocess.DEVNULL,  # Suppress stderr to avoid error messages
                text=True,
                env=env
            )
            if result.returncode == 0 and "com.github.Matoking.protontricks" in result.stdout:
                logger.info("Flatpak Protontricks is installed")
                handler.which_protontricks = 'flatpak'
                return True
        except FileNotFoundError:
            logger.warning("'flatpak' command not found. Cannot check for Flatpak Protontricks.")
        except Exception as e:
            logger.error(f"Unexpected error checking flatpak: {e}")

        return False
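
    # Hedged usage sketch: how a caller might branch on the detection tuple
    # returned by detect_protontricks(). 'flatpak' and 'none' appear in this
    # diff; treating any other non-empty type as usable is an assumption.
    def example_report_detection(self) -> bool:
        is_installed, installation_type, details_message = self.detect_protontricks()
        logger.info(f"Protontricks detection: {details_message}")
        if not is_installed or installation_type == 'none':
            return False
        return True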

    def install_flatpak_protontricks(self) -> Tuple[bool, str]:

@@ -10,42 +10,82 @@ from typing import Callable, Optional

logger = logging.getLogger(__name__)

STRATEGY_JACKIFY = "jackify"
STRATEGY_NAK_SIMPLE = "nak_simple"


def _get_restart_strategy() -> str:
    """Read restart strategy from config with safe fallback."""
    try:
        from jackify.backend.handlers.config_handler import ConfigHandler

        strategy = ConfigHandler().get("steam_restart_strategy", STRATEGY_JACKIFY)
        if strategy not in (STRATEGY_JACKIFY, STRATEGY_NAK_SIMPLE):
            return STRATEGY_JACKIFY
        return strategy
    except Exception as exc:  # pragma: no cover - defensive logging only
        logger.debug(f"Steam restart: Unable to read strategy from config: {exc}")
        return STRATEGY_JACKIFY


def _strategy_label(strategy: str) -> str:
    if strategy == STRATEGY_NAK_SIMPLE:
        return "NaK simple restart"
    return "Jackify hardened restart"

def _get_clean_subprocess_env():
    """
    Create a clean environment for subprocess calls by removing PyInstaller-specific
    environment variables that can interfere with Steam execution.
    Create a clean environment for subprocess calls by stripping bundle-specific
    environment variables (e.g., frozen AppImage remnants) that can interfere with Steam.

    CRITICAL: Preserves all display/session variables that Steam needs for GUI:
    - DISPLAY, WAYLAND_DISPLAY, XDG_SESSION_TYPE, DBUS_SESSION_BUS_ADDRESS,
      XDG_RUNTIME_DIR, XAUTHORITY, etc.

    Returns:
        dict: Cleaned environment dictionary
        dict: Cleaned environment dictionary with GUI variables preserved
    """
    env = os.environ.copy()
    pyinstaller_vars_removed = []
    bundle_vars_removed = []

    # Remove PyInstaller-specific environment variables
    # CRITICAL: Preserve display/session variables that Steam GUI needs
    # These MUST be kept for Steam to open its GUI window
    gui_vars_to_preserve = [
        'DISPLAY', 'WAYLAND_DISPLAY', 'XDG_SESSION_TYPE', 'DBUS_SESSION_BUS_ADDRESS',
        'XDG_RUNTIME_DIR', 'XAUTHORITY', 'XDG_CURRENT_DESKTOP', 'XDG_SESSION_DESKTOP',
        'QT_QPA_PLATFORM', 'GDK_BACKEND', 'XDG_DATA_DIRS', 'XDG_CONFIG_DIRS'
    ]
    preserved_gui_vars = {}
    for var in gui_vars_to_preserve:
        if var in env:
            preserved_gui_vars[var] = env[var]
            logger.debug(f"Steam restart: Preserving GUI variable {var}={env[var][:50] if len(str(env[var])) > 50 else env[var]}")

    # Remove bundle-specific environment variables
    if env.pop('_MEIPASS', None):
        pyinstaller_vars_removed.append('_MEIPASS')
        bundle_vars_removed.append('_MEIPASS')
    if env.pop('_MEIPASS2', None):
        pyinstaller_vars_removed.append('_MEIPASS2')
        bundle_vars_removed.append('_MEIPASS2')

    # Clean library path variables that PyInstaller modifies (Linux/Unix)
    # Clean library path variables that frozen bundles modify (Linux/Unix)
    if 'LD_LIBRARY_PATH_ORIG' in env:
        # Restore original LD_LIBRARY_PATH if it was backed up by PyInstaller
        # Restore original LD_LIBRARY_PATH if it was backed up by the bundler
        env['LD_LIBRARY_PATH'] = env['LD_LIBRARY_PATH_ORIG']
        pyinstaller_vars_removed.append('LD_LIBRARY_PATH (restored from _ORIG)')
        bundle_vars_removed.append('LD_LIBRARY_PATH (restored from _ORIG)')
    else:
        # Remove PyInstaller-modified LD_LIBRARY_PATH
        # Remove modified LD_LIBRARY_PATH entries
        if env.pop('LD_LIBRARY_PATH', None):
            pyinstaller_vars_removed.append('LD_LIBRARY_PATH (removed)')
            bundle_vars_removed.append('LD_LIBRARY_PATH (removed)')

    # Clean PATH of PyInstaller-specific entries
    # Clean PATH of bundle-specific entries
    if 'PATH' in env and hasattr(sys, '_MEIPASS'):
        path_entries = env['PATH'].split(os.pathsep)
        original_count = len(path_entries)
        # Remove any PATH entries that point to PyInstaller temp directory
        # Remove any PATH entries that point to the bundle's temp directory
        cleaned_path = [p for p in path_entries if not p.startswith(sys._MEIPASS)]
        env['PATH'] = os.pathsep.join(cleaned_path)
        if len(cleaned_path) < original_count:
            pyinstaller_vars_removed.append(f'PATH (removed {original_count - len(cleaned_path)} PyInstaller entries)')
            bundle_vars_removed.append(f'PATH (removed {original_count - len(cleaned_path)} bundle entries)')

    # Clean macOS library path (if present)
    if 'DYLD_LIBRARY_PATH' in env and hasattr(sys, '_MEIPASS'):
@@ -53,16 +93,26 @@ def _get_clean_subprocess_env():
        cleaned_dyld = [p for p in dyld_entries if not p.startswith(sys._MEIPASS)]
        if cleaned_dyld:
            env['DYLD_LIBRARY_PATH'] = os.pathsep.join(cleaned_dyld)
            pyinstaller_vars_removed.append('DYLD_LIBRARY_PATH (cleaned)')
            bundle_vars_removed.append('DYLD_LIBRARY_PATH (cleaned)')
        else:
            env.pop('DYLD_LIBRARY_PATH', None)
            pyinstaller_vars_removed.append('DYLD_LIBRARY_PATH (removed)')
            bundle_vars_removed.append('DYLD_LIBRARY_PATH (removed)')

    # Ensure GUI variables are still present (they should be, but double-check)
    for var, value in preserved_gui_vars.items():
        if var not in env:
            env[var] = value
            logger.warning(f"Steam restart: Restored GUI variable {var} that was accidentally removed")

    # Log what was cleaned for debugging
    if pyinstaller_vars_removed:
        logger.debug(f"Steam restart: Cleaned PyInstaller environment variables: {', '.join(pyinstaller_vars_removed)}")
    if bundle_vars_removed:
        logger.debug(f"Steam restart: Cleaned bundled environment variables: {', '.join(bundle_vars_removed)}")
    else:
        logger.debug("Steam restart: No PyInstaller environment variables detected (likely DEV mode)")
        logger.debug("Steam restart: No bundled environment variables detected (likely DEV mode)")

    # Log preserved GUI variables for debugging
    if preserved_gui_vars:
        logger.debug(f"Steam restart: Preserved {len(preserved_gui_vars)} GUI environment variables")

    return env

@@ -138,22 +188,99 @@ def wait_for_steam_exit(timeout: int = 60, check_interval: float = 0.5) -> bool:
        time.sleep(check_interval)
    return False

def start_steam() -> bool:
    """Attempt to start Steam using the exact methods from existing working logic."""
    env = _get_clean_subprocess_env()
def _start_steam_nak_style(is_steamdeck_flag=False, is_flatpak_flag=False, env_override=None) -> bool:
    """
    Start Steam using a simplified NaK-style restart (single command, no env cleanup).

    CRITICAL: Do NOT use start_new_session - Steam needs to inherit the session
    to connect to display/tray. Ensure all GUI environment variables are preserved.
    """
    env = env_override if env_override is not None else os.environ.copy()

    # Log critical GUI variables for debugging
    gui_vars = ['DISPLAY', 'WAYLAND_DISPLAY', 'XDG_SESSION_TYPE', 'DBUS_SESSION_BUS_ADDRESS', 'XDG_RUNTIME_DIR']
    for var in gui_vars:
        if var in env:
            logger.debug(f"NaK-style restart: {var}={env[var][:50] if len(str(env[var])) > 50 else env[var]}")
        else:
            logger.warning(f"NaK-style restart: {var} is NOT SET - Steam GUI may fail!")

    try:
        if is_steamdeck_flag:
            logger.info("NaK-style restart: Steam Deck detected, restarting via systemctl.")
            subprocess.Popen(["systemctl", "--user", "restart", "app-steam@autostart.service"], env=env)
        elif is_flatpak_flag:
            logger.info("NaK-style restart: Flatpak Steam detected, running flatpak command.")
            subprocess.Popen(["flatpak", "run", "com.valvesoftware.Steam"],
                             env=env, stderr=subprocess.DEVNULL)
        else:
            logger.info("NaK-style restart: launching Steam directly (inheriting session for GUI).")
            # NaK uses simple "steam" command without -foreground flag
            # Do NOT use start_new_session - Steam needs session access for GUI
            # Use shell=True to ensure proper environment inheritance
            # This helps with GUI display access on some systems
            subprocess.Popen("steam", shell=True, env=env)

        time.sleep(5)
        check_result = subprocess.run(['pgrep', '-f', 'steam'], capture_output=True, timeout=10, env=env)
        if check_result.returncode == 0:
            logger.info("NaK-style restart detected running Steam process.")
            return True

        logger.warning("NaK-style restart did not detect Steam process after launch.")
        return False
    except FileNotFoundError as exc:
        logger.error(f"NaK-style restart command not found: {exc}")
        return False
    except Exception as exc:
        logger.error(f"NaK-style restart encountered an error: {exc}")
        return False


def start_steam(is_steamdeck_flag=None, is_flatpak_flag=None, env_override=None, strategy: str = STRATEGY_JACKIFY) -> bool:
    """
    Attempt to start Steam using the exact methods from existing working logic.

    Args:
        is_steamdeck_flag: Optional pre-detected Steam Deck status
        is_flatpak_flag: Optional pre-detected Flatpak Steam status
        env_override: Optional environment dictionary for subprocess calls
        strategy: Restart strategy identifier
    """
    if strategy == STRATEGY_NAK_SIMPLE:
        return _start_steam_nak_style(
            is_steamdeck_flag=is_steamdeck_flag,
            is_flatpak_flag=is_flatpak_flag,
            env_override=env_override or os.environ.copy(),
        )

    env = env_override if env_override is not None else _get_clean_subprocess_env()

    # Use provided flags or detect
    _is_steam_deck = is_steamdeck_flag if is_steamdeck_flag is not None else is_steam_deck()
    _is_flatpak = is_flatpak_flag if is_flatpak_flag is not None else is_flatpak_steam()
    logger.info(
        "Starting Steam (strategy=%s, steam_deck=%s, flatpak=%s)",
        strategy,
        _is_steam_deck,
        _is_flatpak,
    )

    try:
        # Try systemd user service (Steam Deck) - HIGHEST PRIORITY
        if is_steam_deck():
        if _is_steam_deck:
            logger.debug("Using systemctl restart for Steam Deck.")
            subprocess.Popen(["systemctl", "--user", "restart", "app-steam@autostart.service"], env=env)
            return True

        # Check if Flatpak Steam (only if not Steam Deck)
        if is_flatpak_steam():
        if _is_flatpak:
            logger.info("Flatpak Steam detected - using flatpak run command")
            try:
                # Redirect flatpak's stderr to suppress "app not installed" errors on systems without flatpak Steam
                # Steam's own stdout/stderr will still go through (flatpak forwards them)
                subprocess.Popen(["flatpak", "run", "com.valvesoftware.Steam", "-silent"],
                # Use -foreground to ensure GUI opens (not -silent)
                # CRITICAL: Do NOT use start_new_session - Steam needs to inherit the session
                logger.debug("Executing: flatpak run com.valvesoftware.Steam -foreground (inheriting session for GUI)")
                subprocess.Popen(["flatpak", "run", "com.valvesoftware.Steam", "-foreground"],
                                 env=env, stderr=subprocess.DEVNULL)
                time.sleep(5)
                check_result = subprocess.run(['pgrep', '-f', 'steam'], capture_output=True, timeout=10, env=env)
@@ -161,18 +288,15 @@ def start_steam() -> bool:
                    logger.info("Flatpak Steam process detected after start.")
                    return True
                else:
                    logger.warning("Flatpak Steam process not detected after start attempt.")
                    return False
                    logger.warning("Flatpak Steam start failed, falling back to normal Steam start methods")
            except Exception as e:
                logger.error(f"Error starting Flatpak Steam: {e}")
                return False
                logger.warning(f"Flatpak Steam start failed ({e}), falling back to normal Steam start methods")

        # Use startup methods with only -silent flag (no -minimized or -no-browser)
        # Don't redirect stdout/stderr or use start_new_session to allow Steam to connect to display/tray
        # Use startup methods with -foreground flag to ensure GUI opens
        start_methods = [
            {"name": "Popen", "cmd": ["steam", "-silent"], "kwargs": {"env": env}},
            {"name": "setsid", "cmd": ["setsid", "steam", "-silent"], "kwargs": {"env": env}},
            {"name": "nohup", "cmd": ["nohup", "steam", "-silent"], "kwargs": {"preexec_fn": os.setpgrp, "env": env}}
            {"name": "Popen", "cmd": ["steam", "-foreground"], "kwargs": {"stdout": subprocess.DEVNULL, "stderr": subprocess.DEVNULL, "stdin": subprocess.DEVNULL, "start_new_session": True, "env": env}},
            {"name": "setsid", "cmd": ["setsid", "steam", "-foreground"], "kwargs": {"stdout": subprocess.DEVNULL, "stderr": subprocess.DEVNULL, "stdin": subprocess.DEVNULL, "env": env}},
            {"name": "nohup", "cmd": ["nohup", "steam", "-foreground"], "kwargs": {"stdout": subprocess.DEVNULL, "stderr": subprocess.DEVNULL, "stdin": subprocess.DEVNULL, "start_new_session": True, "preexec_fn": os.setpgrp, "env": env}}
        ]

        for method in start_methods:
@@ -201,36 +325,48 @@ def start_steam() -> bool:
        logger.error(f"Error starting Steam: {e}")
        return False

def robust_steam_restart(progress_callback: Optional[Callable[[str], None]] = None, timeout: int = 60) -> bool:
def robust_steam_restart(progress_callback: Optional[Callable[[str], None]] = None, timeout: int = 60, system_info=None) -> bool:
    """
    Robustly restart Steam across all distros. Returns True on success, False on failure.
    Optionally accepts a progress_callback(message: str) for UI feedback.
    Uses aggressive pkill approach for maximum reliability.

    Args:
        progress_callback: Optional callback for progress updates
        timeout: Timeout in seconds for restart operation
        system_info: Optional SystemInfo object with pre-detected Steam installation types
    """
    env = _get_clean_subprocess_env()

    shutdown_env = _get_clean_subprocess_env()
    strategy = _get_restart_strategy()
    start_env = shutdown_env if strategy == STRATEGY_JACKIFY else os.environ.copy()

    # Use cached detection from system_info if available, otherwise detect
    _is_steam_deck = system_info.is_steamdeck if system_info else is_steam_deck()
    _is_flatpak = system_info.is_flatpak_steam if system_info else is_flatpak_steam()

    def report(msg):
        logger.info(msg)
        if progress_callback:
            progress_callback(msg)

    report("Shutting down Steam...")
    report(f"Steam restart strategy: {_strategy_label(strategy)}")

    # Steam Deck: Use systemctl for shutdown (special handling) - HIGHEST PRIORITY
    if is_steam_deck():
    if _is_steam_deck:
        try:
            report("Steam Deck detected - using systemctl shutdown...")
            subprocess.run(['systemctl', '--user', 'stop', 'app-steam@autostart.service'],
                           timeout=15, check=False, capture_output=True, env=env)
                           timeout=15, check=False, capture_output=True, env=shutdown_env)
            time.sleep(2)
        except Exception as e:
            logger.debug(f"systemctl stop failed on Steam Deck: {e}")
    # Flatpak Steam: Use flatpak kill command (only if not Steam Deck)
    elif is_flatpak_steam():
    elif _is_flatpak:
        try:
            report("Flatpak Steam detected - stopping via flatpak...")
            subprocess.run(['flatpak', 'kill', 'com.valvesoftware.Steam'],
                           timeout=15, check=False, capture_output=True, stderr=subprocess.DEVNULL, env=env)
                           timeout=15, check=False, capture_output=True, stderr=subprocess.DEVNULL, env=shutdown_env)
            time.sleep(2)
        except Exception as e:
            logger.debug(f"flatpak kill failed: {e}")
@@ -238,21 +374,21 @@ def robust_steam_restart(progress_callback: Optional[Callable[[str], None]] = No
    # All systems: Use pkill approach (proven 15/16 test success rate)
    try:
        # Skip unreliable steam -shutdown, go straight to pkill
        pkill_result = subprocess.run(['pkill', 'steam'], timeout=15, check=False, capture_output=True, env=env)
        pkill_result = subprocess.run(['pkill', 'steam'], timeout=15, check=False, capture_output=True, env=shutdown_env)
        logger.debug(f"pkill steam result: {pkill_result.returncode}")
        time.sleep(2)

        # Check if Steam is still running
        check_result = subprocess.run(['pgrep', '-f', 'steamwebhelper'], capture_output=True, timeout=10, env=env)
        check_result = subprocess.run(['pgrep', '-f', 'steamwebhelper'], capture_output=True, timeout=10, env=shutdown_env)
        if check_result.returncode == 0:
            # Force kill if still running
            report("Steam still running - force terminating...")
            force_result = subprocess.run(['pkill', '-9', 'steam'], timeout=15, check=False, capture_output=True, env=env)
            force_result = subprocess.run(['pkill', '-9', 'steam'], timeout=15, check=False, capture_output=True, env=shutdown_env)
            logger.debug(f"pkill -9 steam result: {force_result.returncode}")
            time.sleep(2)

            # Final check
            final_check = subprocess.run(['pgrep', '-f', 'steamwebhelper'], capture_output=True, timeout=10, env=env)
            final_check = subprocess.run(['pgrep', '-f', 'steamwebhelper'], capture_output=True, timeout=10, env=shutdown_env)
            if final_check.returncode != 0:
                logger.info("Steam processes successfully force terminated.")
            else:
@@ -269,19 +405,24 @@ def robust_steam_restart(progress_callback: Optional[Callable[[str], None]] = No

    # Start Steam using platform-specific logic
    report("Starting Steam...")

    # Steam Deck: Use systemctl restart (keep existing working approach)
    if is_steam_deck():
    if _is_steam_deck:
        try:
            subprocess.Popen(["systemctl", "--user", "restart", "app-steam@autostart.service"], env=env)
            subprocess.Popen(["systemctl", "--user", "restart", "app-steam@autostart.service"], env=start_env)
            logger.info("Steam Deck: Initiated systemctl restart")
        except Exception as e:
            logger.error(f"Steam Deck systemctl restart failed: {e}")
            report("Failed to restart Steam on Steam Deck.")
            return False
    else:
        # All other distros: Use proven steam -silent method
        if not start_steam():
        # All other distros: Use start_steam() which now uses -foreground to ensure GUI opens
        if not start_steam(
            is_steamdeck_flag=_is_steam_deck,
            is_flatpak_flag=_is_flatpak,
            env_override=start_env,
            strategy=strategy,
        ):
            report("Failed to start Steam.")
            return False

@@ -294,7 +435,7 @@ def robust_steam_restart(progress_callback: Optional[Callable[[str], None]] = No

    while elapsed_wait < max_startup_wait:
        try:
            result = subprocess.run(['pgrep', '-f', 'steam'], capture_output=True, timeout=10, env=env)
            result = subprocess.run(['pgrep', '-f', 'steam'], capture_output=True, timeout=10, env=start_env)
            if result.returncode == 0:
                if not initial_wait_done:
                    logger.info("Steam process detected. Waiting additional time for full initialization...")
@@ -302,7 +443,7 @@ def robust_steam_restart(progress_callback: Optional[Callable[[str], None]] = No
            time.sleep(5)
            elapsed_wait += 5
            if initial_wait_done and elapsed_wait >= 15:
                final_check = subprocess.run(['pgrep', '-f', 'steam'], capture_output=True, timeout=10, env=env)
                final_check = subprocess.run(['pgrep', '-f', 'steam'], capture_output=True, timeout=10, env=start_env)
                if final_check.returncode == 0:
                    report("Steam started successfully.")
                    logger.info("Steam confirmed running after wait.")