Sync from development - prepare for v0.1.1

This commit is contained in:
Omni
2025-09-15 20:18:13 +01:00
parent 0b6e32beac
commit 70b18004e1
64 changed files with 5142 additions and 1164 deletions

View File

@@ -168,7 +168,7 @@ def main():
print(f"Error: {diagnosis['error']}")
return
print(f"\n📊 Diagnosis Results:")
print(f"\nDiagnosis Results:")
print(f" Average CPU: {diagnosis['avg_cpu']:.1f}% (Range: {diagnosis['min_cpu']:.1f}% - {diagnosis['max_cpu']:.1f}%)")
print(f" Memory usage: {diagnosis['avg_memory_mb']:.1f}MB (Peak: {diagnosis['max_memory_mb']:.1f}MB)")
print(f" Low CPU samples: {diagnosis['low_cpu_samples']}/{diagnosis['samples']} "

View File

@@ -761,7 +761,14 @@ class ModlistHandler:
# Conditionally update binary and working directory paths
# Skip for jackify-engine workflows since paths are already correct
if not getattr(self, 'engine_installed', False):
steam_libraries = [self.steam_library] if self.steam_library else None
# Convert steamapps/common path to library root path
steam_libraries = None
if self.steam_library:
# self.steam_library is the steamapps/common path; go up two levels to get the library root
steam_library_root = Path(self.steam_library).parent.parent
steam_libraries = [steam_library_root]
self.logger.debug(f"Using Steam library root: {steam_library_root}")
if not self.path_handler.edit_binary_working_paths(
modlist_ini_path=modlist_ini_path_obj,
modlist_dir_path=modlist_dir_path_obj,
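
To illustrate the conversion above, a minimal sketch assuming a typical Steam library layout (the paths are hypothetical):

from pathlib import Path

# Hypothetical steamapps/common path, as stored in self.steam_library
steam_library = Path("/home/user/.local/share/Steam/steamapps/common")
# Two levels up gives the Steam library root
steam_library_root = steam_library.parent.parent
print(steam_library_root)  # /home/user/.local/share/Steam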

View File

@@ -723,13 +723,17 @@ class ModlistInstallCLI:
if chunk == b'\n':
# Complete line - decode and print
line = buffer.decode('utf-8', errors='replace')
print(line, end='')
# Enhance Nexus download errors with modlist context
enhanced_line = self._enhance_nexus_error(line)
print(enhanced_line, end='')
buffer = b''
last_progress_time = time.time()
elif chunk == b'\r':
# Carriage return - decode and print without newline
line = buffer.decode('utf-8', errors='replace')
print(line, end='')
# Enhance Nexus download errors with modlist context
enhanced_line = self._enhance_nexus_error(line)
print(enhanced_line, end='')
sys.stdout.flush()
buffer = b''
last_progress_time = time.time()
@@ -1098,4 +1102,36 @@ class ModlistInstallCLI:
print(f"Nexus API Key: [SET]")
else:
print(f"Nexus API Key: [NOT SET - WILL LIKELY FAIL]")
print(f"{COLOR_INFO}----------------------------------------{COLOR_RESET}")
print(f"{COLOR_INFO}----------------------------------------{COLOR_RESET}")
def _enhance_nexus_error(self, line: str) -> str:
"""
Enhance Nexus download error messages by adding the mod URL for easier troubleshooting.
"""
import re
# Pattern to match Nexus download errors with ModID and FileID
nexus_error_pattern = r"Failed to download '[^']+' from Nexus \(Game: ([^,]+), ModID: (\d+), FileID: \d+\):"
match = re.search(nexus_error_pattern, line)
if match:
game_name = match.group(1)
mod_id = match.group(2)
# Map game names to Nexus URL segments
game_url_map = {
'SkyrimSpecialEdition': 'skyrimspecialedition',
'Skyrim': 'skyrim',
'Fallout4': 'fallout4',
'FalloutNewVegas': 'newvegas',
'Oblivion': 'oblivion',
'Starfield': 'starfield'
}
game_url = game_url_map.get(game_name, game_name.lower())
mod_url = f"https://www.nexusmods.com/{game_url}/mods/{mod_id}"
# Add URL on next line for easier debugging
return f"{line}\n Nexus URL: {mod_url}"
return line
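
As a usage sketch of the pattern above (the input line is hypothetical, shaped to match the regex; the .lower() call mirrors the fallback used for games not in the map):

import re

line = ("Failed to download 'Example Mod 1.0' from Nexus "
        "(Game: SkyrimSpecialEdition, ModID: 12345, FileID: 67890): 404 Not Found")
pattern = r"Failed to download '[^']+' from Nexus \(Game: ([^,]+), ModID: (\d+), FileID: \d+\):"
match = re.search(pattern, line)
if match:
    game, mod_id = match.group(1), match.group(2)
    # _enhance_nexus_error() would append this URL on a new line after the error
    print(f"https://www.nexusmods.com/{game.lower()}/mods/{mod_id}")
    # -> https://www.nexusmods.com/skyrimspecialedition/mods/12345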

View File

@@ -815,11 +815,12 @@ class PathHandler:
subpath = value_part[idx:].lstrip('/')
correct_steam_lib = None
for lib in steam_libraries:
if (lib / subpath.split('/')[2]).exists():
correct_steam_lib = lib.parent
# Check if the actual game folder exists in this library
if len(subpath.split('/')) > 3 and (lib / subpath.split('/')[2] / subpath.split('/')[3]).exists():
correct_steam_lib = lib
break
if not correct_steam_lib and steam_libraries:
correct_steam_lib = steam_libraries[0].parent
correct_steam_lib = steam_libraries[0]
if correct_steam_lib:
new_binary_path = f"{drive_prefix}/{correct_steam_lib}/{subpath}".replace('\\', '/').replace('//', '/')
else:

View File

@@ -1,141 +0,0 @@
import os
import sys
import json
import requests
import shutil
import tempfile
import time
from pathlib import Path
GITHUB_OWNER = "Omni-guides"
GITHUB_REPO = "Jackify"
ASSET_NAME = "jackify"
CONFIG_DIR = os.path.expanduser("~/.config/jackify")
TOKEN_PATH = os.path.join(CONFIG_DIR, "github_token")
LAST_CHECK_PATH = os.path.join(CONFIG_DIR, "last_update_check.json")
THROTTLE_HOURS = 6
def get_github_token():
if os.path.exists(TOKEN_PATH):
with open(TOKEN_PATH, "r") as f:
return f.read().strip()
return None
def get_latest_release_info():
url = f"https://api.github.com/repos/{GITHUB_OWNER}/{GITHUB_REPO}/releases/latest"
headers = {}
token = get_github_token()
if token:
headers["Authorization"] = f"token {token}"
resp = requests.get(url, headers=headers, verify=True)
if resp.status_code == 200:
return resp.json()
else:
raise RuntimeError(f"Failed to fetch release info: {resp.status_code} {resp.text}")
def get_current_version():
# This should match however Jackify stores its version
try:
from jackify import __version__
return __version__
except ImportError:
return None
def should_check_for_update():
try:
if os.path.exists(LAST_CHECK_PATH):
with open(LAST_CHECK_PATH, "r") as f:
data = json.load(f)
last_check = data.get("last_check", 0)
now = int(time.time())
if now - last_check < THROTTLE_HOURS * 3600:
return False
return True
except Exception as e:
print(f"[WARN] Could not read last update check timestamp: {e}")
return True
def record_update_check():
try:
with open(LAST_CHECK_PATH, "w") as f:
json.dump({"last_check": int(time.time())}, f)
except Exception as e:
print(f"[WARN] Could not write last update check timestamp: {e}")
def check_for_update():
if not should_check_for_update():
return False, None, None
try:
release = get_latest_release_info()
latest_version = release["tag_name"].lstrip("v")
current_version = get_current_version()
if current_version is None:
print("[WARN] Could not determine current version.")
record_update_check()
return False, None, None
if latest_version > current_version:
record_update_check()
return True, latest_version, release
record_update_check()
return False, latest_version, release
except Exception as e:
print(f"[ERROR] Update check failed: {e}")
record_update_check()
return False, None, None
def download_latest_asset(release):
token = get_github_token()
headers = {"Accept": "application/octet-stream"}
if token:
headers["Authorization"] = f"token {token}"
for asset in release["assets"]:
if asset["name"] == ASSET_NAME:
download_url = asset["url"]
resp = requests.get(download_url, headers=headers, stream=True, verify=True)
if resp.status_code == 200:
return resp.content
else:
raise RuntimeError(f"Failed to download asset: {resp.status_code} {resp.text}")
raise RuntimeError(f"Asset '{ASSET_NAME}' not found in release.")
def replace_current_binary(new_binary_bytes):
current_exe = os.path.realpath(sys.argv[0])
backup_path = current_exe + ".bak"
try:
# Write to a temp file first
with tempfile.NamedTemporaryFile(delete=False, dir=os.path.dirname(current_exe)) as tmpf:
tmpf.write(new_binary_bytes)
tmp_path = tmpf.name
# Backup current binary
shutil.copy2(current_exe, backup_path)
# Replace atomically
os.replace(tmp_path, current_exe)
os.chmod(current_exe, 0o755)
print(f"[INFO] Updated binary written to {current_exe}. Backup at {backup_path}.")
return True
except Exception as e:
print(f"[ERROR] Failed to replace binary: {e}")
return False
def main():
if '--update' in sys.argv:
print("Checking for updates...")
update_available, latest_version, release = check_for_update()
if update_available:
print(f"A new version (v{latest_version}) is available. Downloading...")
try:
new_bin = download_latest_asset(release)
if replace_current_binary(new_bin):
print("Update complete! Please restart Jackify.")
else:
print("Update failed during binary replacement.")
except Exception as e:
print(f"[ERROR] Update failed: {e}")
else:
print("You are already running the latest version.")
sys.exit(0)
# For direct CLI testing
if __name__ == "__main__":
main()

View File

@@ -101,7 +101,7 @@ class AutomatedPrefixService:
logger.info(f" Native Steam service created shortcut successfully with AppID: {app_id}")
return True, app_id
else:
logger.error("Native Steam service failed to create shortcut")
logger.error("Native Steam service failed to create shortcut")
return False, None
except Exception as e:
@@ -471,7 +471,7 @@ exit"""
logger.warning(f"Error running protontricks -l on attempt {i+1}: {e}")
time.sleep(1)
logger.error(f"Shortcut '{shortcut_name}' not found in protontricks after 30 seconds")
logger.error(f"Shortcut '{shortcut_name}' not found in protontricks after 30 seconds")
return None
except Exception as e:
@@ -939,7 +939,7 @@ echo Prefix creation complete.
except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.TimeoutExpired):
continue
logger.info(" No more processes to kill")
logger.info("No more processes to kill")
return True
except Exception as e:
@@ -1296,7 +1296,7 @@ echo Prefix creation complete.
time.sleep(1)
logger.warning(f"Timeout waiting for prefix completion after {timeout} seconds")
logger.warning(f"Timeout waiting for prefix completion after {timeout} seconds")
return False
except Exception as e:
@@ -1356,7 +1356,7 @@ echo Prefix creation complete.
if killed_count > 0:
logger.info(f" Killed {killed_count} ModOrganizer processes")
else:
logger.warning("No ModOrganizer processes found to kill")
logger.warning("No ModOrganizer processes found to kill")
return killed_count
@@ -1624,11 +1624,11 @@ echo Prefix creation complete.
return True
logger.error(f"Shortcut '{shortcut_name}' not found for CompatTool setting")
logger.error(f"Shortcut '{shortcut_name}' not found for CompatTool setting")
return False
except Exception as e:
logger.error(f"Error setting CompatTool on shortcut: {e}")
logger.error(f"Error setting CompatTool on shortcut: {e}")
return False
def _set_proton_on_shortcut(self, shortcut_name: str) -> bool:
@@ -2633,7 +2633,7 @@ echo Prefix creation complete.
logger.info(f" Proton prefix created at: {pfx}")
return True
else:
logger.warning(f"⚠️ Proton prefix not found at: {pfx}")
logger.warning(f"Proton prefix not found at: {pfx}")
return False
except subprocess.TimeoutExpired:
@@ -2735,7 +2735,7 @@ echo Prefix creation complete.
logger.info(" Compatibility tool persists")
return True
else:
logger.warning("⚠️ Compatibility tool not found")
logger.warning("Compatibility tool not found")
return False
except Exception as e:

View File

@@ -228,14 +228,14 @@ class NativeSteamService:
# Write back to file
if self.write_shortcuts_vdf(data):
logger.info(f"Shortcut created successfully at index {next_index}")
logger.info(f"Shortcut created successfully at index {next_index}")
return True, unsigned_app_id
else:
logger.error("Failed to write shortcut to VDF")
logger.error("Failed to write shortcut to VDF")
return False, None
except Exception as e:
logger.error(f"Error creating shortcut: {e}")
logger.error(f"Error creating shortcut: {e}")
return False, None
def set_proton_version(self, app_id: int, proton_version: str = "proton_experimental") -> bool:
@@ -320,11 +320,11 @@ class NativeSteamService:
with open(config_path, 'w', encoding='utf-8') as f:
f.write(new_config_text)
logger.info(f"Successfully set Proton version '{proton_version}' for AppID {app_id} using config.vdf only (steam-conductor method)")
logger.info(f"Successfully set Proton version '{proton_version}' for AppID {app_id} using config.vdf only (steam-conductor method)")
return True
except Exception as e:
logger.error(f"Error setting Proton version: {e}")
logger.error(f"Error setting Proton version: {e}")
return False
def create_shortcut_with_proton(self, app_name: str, exe_path: str, start_dir: str = None,
@@ -351,7 +351,7 @@ class NativeSteamService:
logger.error("Failed to set Proton version (shortcut still created)")
return False, app_id # Shortcut exists but Proton setting failed
logger.info(f"Complete workflow successful: '{app_name}' with '{proton_version}'")
logger.info(f"Complete workflow successful: '{app_name}' with '{proton_version}'")
return True, app_id
def list_shortcuts(self) -> Dict[str, str]:
@@ -388,12 +388,12 @@ class NativeSteamService:
# Write back
if self.write_shortcuts_vdf(data):
logger.info(f"Removed shortcut '{app_name}'")
logger.info(f"Removed shortcut '{app_name}'")
return True
else:
logger.error("Failed to write updated shortcuts")
logger.error("Failed to write updated shortcuts")
return False
except Exception as e:
logger.error(f"Error removing shortcut: {e}")
logger.error(f"Error removing shortcut: {e}")
return False

View File

@@ -33,6 +33,7 @@ class UpdateInfo:
download_url: str
file_size: Optional[int] = None
is_critical: bool = False
is_delta_update: bool = False
class UpdateService:
@@ -72,24 +73,44 @@ class UpdateService:
latest_version = release_data['tag_name'].lstrip('v')
if self._is_newer_version(latest_version):
# Find AppImage asset
# Check if this version was skipped
if self._is_version_skipped(latest_version):
logger.debug(f"Version {latest_version} was skipped by user")
return None
# Find AppImage asset (prefer delta update if available)
download_url = None
file_size = None
# Look for delta update first (smaller download)
for asset in release_data.get('assets', []):
if asset['name'].endswith('.AppImage'):
if asset['name'].endswith('.AppImage.delta') or 'delta' in asset['name'].lower():
download_url = asset['browser_download_url']
file_size = asset['size']
logger.debug(f"Found delta update: {asset['name']} ({file_size} bytes)")
break
# Fallback to full AppImage if no delta available
if not download_url:
for asset in release_data.get('assets', []):
if asset['name'].endswith('.AppImage'):
download_url = asset['browser_download_url']
file_size = asset['size']
logger.debug(f"Found full AppImage: {asset['name']} ({file_size} bytes)")
break
if download_url:
# Determine if this is a delta update
is_delta = '.delta' in download_url or 'delta' in download_url.lower()
return UpdateInfo(
version=latest_version,
tag_name=release_data['tag_name'],
release_date=release_data['published_at'],
changelog=release_data.get('body', ''),
download_url=download_url,
file_size=file_size
file_size=file_size,
is_delta_update=is_delta
)
else:
logger.warning(f"No AppImage found in release {latest_version}")
@@ -123,6 +144,25 @@ class UpdateService:
logger.warning(f"Could not parse version: {version}")
return False
def _is_version_skipped(self, version: str) -> bool:
"""
Check if a version was skipped by the user.
Args:
version: Version to check
Returns:
bool: True if version was skipped, False otherwise
"""
try:
from ...backend.handlers.config_handler import ConfigHandler
config_handler = ConfigHandler()
skipped_versions = config_handler.get('skipped_versions', [])
return version in skipped_versions
except Exception as e:
logger.warning(f"Error checking skipped versions: {e}")
return False
def check_for_updates_async(self, callback: Callable[[Optional[UpdateInfo]], None]) -> None:
"""
Check for updates in background thread.
@@ -152,16 +192,25 @@ class UpdateService:
logger.debug("Not running as AppImage - updates not supported")
return False
appimage_path = get_appimage_path()
if not appimage_path:
logger.debug("AppImage path validation failed - updates not supported")
return False
if not can_self_update():
logger.debug("Cannot write to AppImage - updates not possible")
return False
logger.debug(f"Self-updating enabled for AppImage: {appimage_path}")
return True
def download_update(self, update_info: UpdateInfo,
progress_callback: Optional[Callable[[int, int], None]] = None) -> Optional[Path]:
"""
Download update to temporary location.
Download update using full AppImage replacement.
Since we can't rely on external tools being available, we use a reliable
full replacement approach that works on all systems without dependencies.
Args:
update_info: Information about the update to download
@@ -171,7 +220,27 @@ class UpdateService:
Path to downloaded file, or None if download failed
"""
try:
logger.info(f"Downloading update {update_info.version} from {update_info.download_url}")
logger.info(f"Downloading update {update_info.version} (full replacement)")
return self._download_update_manual(update_info, progress_callback)
except Exception as e:
logger.error(f"Failed to download update: {e}")
return None
def _download_update_manual(self, update_info: UpdateInfo,
progress_callback: Optional[Callable[[int, int], None]] = None) -> Optional[Path]:
"""
Fallback manual download method.
Args:
update_info: Information about the update to download
progress_callback: Optional callback for download progress
Returns:
Path to downloaded file, or None if download failed
"""
try:
logger.info(f"Manual download of update {update_info.version} from {update_info.download_url}")
response = requests.get(update_info.download_url, stream=True)
response.raise_for_status()
@@ -179,11 +248,12 @@ class UpdateService:
total_size = int(response.headers.get('content-length', 0))
downloaded_size = 0
# Create temporary file
temp_dir = Path(tempfile.gettempdir()) / "jackify_updates"
temp_dir.mkdir(exist_ok=True)
# Create update directory in user's home directory
home_dir = Path.home()
update_dir = home_dir / "Jackify" / "updates"
update_dir.mkdir(parents=True, exist_ok=True)
temp_file = temp_dir / f"Jackify-{update_info.version}.AppImage"
temp_file = update_dir / f"Jackify-{update_info.version}.AppImage"
with open(temp_file, 'wb') as f:
for chunk in response.iter_content(chunk_size=8192):
@@ -197,11 +267,11 @@ class UpdateService:
# Make executable
temp_file.chmod(0o755)
logger.info(f"Update downloaded successfully to {temp_file}")
logger.info(f"Manual update downloaded successfully to {temp_file}")
return temp_file
except Exception as e:
logger.error(f"Failed to download update: {e}")
logger.error(f"Failed to download update manually: {e}")
return None
def apply_update(self, new_appimage_path: Path) -> bool:
@@ -252,10 +322,12 @@ class UpdateService:
Path to helper script, or None if creation failed
"""
try:
temp_dir = Path(tempfile.gettempdir()) / "jackify_updates"
temp_dir.mkdir(exist_ok=True)
# Create update directory in user's home directory
home_dir = Path.home()
update_dir = home_dir / "Jackify" / "updates"
update_dir.mkdir(parents=True, exist_ok=True)
helper_script = temp_dir / "update_helper.sh"
helper_script = update_dir / "update_helper.sh"
script_content = f'''#!/bin/bash
# Jackify Update Helper Script