Mirror of https://github.com/Omni-guides/Jackify.git (synced 2026-01-17 19:47:00 +01:00)
Initial public release v0.1.0 - Linux Wabbajack Modlist Application
Jackify provides native Linux support for Wabbajack modlist installation and management, with automated Steam integration and Proton configuration.

Key Features:
- Near-native Linux implementation (only texconv.exe is run via Proton)
- Automated Steam shortcut creation and Proton prefix management
- Both CLI and GUI interfaces, with Steam Deck optimization

Supported Games:
- Skyrim Special Edition
- Fallout 4
- Fallout New Vegas
- Oblivion, Starfield, Enderal, and several other games

Technical Architecture:
- Clean separation between frontend and backend services
- Powered by jackify-engine 0.3.x for Wabbajack-matching modlist installation
5
jackify/shared/__init__.py
Normal file
@@ -0,0 +1,5 @@
"""
Shared Utilities

Common utilities used by both CLI and GUI frontends.
"""
16
jackify/shared/colors.py
Normal file
@@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
"""
UI Color Constants
"""

COLOR_PROMPT = '\033[93m'  # Yellow
COLOR_SELECTION = '\033[96m'  # Cyan
COLOR_RESET = '\033[0m'
COLOR_INFO = '\033[94m'  # Blue
COLOR_ERROR = '\033[91m'  # Red
COLOR_SUCCESS = '\033[92m'  # Green
COLOR_WARNING = '\033[93m'  # Yellow (reusing prompt color)
COLOR_DISABLED = '\033[90m'  # Grey

COLOR_ACTION = '\033[97m'  # Bright White for action/descriptions
COLOR_INPUT = '\033[97m'  # Bright White for input prompts
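A minimal illustration of how these constants are intended to be combined with COLOR_RESET when printing (editor's sketch, not part of this commit):

from jackify.shared.colors import COLOR_ERROR, COLOR_SUCCESS, COLOR_RESET

print(f"{COLOR_SUCCESS}Modlist configured successfully{COLOR_RESET}")
print(f"{COLOR_ERROR}Installation failed{COLOR_RESET}")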
201
jackify/shared/logging.py
Normal file
@@ -0,0 +1,201 @@
"""
LoggingHandler module for managing logging operations.
This module handles log file creation, rotation, and management.
"""

import os
import logging
import logging.handlers
from pathlib import Path
from typing import Optional, Dict, List
from datetime import datetime
import shutil


class LoggingHandler:
    """
    Central logging handler for Jackify.
    - Uses ~/Jackify/logs/ as the log directory.
    - Supports per-function log files (e.g., jackify-install-wabbajack.log).
    - Handles log rotation and log directory creation.
    Usage:
        logger = LoggingHandler().setup_logger('install_wabbajack', 'jackify-install-wabbajack.log')
    """
    def __init__(self):
        self.log_dir = Path.home() / "Jackify" / "logs"
        self.ensure_log_directory()

    def ensure_log_directory(self) -> None:
        """Ensure the log directory exists."""
        try:
            self.log_dir.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            print(f"Failed to create log directory: {e}")

    def rotate_log_file_per_run(self, log_file_path: Path, backup_count: int = 5):
        """Rotate the log file on every run, keeping up to backup_count backups."""
        if log_file_path.exists():
            # Remove the oldest backup if it exists
            oldest = log_file_path.with_suffix(log_file_path.suffix + f'.{backup_count}')
            if oldest.exists():
                oldest.unlink()
            # Shift backups
            for i in range(backup_count - 1, 0, -1):
                src = log_file_path.with_suffix(log_file_path.suffix + f'.{i}')
                dst = log_file_path.with_suffix(log_file_path.suffix + f'.{i+1}')
                if src.exists():
                    src.rename(dst)
            # Move current log to .1
            log_file_path.rename(log_file_path.with_suffix(log_file_path.suffix + '.1'))

    def rotate_log_for_logger(self, name: str, log_file: Optional[str] = None, backup_count: int = 5):
        """
        Rotate the log file for a logger before any logging occurs.
        Must be called BEFORE any log is written or file handler is attached.
        """
        file_path = self.log_dir / (log_file if log_file else "jackify-cli.log")
        self.rotate_log_file_per_run(file_path, backup_count=backup_count)

    def setup_logger(self, name: str, log_file: Optional[str] = None, is_general: bool = False) -> logging.Logger:
        """Set up a logger with file and console handlers. Call rotate_log_for_logger before this if you want per-run rotation."""
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        logger.propagate = False

        # Create formatters
        file_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
        console_formatter = logging.Formatter(
            '%(levelname)s: %(message)s'
        )

        # Add console handler - check debug mode from config
        console_handler = logging.StreamHandler()

        # Check if debug mode is enabled
        try:
            from jackify.backend.handlers.config_handler import ConfigHandler
            config_handler = ConfigHandler()
            debug_mode = config_handler.get('debug_mode', False)
            if debug_mode:
                console_handler.setLevel(logging.DEBUG)
            else:
                console_handler.setLevel(logging.ERROR)
        except Exception:
            # Fallback to ERROR level if config can't be loaded
            console_handler.setLevel(logging.ERROR)
        console_handler.setFormatter(console_formatter)
        if not any(isinstance(h, logging.StreamHandler) for h in logger.handlers):
            logger.addHandler(console_handler)

        # Add file handler if log_file is specified, or use default for general
        if log_file or is_general:
            file_path = self.log_dir / (log_file if log_file else "jackify-cli.log")
            file_handler = logging.handlers.RotatingFileHandler(
                file_path, mode='a', encoding='utf-8', maxBytes=1024*1024, backupCount=5
            )
            file_handler.setLevel(logging.DEBUG)
            file_handler.setFormatter(file_formatter)
            if not any(isinstance(h, logging.handlers.RotatingFileHandler) and getattr(h, 'baseFilename', None) == str(file_path) for h in logger.handlers):
                logger.addHandler(file_handler)

        return logger

    def rotate_logs(self, max_bytes: int = 1024 * 1024, backup_count: int = 5) -> None:
        """Rotate log files based on size."""
        for log_file in self.get_log_files():
            try:
                if log_file.stat().st_size > max_bytes:
                    # Create backup
                    backup_path = log_file.with_suffix(f'.{datetime.now().strftime("%Y%m%d_%H%M%S")}.log')
                    log_file.rename(backup_path)

                    # Clean up old backups
                    backups = sorted(log_file.parent.glob(f"{log_file.stem}.*.log"))
                    if len(backups) > backup_count:
                        for old_backup in backups[:-backup_count]:
                            old_backup.unlink()
            except Exception as e:
                print(f"Failed to rotate log file {log_file}: {e}")

    def cleanup_old_logs(self, days: int = 30) -> None:
        """Clean up log files older than specified days."""
        cutoff = datetime.now().timestamp() - (days * 24 * 60 * 60)
        for log_file in self.get_log_files():
            try:
                if log_file.stat().st_mtime < cutoff:
                    log_file.unlink()
            except Exception as e:
                print(f"Failed to clean up log file {log_file}: {e}")

    def get_log_files(self) -> List[Path]:
        """Get a list of all log files."""
        return list(self.log_dir.glob("*.log"))

    def get_log_content(self, log_file: Path, lines: int = 100) -> List[str]:
        """Get the last N lines of a log file."""
        try:
            with open(log_file, 'r') as f:
                return f.readlines()[-lines:]
        except Exception as e:
            print(f"Failed to read log file {log_file}: {e}")
            return []

    def search_logs(self, pattern: str) -> Dict[Path, List[str]]:
        """Search all log files for a pattern."""
        results = {}
        for log_file in self.get_log_files():
            try:
                with open(log_file, 'r') as f:
                    matches = [line for line in f if pattern in line]
                    if matches:
                        results[log_file] = matches
            except Exception as e:
                print(f"Failed to search log file {log_file}: {e}")
        return results

    def export_logs(self, output_dir: Path) -> bool:
        """Export all logs to a directory."""
        try:
            output_dir.mkdir(parents=True, exist_ok=True)
            for log_file in self.get_log_files():
                shutil.copy2(log_file, output_dir / log_file.name)
            return True
        except Exception as e:
            print(f"Failed to export logs: {e}")
            return False

    def set_log_level(self, level: int) -> None:
        """Set the logging level for all loggers."""
        for logger_name in logging.root.manager.loggerDict:
            logger = logging.getLogger(logger_name)
            logger.setLevel(level)

    def get_log_stats(self) -> Dict:
        """Get statistics about log files."""
        stats = {
            'total_files': 0,
            'total_size': 0,
            'largest_file': None,
            'oldest_file': None,
            'newest_file': None
        }

        try:
            log_files = self.get_log_files()
            stats['total_files'] = len(log_files)

            if log_files:
                stats['total_size'] = sum(f.stat().st_size for f in log_files)
                stats['largest_file'] = max(log_files, key=lambda x: x.stat().st_size)
                stats['oldest_file'] = min(log_files, key=lambda x: x.stat().st_mtime)
                stats['newest_file'] = max(log_files, key=lambda x: x.stat().st_mtime)

        except Exception as e:
            print(f"Failed to get log stats: {e}")

        return stats

    def get_general_logger(self):
        """Get the general CLI logger (~/Jackify/logs/jackify-cli.log)."""
        return self.setup_logger('jackify_cli', is_general=True)
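A minimal usage sketch for LoggingHandler (editor's example, not part of this commit; the logger name and log file are hypothetical). Per the docstrings above, per-run rotation must be requested before the file handler is attached:

from jackify.shared.logging import LoggingHandler

handler = LoggingHandler()
handler.rotate_log_for_logger('install_modlist', 'jackify-install-modlist.log')
logger = handler.setup_logger('install_modlist', 'jackify-install-modlist.log')
logger.info('Starting modlist installation')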
958
jackify/shared/paths.py
Normal file
@@ -0,0 +1,958 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Path Handler Module
Handles path-related operations for ModOrganizer.ini and other configuration files
"""

import os
import re
import logging
import shutil
from pathlib import Path
from typing import Optional, Union, Dict, Any, List, Tuple
from datetime import datetime

# Initialize logger
logger = logging.getLogger(__name__)

# --- Configuration (Adapted from Proposal) ---
# Define known script extender executables (lowercase for comparisons)
TARGET_EXECUTABLES_LOWER = ["skse64_loader.exe", "f4se_loader.exe", "nvse_loader.exe", "obse_loader.exe", "falloutnv.exe"]
# Define known stock game folder names (case-sensitive, as they appear on disk)
STOCK_GAME_FOLDERS = ["Stock Game", "Game Root", "Stock Folder", "Skyrim Stock"]
# Define the SD card path prefix on Steam Deck/Linux
SDCARD_PREFIX = '/run/media/mmcblk0p1/'


class PathHandler:
    """
    Handles path-related operations for ModOrganizer.ini and other configuration files
    """

    @staticmethod
    def _strip_sdcard_path_prefix(path_obj: Path) -> str:
        """
        Removes the '/run/media/mmcblk0p1/' prefix if present.
        Returns the path as a POSIX-style string (using /).
        """
        path_str = path_obj.as_posix()  # Work with consistent forward slashes
        if path_str.lower().startswith(SDCARD_PREFIX.lower()):
            # Return the part *after* the prefix, ensuring no leading slash remains unless root
            relative_part = path_str[len(SDCARD_PREFIX):]
            return relative_part if relative_part else "."  # Return '.' if it was exactly the prefix
        return path_str

    @staticmethod
    def update_mo2_ini_paths(
        modlist_ini_path: Path,
        modlist_dir_path: Path,
        modlist_sdcard: bool,
        steam_library_common_path: Optional[Path] = None,
        basegame_dir_name: Optional[str] = None,
        basegame_sdcard: bool = False  # Default to False if not provided
    ) -> bool:
        logger.info(f"[DEBUG] update_mo2_ini_paths called with: modlist_ini_path={modlist_ini_path}, modlist_dir_path={modlist_dir_path}, modlist_sdcard={modlist_sdcard}, steam_library_common_path={steam_library_common_path}, basegame_dir_name={basegame_dir_name}, basegame_sdcard={basegame_sdcard}")
        if not modlist_ini_path.is_file():
            logger.error(f"ModOrganizer.ini not found at specified path: {modlist_ini_path}")
            # Attempt to create a minimal INI
            try:
                logger.warning("Creating minimal ModOrganizer.ini with [General] section.")
                with open(modlist_ini_path, 'w', encoding='utf-8') as f:
                    f.write('[General]\n')
                # Continue as if file existed
            except Exception as e:
                logger.critical(f"Failed to create minimal ModOrganizer.ini: {e}")
                return False
        if not modlist_dir_path.is_dir():
            logger.error(f"Modlist directory not found or not a directory: {modlist_dir_path}")
            # Warn but continue

        # --- Bulletproof game directory detection ---
        # 1. Get all Steam libraries and log them
        all_steam_libraries = PathHandler.get_all_steam_library_paths()
        logger.info(f"[DEBUG] Detected Steam libraries: {all_steam_libraries}")
        import sys
        if hasattr(sys, 'argv') and any(arg in ('--debug', '-d') for arg in sys.argv):
            # Debug logging for Steam libraries detection - use logger if available
            if 'logger' in globals() and logger:
                logger.debug(f"Detected Steam libraries: {all_steam_libraries}")
            # If no logger available, this debug info is not critical for user operation

        # 2. For each library, check for the canonical vanilla game directory
        GAME_DIR_NAMES = {
            "Skyrim Special Edition": "Skyrim Special Edition",
            "Fallout 4": "Fallout 4",
            "Fallout New Vegas": "Fallout New Vegas",
            "Oblivion": "Oblivion"
        }
        canonical_name = None
        if basegame_dir_name and basegame_dir_name in GAME_DIR_NAMES:
            canonical_name = GAME_DIR_NAMES[basegame_dir_name]
        elif basegame_dir_name:
            canonical_name = basegame_dir_name  # fallback, but should match above
        gamepath_target_dir = None
        gamepath_target_is_sdcard = modlist_sdcard
        checked_candidates = []
        if canonical_name:
            for lib in all_steam_libraries:
                candidate = lib / "steamapps" / "common" / canonical_name
                checked_candidates.append(str(candidate))
                logger.info(f"[DEBUG] Checking for vanilla game directory: {candidate}")
                if candidate.is_dir():
                    gamepath_target_dir = candidate
                    logger.info(f"Found vanilla game directory: {candidate}")
                    break
        if not gamepath_target_dir:
            logger.error(f"Could not find vanilla game directory '{canonical_name}' in any Steam library. Checked: {checked_candidates}")
            # 4. Prompt the user for the path
            print("\nCould not automatically detect a Stock Game or vanilla game directory.")
            print("Please enter the full path to your vanilla game directory (e.g., /path/to/Skyrim Special Edition):")
            while True:
                user_input = input("Game directory path: ").strip()
                user_path = Path(user_input)
                logger.info(f"[DEBUG] User entered: {user_input}")
                if user_path.is_dir():
                    exe_candidates = list(user_path.glob('*.exe'))
                    logger.info(f"[DEBUG] .exe files in user path: {exe_candidates}")
                    if exe_candidates:
                        gamepath_target_dir = user_path
                        logger.info(f"User provided valid vanilla game directory: {gamepath_target_dir}")
                        break
                    else:
                        print("Directory exists but does not appear to contain the game executable. Please check and try again.")
                        logger.warning("User path exists but no .exe files found.")
                else:
                    print("Directory not found. Please enter a valid path.")
                    logger.warning("User path does not exist.")
        if not gamepath_target_dir:
            logger.critical("[FATAL] Could not determine a valid target directory for gamePath. Check configuration and paths. Aborting update.")
            return False

        # 3. Update gamePath, binary, and workingDirectory entries in the INI
        logger.debug(f"Determined gamePath target directory: {gamepath_target_dir}")
        logger.debug(f"gamePath target is on SD card: {gamepath_target_is_sdcard}")
        try:
            logger.debug(f"Reading original INI file: {modlist_ini_path}")
            with open(modlist_ini_path, 'r', encoding='utf-8', errors='ignore') as f:
                original_lines = f.readlines()

            # --- Find and robustly update gamePath line ---
            gamepath_line_num = -1
            general_section_line = -1
            for i, line in enumerate(original_lines):
                if re.match(r'^\s*\[General\]\s*$', line, re.IGNORECASE):
                    general_section_line = i
                if re.match(r'^\s*gamepath\s*=\s*', line, re.IGNORECASE):
                    gamepath_line_num = i
                    break
            processed_str = PathHandler._strip_sdcard_path_prefix(gamepath_target_dir)
            windows_style_single = processed_str.replace('/', '\\')
            gamepath_drive_letter = "D:" if gamepath_target_is_sdcard else "Z:"
            # Use robust formatter
            formatted_gamepath = PathHandler._format_gamepath_for_mo2(f'{gamepath_drive_letter}{windows_style_single}')
            new_gamepath_line = f'gamePath = @ByteArray({formatted_gamepath})\n'
            if gamepath_line_num != -1:
                logger.info(f"Updating existing gamePath line: {original_lines[gamepath_line_num].strip()} -> {new_gamepath_line.strip()}")
                original_lines[gamepath_line_num] = new_gamepath_line
            else:
                insert_at = general_section_line + 1 if general_section_line != -1 else 0
                logger.info(f"Adding missing gamePath line at line {insert_at+1}: {new_gamepath_line.strip()}")
                original_lines.insert(insert_at, new_gamepath_line)

            # --- Update customExecutables binaries and workingDirectories ---
            TARGET_EXECUTABLES_LOWER = [
                "skse64_loader.exe", "f4se_loader.exe", "nvse_loader.exe", "obse_loader.exe", "falloutnv.exe"
            ]
            in_custom_exec = False
            for i, line in enumerate(original_lines):
                if re.match(r'^\s*\[customExecutables\]\s*$', line, re.IGNORECASE):
                    in_custom_exec = True
                    continue
                if in_custom_exec and re.match(r'^\s*\[.*\]\s*$', line):
                    in_custom_exec = False
                if in_custom_exec:
                    m = re.match(r'^(\d+)\\binary\s*=\s*(.*)$', line.strip(), re.IGNORECASE)
                    if m:
                        idx, old_path = m.group(1), m.group(2)
                        exe_name = os.path.basename(old_path).lower()
                        if exe_name in TARGET_EXECUTABLES_LOWER:
                            new_path = f'{gamepath_drive_letter}/{PathHandler._strip_sdcard_path_prefix(gamepath_target_dir)}/{exe_name}'
                            # Use robust formatter
                            new_path = PathHandler._format_binary_for_mo2(new_path)
                            logger.info(f"Updating binary for entry {idx}: {old_path} -> {new_path}")
                            original_lines[i] = f'{idx}\\binary = {new_path}\n'
                    m_wd = re.match(r'^(\d+)\\workingDirectory\s*=\s*(.*)$', line.strip(), re.IGNORECASE)
                    if m_wd:
                        idx, old_wd = m_wd.group(1), m_wd.group(2)
                        new_wd = f'{gamepath_drive_letter}{windows_style_single}'
                        # Use robust formatter
                        new_wd = PathHandler._format_workingdir_for_mo2(new_wd)
                        logger.info(f"Updating workingDirectory for entry {idx}: {old_wd} -> {new_wd}")
                        original_lines[i] = f'{idx}\\workingDirectory = {new_wd}\n'

            # --- Backup and Write New INI ---
            backup_path = modlist_ini_path.with_suffix(f".{datetime.now().strftime('%Y%m%d_%H%M%S')}.bak")
            try:
                shutil.copy2(modlist_ini_path, backup_path)
                logger.info(f"Backed up original INI to: {backup_path}")
            except Exception as bak_err:
                logger.error(f"Failed to backup original INI file: {bak_err}")
                return False
            try:
                with open(modlist_ini_path, 'w', encoding='utf-8') as f:
                    f.writelines(original_lines)
                logger.info(f"Successfully wrote updated paths to {modlist_ini_path}")
                return True
            except Exception as write_err:
                logger.error(f"Failed to write updated INI file {modlist_ini_path}: {write_err}", exc_info=True)
                logger.error("Attempting to restore from backup...")
                try:
                    shutil.move(backup_path, modlist_ini_path)
                    logger.info("Successfully restored original INI from backup.")
                except Exception as restore_err:
                    logger.critical(f"CRITICAL FAILURE: Could not write new INI and failed to restore backup {backup_path}. Manual intervention required at {modlist_ini_path}! Error: {restore_err}")
                return False
        except Exception as e:
            logger.error(f"An unexpected error occurred during INI path update: {e}", exc_info=True)
            return False

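    # Illustrative call (editor's example, not part of this commit); the modlist
    # paths below are hypothetical:
    #
    #   PathHandler.update_mo2_ini_paths(
    #       modlist_ini_path=Path("/home/user/Modlists/MyList/ModOrganizer.ini"),
    #       modlist_dir_path=Path("/home/user/Modlists/MyList"),
    #       modlist_sdcard=False,
    #       basegame_dir_name="Skyrim Special Edition",
    #   )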
    @staticmethod
    def edit_resolution(modlist_ini, resolution):
        """
        Edit resolution settings in ModOrganizer.ini

        Args:
            modlist_ini (str): Path to ModOrganizer.ini
            resolution (str): Resolution in the format "1920x1080"

        Returns:
            bool: True on success, False on failure
        """
        try:
            logger.info(f"Editing resolution settings to {resolution}...")

            # Parse resolution
            width, height = resolution.split('x')

            # Read the current ModOrganizer.ini
            with open(modlist_ini, 'r') as f:
                content = f.read()

            # Replace width and height settings
            content = re.sub(r'^width\s*=\s*\d+$', f'width = {width}', content, flags=re.MULTILINE)
            content = re.sub(r'^height\s*=\s*\d+$', f'height = {height}', content, flags=re.MULTILINE)

            # Write the updated content back to the file
            with open(modlist_ini, 'w') as f:
                f.write(content)

            logger.info("Resolution settings edited successfully")
            return True

        except Exception as e:
            logger.error(f"Error editing resolution settings: {e}")
            return False

    @staticmethod
    def create_dxvk_conf(modlist_dir, modlist_sdcard, steam_library, basegame_sdcard, game_var_full):
        """
        Create dxvk.conf file in the appropriate location

        Args:
            modlist_dir (str): Path to the modlist directory
            modlist_sdcard (bool): Whether the modlist is on an SD card
            steam_library (str): Path to the Steam library
            basegame_sdcard (bool): Whether the base game is on an SD card
            game_var_full (str): Full name of the game (e.g., "Skyrim Special Edition")

        Returns:
            bool: True on success, False on failure
        """
        try:
            logger.info("Creating dxvk.conf file...")

            # Determine the location for dxvk.conf
            dxvk_conf_path = None

            # Check for common stock game directories
            stock_game_paths = [
                os.path.join(modlist_dir, "Stock Game"),
                os.path.join(modlist_dir, "STOCK GAME"),
                os.path.join(modlist_dir, "Game Root"),
                os.path.join(modlist_dir, "Stock Folder"),
                os.path.join(modlist_dir, "Skyrim Stock"),
                os.path.join(modlist_dir, "root", "Skyrim Special Edition"),
                os.path.join(steam_library, game_var_full)
            ]

            for path in stock_game_paths:
                if os.path.exists(path):
                    dxvk_conf_path = os.path.join(path, "dxvk.conf")
                    break

            if not dxvk_conf_path:
                logger.error("Could not determine location for dxvk.conf")
                return False

            # Create dxvk.conf content
            dxvk_conf_content = "dxvk.enableGraphicsPipelineLibrary = False\n"

            # Write dxvk.conf to the appropriate location
            with open(dxvk_conf_path, 'w') as f:
                f.write(dxvk_conf_content)

            logger.info(f"dxvk.conf created successfully at {dxvk_conf_path}")
            return True

        except Exception as e:
            logger.error(f"Error creating dxvk.conf: {e}")
            return False

    @staticmethod
    def find_steam_config_vdf() -> Optional[Path]:
        """Finds the active Steam config.vdf file."""
        logger.debug("Searching for Steam config.vdf...")
        possible_steam_paths = [
            Path.home() / ".steam/steam",
            Path.home() / ".local/share/Steam",
            Path.home() / ".steam/root"
        ]
        for steam_path in possible_steam_paths:
            potential_path = steam_path / "config/config.vdf"
            if potential_path.is_file():
                logger.info(f"Found config.vdf at: {potential_path}")
                return potential_path  # Return Path object

        logger.warning("Could not locate Steam's config.vdf file in standard locations.")
        return None

    @staticmethod
    def find_steam_library() -> Optional[Path]:
        """Find the primary Steam library common directory containing games."""
        logger.debug("Attempting to find Steam library...")

        # Potential locations for libraryfolders.vdf
        libraryfolders_vdf_paths = [
            os.path.expanduser("~/.steam/steam/config/libraryfolders.vdf"),
            os.path.expanduser("~/.local/share/Steam/config/libraryfolders.vdf"),
            # Add other potential standard locations if necessary
        ]

        # Simple backup mechanism (optional but good practice)
        for path in libraryfolders_vdf_paths:
            if os.path.exists(path):
                backup_dir = os.path.join(os.path.dirname(path), "backups")
                if not os.path.exists(backup_dir):
                    try:
                        os.makedirs(backup_dir)
                    except OSError as e:
                        logger.warning(f"Could not create backup directory {backup_dir}: {e}")

                # Create timestamped backup if it doesn't exist for today
                timestamp = datetime.now().strftime("%Y%m%d")
                backup_filename = f"libraryfolders_{timestamp}.vdf.bak"
                backup_path = os.path.join(backup_dir, backup_filename)

                if not os.path.exists(backup_path):
                    try:
                        import shutil
                        shutil.copy2(path, backup_path)
                        logger.debug(f"Created backup of libraryfolders.vdf at {backup_path}")
                    except Exception as e:
                        logger.error(f"Failed to create backup of libraryfolders.vdf: {e}")
                        # Continue anyway, as we're only reading the file
                        pass

        libraryfolders_vdf_path_obj = None  # Will hold the Path object
        found_path_str = None
        for path_str in libraryfolders_vdf_paths:
            if os.path.exists(path_str):
                found_path_str = path_str  # Keep the string path for logging/opening
                libraryfolders_vdf_path_obj = Path(path_str)  # Convert to Path object here
                logger.debug(f"Found libraryfolders.vdf at: {path_str}")
                break

        # Check using the Path object's is_file() method
        if not libraryfolders_vdf_path_obj or not libraryfolders_vdf_path_obj.is_file():
            logger.warning("libraryfolders.vdf not found or is not a file. Cannot automatically detect Steam Library.")
            return None

        # Parse the VDF file to extract library paths
        library_paths = []
        try:
            # Open using the original string path is fine, or use the Path object
            with open(found_path_str, 'r') as f:  # Or use libraryfolders_vdf_path_obj
                content = f.read()

            # Use regex to find all path entries
            path_matches = re.finditer(r'"path"\s*"([^"]+)"', content)
            for match in path_matches:
                library_path_str = match.group(1).replace('\\\\', '\\')  # Fix potential double escapes
                common_path = os.path.join(library_path_str, "steamapps", "common")
                if os.path.isdir(common_path):  # Verify the common path exists
                    library_paths.append(Path(common_path))
                    logger.debug(f"Found potential common path: {common_path}")
                else:
                    logger.debug(f"Skipping non-existent common path derived from VDF: {common_path}")

            logger.debug(f"Found {len(library_paths)} valid library common paths from VDF.")

            # Return the first valid path found
            if library_paths:
                logger.info(f"Using Steam library common path: {library_paths[0]}")
                return library_paths[0]

            # If no valid paths found in VDF, try the default structure
            logger.debug("No valid common paths found in VDF, checking default location...")
            default_common_path = Path.home() / ".steam/steam/steamapps/common"
            if default_common_path.is_dir():
                logger.info(f"Using default Steam library common path: {default_common_path}")
                return default_common_path

            default_common_path_local = Path.home() / ".local/share/Steam/steamapps/common"
            if default_common_path_local.is_dir():
                logger.info(f"Using default local Steam library common path: {default_common_path_local}")
                return default_common_path_local

            logger.error("No valid Steam library common path found in VDF or default locations.")
            return None

        except Exception as e:
            logger.error(f"Error parsing libraryfolders.vdf or finding Steam library: {e}", exc_info=True)
            return None

    @staticmethod
    def find_compat_data(appid: str) -> Optional[Path]:
        """Find the compatdata directory for a given AppID."""
        if not appid or not appid.isdigit():
            logger.error(f"Invalid AppID provided for compatdata search: {appid}")
            return None

        logger.debug(f"Searching for compatdata directory for AppID: {appid}")

        # Prefer standard Steam locations
        possible_bases = [
            Path.home() / ".steam/steam/steamapps/compatdata",
            Path.home() / ".local/share/Steam/steamapps/compatdata",
            # Add likely SD card mount points if applicable
            # Path("/run/media/mmcblk0p1/steamapps/compatdata")
        ]

        # Check user's Steam Library path if available (more reliable)
        # Assuming PathHandler might store or be passed the library path
        # steam_lib_path = self.find_steam_library()  # Or get from instance var if stored
        # if steam_lib_path and (steam_lib_path / "steamapps/compatdata").is_dir():
        #     possible_bases.insert(0, steam_lib_path / "steamapps/compatdata")  # Prioritize

        for base_path in possible_bases:
            if not base_path.is_dir():
                logger.debug(f"Compatdata base path does not exist or is not a directory: {base_path}")
                continue

            potential_path = base_path / appid
            if potential_path.is_dir():
                logger.info(f"Found compatdata directory: {potential_path}")
                return potential_path  # Return Path object
            else:
                logger.debug(f"Compatdata for {appid} not found in {base_path}")

        # Fallback: Broad search (can be slow, consider if needed)
        # try:
        #     logger.debug(f"Compatdata not found in standard locations, attempting wider search...")
        #     # This can be very slow and resource-intensive
        #     # find_output = subprocess.check_output(['find', '/', '-type', 'd', '-name', appid, '-path', '*/compatdata/*', '-print', '-quit', '2>/dev/null'], text=True).strip()
        #     # if find_output:
        #     #     logger.info(f"Found compatdata via find command: {find_output}")
        #     #     return Path(find_output)
        # except Exception as e:
        #     logger.warning(f"Error during 'find' command for compatdata: {e}")

        logger.warning(f"Compatdata directory for AppID {appid} not found.")
        return None

    @staticmethod
    def detect_stock_game_path(game_type: str, steam_library: Path) -> Optional[Path]:
        """
        Detect the stock game path for a given game type and Steam library
        Returns the path if found, None otherwise
        """
        try:
            # Map of game types to their Steam App IDs
            game_app_ids = {
                'skyrim': '489830',    # Skyrim Special Edition
                'fallout4': '377160',  # Fallout 4
                'fnv': '22380',        # Fallout: New Vegas
                'oblivion': '22330'    # The Elder Scrolls IV: Oblivion
            }

            if game_type not in game_app_ids:
                return None

            app_id = game_app_ids[game_type]
            game_path = steam_library / 'steamapps' / 'common'

            # List of possible game directory names
            possible_names = {
                'skyrim': ['Skyrim Special Edition', 'Skyrim'],
                'fallout4': ['Fallout 4'],
                'fnv': ['Fallout New Vegas', 'FalloutNV'],
                'oblivion': ['Oblivion']
            }

            if game_type not in possible_names:
                return None

            # Check each possible directory name
            for name in possible_names[game_type]:
                potential_path = game_path / name
                if potential_path.exists():
                    return potential_path

            return None

        except Exception as e:
            logging.error(f"Error detecting stock game path: {e}")
            return None

    @staticmethod
    def get_steam_library_path(steam_path: str) -> Optional[str]:
        """Get the Steam library path from libraryfolders.vdf."""
        try:
            libraryfolders_path = os.path.join(steam_path, 'steamapps', 'libraryfolders.vdf')
            if not os.path.exists(libraryfolders_path):
                return None

            with open(libraryfolders_path, 'r', encoding='utf-8') as f:
                content = f.read()

            # Parse the VDF content
            libraries = {}
            current_library = None
            for line in content.split('\n'):
                line = line.strip()
                if line.startswith('"path"'):
                    current_library = line.split('"')[3].replace('\\\\', '\\')
                elif line.startswith('"apps"') and current_library:
                    libraries[current_library] = True

            # Return the first library path that exists
            for library_path in libraries:
                if os.path.exists(library_path):
                    return library_path

            return None
        except Exception as e:
            logger.error(f"Error getting Steam library path: {str(e)}")
            return None

    @staticmethod
    def get_all_steam_library_paths() -> List[Path]:
        """Finds all Steam library paths listed in all known libraryfolders.vdf files (including Flatpak)."""
        logger.info("[DEBUG] Searching for all Steam libraryfolders.vdf files...")
        vdf_paths = [
            Path.home() / ".steam/steam/config/libraryfolders.vdf",
            Path.home() / ".local/share/Steam/config/libraryfolders.vdf",
            Path.home() / ".steam/root/config/libraryfolders.vdf",
            Path.home() / ".var/app/com.valvesoftware.Steam/.local/share/Steam/config/libraryfolders.vdf",  # Flatpak
        ]
        library_paths = set()
        for vdf_path in vdf_paths:
            if vdf_path.is_file():
                logger.info(f"[DEBUG] Parsing libraryfolders.vdf: {vdf_path}")
                try:
                    with open(vdf_path) as f:
                        for line in f:
                            m = re.search(r'"path"\s*"([^"]+)"', line)
                            if m:
                                lib_path = Path(m.group(1))
                                library_paths.add(lib_path)
                except Exception as e:
                    logger.error(f"[DEBUG] Failed to parse {vdf_path}: {e}")
        logger.info(f"[DEBUG] All detected Steam libraries: {library_paths}")
        return list(library_paths)

    # Moved _find_shortcuts_vdf here from ShortcutHandler
    def _find_shortcuts_vdf(self) -> Optional[str]:
        """Helper to find the active shortcuts.vdf file for a user.

        Iterates through userdata directories and returns the path to the
        first found shortcuts.vdf file.

        Returns:
            Optional[str]: The full path to the shortcuts.vdf file, or None if not found.
        """
        # This implementation was moved from ShortcutHandler
        userdata_base_paths = [
            os.path.expanduser("~/.steam/steam/userdata"),
            os.path.expanduser("~/.local/share/Steam/userdata"),
            os.path.expanduser("~/.var/app/com.valvesoftware.Steam/.local/share/Steam/userdata")
        ]
        found_vdf_path = None
        for base_path in userdata_base_paths:
            if not os.path.isdir(base_path):
                logger.debug(f"Userdata base path not found or not a directory: {base_path}")
                continue
            logger.debug(f"Searching for user IDs in: {base_path}")
            try:
                for item in os.listdir(base_path):
                    user_path = os.path.join(base_path, item)
                    if os.path.isdir(user_path) and item.isdigit():
                        logger.debug(f"Checking user directory: {user_path}")
                        config_path = os.path.join(user_path, "config")
                        shortcuts_file = os.path.join(config_path, "shortcuts.vdf")
                        if os.path.isfile(shortcuts_file):
                            logger.info(f"Found shortcuts.vdf at: {shortcuts_file}")
                            found_vdf_path = shortcuts_file
                            break  # Found it for this base path
                        else:
                            logger.debug(f"shortcuts.vdf not found in {config_path}")
            except OSError as e:
                logger.warning(f"Could not access directory {base_path}: {e}")
                continue  # Try next base path
            if found_vdf_path:
                break  # Found it in this base path
        if not found_vdf_path:
            logger.error("Could not find any shortcuts.vdf file in common Steam locations.")
        return found_vdf_path

    @staticmethod
    def find_game_install_paths(target_appids: Dict[str, str]) -> Dict[str, Path]:
        """
        Find installation paths for multiple specified games using Steam app IDs.

        Args:
            target_appids: Dictionary mapping game names to app IDs

        Returns:
            Dictionary mapping game names to their installation paths
        """
        # Get all Steam library paths
        library_paths = PathHandler.get_all_steam_library_paths()
        if not library_paths:
            logger.warning("Failed to find any Steam library paths")
            return {}

        results = {}

        # For each library path, look for each target game
        for library_path in library_paths:
            # Check if the common directory exists
            common_dir = library_path / "common"
            if not common_dir.is_dir():
                logger.debug(f"No 'common' directory in library: {library_path}")
                continue

            # Get subdirectories in common dir
            try:
                game_dirs = [d for d in common_dir.iterdir() if d.is_dir()]
            except (PermissionError, OSError) as e:
                logger.warning(f"Cannot access directory {common_dir}: {e}")
                continue

            # For each app ID, check if we find its directory
            for game_name, app_id in target_appids.items():
                if game_name in results:
                    continue  # Already found this game

                # Try to find by appmanifest
                appmanifest_path = library_path / f"appmanifest_{app_id}.acf"
                if appmanifest_path.is_file():
                    # Find the installdir value
                    try:
                        with open(appmanifest_path, 'r', encoding='utf-8') as f:
                            content = f.read()
                        match = re.search(r'"installdir"\s+"([^"]+)"', content)
                        if match:
                            install_dir_name = match.group(1)
                            install_path = common_dir / install_dir_name
                            if install_path.is_dir():
                                results[game_name] = install_path
                                logger.info(f"Found {game_name} at {install_path}")
                                continue
                    except Exception as e:
                        logger.warning(f"Error reading appmanifest for {game_name}: {e}")

        return results

    def replace_gamepath(self, modlist_ini_path: Path, new_game_path: Path, modlist_sdcard: bool = False) -> bool:
        """
        Updates the gamePath value in ModOrganizer.ini to the specified path.
        Strictly matches the bash script: only replaces an existing gamePath line.
        If the file or line does not exist, logs error and aborts.
        """
        logger.info(f"Replacing gamePath in {modlist_ini_path} with {new_game_path}")
        if not modlist_ini_path.is_file():
            logger.error(f"ModOrganizer.ini not found at: {modlist_ini_path}")
            return False
        try:
            with open(modlist_ini_path, 'r', encoding='utf-8', errors='ignore') as f:
                lines = f.readlines()
            drive_letter = "D:" if modlist_sdcard else "Z:"
            processed_path = self._strip_sdcard_path_prefix(new_game_path)
            windows_style = processed_path.replace('/', '\\')
            windows_style_double = windows_style.replace('\\', '\\\\')
            new_gamepath_line = f'gamePath=@ByteArray({drive_letter}{windows_style_double})\n'
            gamepath_found = False
            for i, line in enumerate(lines):
                # Make the check case-insensitive and robust to whitespace
                if re.match(r'^\s*gamepath\s*=.*$', line, re.IGNORECASE):
                    lines[i] = new_gamepath_line
                    gamepath_found = True
                    break
            if not gamepath_found:
                logger.error("No gamePath line found in ModOrganizer.ini")
                return False
            with open(modlist_ini_path, 'w', encoding='utf-8') as f:
                f.writelines(lines)
            logger.info(f"Successfully updated gamePath to {new_game_path}")
            return True
        except Exception as e:
            logger.error(f"Error replacing gamePath: {e}", exc_info=True)
            return False

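    # Illustrative call (editor's example, not part of this commit): look up the
    # Skyrim Special Edition install directory by its Steam App ID.
    #
    #   installs = PathHandler.find_game_install_paths({"Skyrim Special Edition": "489830"})
    #   skyrim_dir = installs.get("Skyrim Special Edition")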
    # =====================================================================================
    # CRITICAL: DO NOT CHANGE THIS FUNCTION WITHOUT UPDATING TESTS AND CONSULTING PROJECT LEAD
    # This function implements the exact path rewriting logic required for ModOrganizer.ini
    # to match the original, robust bash script. Any change here risks breaking modlist
    # configuration for users. If you must change this, update all relevant tests and
    # consult the Project Lead for Jackify. See also omni-guides.sh for reference logic.
    # =====================================================================================
    def edit_binary_working_paths(self, modlist_ini_path: Path, modlist_dir_path: Path, modlist_sdcard: bool, steam_libraries: Optional[List[Path]] = None) -> bool:
        """
        Update all binary paths and working directories in a ModOrganizer.ini file.
        Handles various ModOrganizer.ini formats (single or double backslashes in keys).
        When updating gamePath, binary, and workingDirectory, retain the original stock folder (Stock Game, Game Root, etc) if present in the current value.
        steam_libraries: Optional[List[Path]] - already-discovered Steam library paths to use for vanilla detection.

        # DO NOT CHANGE THIS LOGIC WITHOUT UPDATING TESTS AND CONSULTING THE PROJECT LEAD
        # This is a critical, regression-prone area. See omni-guides.sh for reference.
        """
        try:
            logger.debug(f"Updating binary paths and working directories in {modlist_ini_path} to use root: {modlist_dir_path}")
            if not modlist_ini_path.is_file():
                logger.error(f"INI file {modlist_ini_path} does not exist")
                return False
            with open(modlist_ini_path, 'r', encoding='utf-8') as f:
                lines = f.readlines()
            game_path_updated = False
            binary_paths_updated = 0
            working_dirs_updated = 0
            binary_lines = []
            working_dir_lines = []
            for i, line in enumerate(lines):
                stripped = line.strip()
                binary_match = re.match(r'^(\d+)(\\+)\s*binary\s*=.*$', stripped, re.IGNORECASE)
                if binary_match:
                    index = binary_match.group(1)
                    backslash_style = binary_match.group(2)
                    binary_lines.append((i, stripped, index, backslash_style))
                wd_match = re.match(r'^(\d+)(\\+)\s*workingDirectory\s*=.*$', stripped, re.IGNORECASE)
                if wd_match:
                    index = wd_match.group(1)
                    backslash_style = wd_match.group(2)
                    working_dir_lines.append((i, stripped, index, backslash_style))
            binary_paths_by_index = {}
            # Use provided steam_libraries if available, else detect
            if steam_libraries is None or not steam_libraries:
                steam_libraries = PathHandler.get_all_steam_library_paths()
            for i, line, index, backslash_style in binary_lines:
                parts = line.split('=', 1)
                if len(parts) != 2:
                    logger.error(f"Malformed binary line: {line}")
                    continue
                key_part, value_part = parts
                exe_name = os.path.basename(value_part)
                drive_prefix = "D:" if modlist_sdcard else "Z:"
                rel_path = None
                # --- BEGIN: FULL PARITY LOGIC ---
                if 'steamapps' in value_part:
                    idx = value_part.index('steamapps')
                    subpath = value_part[idx:].lstrip('/')
                    correct_steam_lib = None
                    for lib in steam_libraries:
                        if (lib / subpath.split('/')[2]).exists():
                            correct_steam_lib = lib.parent
                            break
                    if not correct_steam_lib and steam_libraries:
                        correct_steam_lib = steam_libraries[0].parent
                    if correct_steam_lib:
                        new_binary_path = f"{drive_prefix}/{correct_steam_lib}/{subpath}".replace('\\', '/').replace('//', '/')
                    else:
                        logger.error("Could not determine correct Steam library for vanilla game path.")
                        continue
                else:
                    found_stock = None
                    for folder in STOCK_GAME_FOLDERS:
                        folder_pattern = f"/{folder.replace(' ', '')}".lower()
                        value_part_lower = value_part.replace(' ', '').lower()
                        if folder_pattern in value_part_lower:
                            idx = value_part_lower.index(folder_pattern)
                            rel_path = value_part[idx:].lstrip('/')
                            found_stock = folder
                            break
                    if not rel_path:
                        mods_pattern = "/mods/"
                        if mods_pattern in value_part:
                            idx = value_part.index(mods_pattern)
                            rel_path = value_part[idx:].lstrip('/')
                        else:
                            rel_path = exe_name
                    new_binary_path = f"{drive_prefix}/{modlist_dir_path}/{rel_path}".replace('\\', '/').replace('//', '/')
                formatted_binary_path = PathHandler._format_binary_for_mo2(new_binary_path)
                new_binary_line = f"{index}{backslash_style}binary={formatted_binary_path}"
                logger.debug(f"Updating binary path: {line.strip()} -> {new_binary_line}")
                lines[i] = new_binary_line + "\n"
                binary_paths_updated += 1
                binary_paths_by_index[index] = formatted_binary_path
            for j, wd_line, index, backslash_style in working_dir_lines:
                if index in binary_paths_by_index:
                    binary_path = binary_paths_by_index[index]
                    wd_path = os.path.dirname(binary_path)
                    drive_prefix = "D:" if modlist_sdcard else "Z:"
                    if wd_path.startswith("D:") or wd_path.startswith("Z:"):
                        wd_path = wd_path[2:]
                    wd_path = drive_prefix + wd_path
                    formatted_wd_path = PathHandler._format_workingdir_for_mo2(wd_path)
                    key_part = f"{index}{backslash_style}workingDirectory"
                    new_wd_line = f"{key_part}={formatted_wd_path}"
                    logger.debug(f"Updating working directory: {wd_line.strip()} -> {new_wd_line}")
                    lines[j] = new_wd_line + "\n"
                    working_dirs_updated += 1
            with open(modlist_ini_path, 'w', encoding='utf-8') as f:
                f.writelines(lines)
            logger.info(f"edit_binary_working_paths completed: Game path updated: {game_path_updated}, Binary paths updated: {binary_paths_updated}, Working directories updated: {working_dirs_updated}")
            return True
        except Exception as e:
            logger.error(f"Error updating binary paths in {modlist_ini_path}: {str(e)}")
            return False

    def _format_path_for_mo2(self, path: str) -> str:
        """Format a path for MO2's ModOrganizer.ini file (working directories)."""
        # Replace forward slashes with double backslashes
        formatted = path.replace('/', '\\')
        # Ensure we have a Windows drive letter format
        if not re.match(r'^[A-Za-z]:', formatted):
            formatted = 'D:' + formatted
        # Double the backslashes for the INI file format
        formatted = formatted.replace('\\', '\\\\')
        return formatted

    def _format_binary_path_for_mo2(self, path_str):
        """Format a binary path for MO2 config file.

        Binary paths need forward slashes (/) in the path portion.
        """
        # Replace backslashes with forward slashes
        return path_str.replace('\\', '/')

    def _format_working_dir_for_mo2(self, path_str):
        """
        Format a working directory path for MO2 config file.
        Ensures double backslashes throughout, as required by ModOrganizer.ini.
        """
        import re
        path = path_str.replace('/', '\\')
        path = path.replace('\\', '\\\\')  # Double all backslashes
        # Ensure only one double backslash after drive letter
        path = re.sub(r'^([A-Z]:)\\\\+', r'\1\\\\', path)
        return path

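    # Editor's note, an illustrative example (not part of this commit): the helpers
    # above encode MO2's ModOrganizer.ini conventions. Binary paths use forward
    # slashes, working directories use doubled backslashes:
    #
    #   PathHandler()._format_binary_path_for_mo2('Z:\\home\\user\\Stock Game\\skse64_loader.exe')
    #   # returns 'Z:/home/user/Stock Game/skse64_loader.exe'
    #   PathHandler()._format_working_dir_for_mo2('Z:/home/user/Stock Game')
    #   # returns the same path with backslashes, each doubled for the INI format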
    @staticmethod
    def find_vanilla_game_paths(game_names=None) -> Dict[str, Path]:
        """
        For each known game, iterate all Steam libraries and look for the canonical game directory name in steamapps/common.
        Returns a dict of found games and their paths.
        Args:
            game_names: Optional list of game names to check. If None, uses default supported games.
        Returns:
            Dict[str, Path]: Mapping of game name to found install Path.
        """
        # Canonical game directory names (allow list for Fallout 3)
        GAME_DIR_NAMES = {
            "Skyrim Special Edition": ["Skyrim Special Edition"],
            "Fallout 4": ["Fallout 4"],
            "Fallout New Vegas": ["Fallout New Vegas"],
            "Oblivion": ["Oblivion"],
            "Fallout 3": ["Fallout 3", "Fallout 3 goty"]
        }
        if game_names is None:
            game_names = list(GAME_DIR_NAMES.keys())
        all_steam_libraries = PathHandler.get_all_steam_library_paths()
        logger.info(f"[DEBUG] Detected Steam libraries: {all_steam_libraries}")
        found_games = {}
        for game in game_names:
            possible_names = GAME_DIR_NAMES.get(game, [game])
            for lib in all_steam_libraries:
                for name in possible_names:
                    candidate = lib / "steamapps" / "common" / name
                    logger.info(f"[DEBUG] Checking for vanilla game directory: {candidate}")
                    if candidate.is_dir():
                        found_games[game] = candidate
                        logger.info(f"Found vanilla game directory for {game}: {candidate}")
                        break  # Stop after first found location
                if game in found_games:
                    break
        return found_games

    def _detect_stock_game_path(self):
        """Detects common 'Stock Game' or 'Game Root' directories within the modlist path."""
        self.logger.info("Step 7a: Detecting Stock Game/Game Root directory...")
        if not self.modlist_dir:
            self.logger.error("Modlist directory not set, cannot detect stock game path.")
            return False

        modlist_path = Path(self.modlist_dir)
        # Always prefer 'Stock Game' if it exists, then fallback to others
        preferred_order = [
            "Stock Game",
            "STOCK GAME",
            "Skyrim Stock",
            "Stock Game Folder",
            "Stock Folder",
            Path("root/Skyrim Special Edition"),
            "Game Root"  # 'Game Root' is now last
        ]

        found_path = None
        for name in preferred_order:
            potential_path = modlist_path / name
            if potential_path.is_dir():
                found_path = str(potential_path)
                self.logger.info(f"Found potential stock game directory: {found_path}")
                break  # Found the first match
        if found_path:
            self.stock_game_path = found_path
            return True
        else:
            self.stock_game_path = None
            self.logger.info("No common Stock Game/Game Root directory found. Will assume vanilla game path is needed for some operations.")
            return True

    # --- Add robust path formatters for INI fields ---
    @staticmethod
    def _format_gamepath_for_mo2(path: str) -> str:
        import re
        path = path.replace('/', '\\')
        path = re.sub(r'\\+', r'\\', path)  # Collapse multiple backslashes
        # Ensure only one double backslash after drive letter
        path = re.sub(r'^([A-Z]:)\\+', r'\1\\', path)
        return path

    @staticmethod
    def _format_binary_for_mo2(path: str) -> str:
        import re
        path = path.replace('\\', '/')
        # Collapse multiple forward slashes after drive letter
        path = re.sub(r'^([A-Z]:)//+', r'\1/', path)
        return path

    @staticmethod
    def _format_workingdir_for_mo2(path: str) -> str:
        import re
        path = path.replace('/', '\\')
        path = path.replace('\\', '\\\\')  # Double all backslashes
        # Ensure only one double backslash after drive letter
        path = re.sub(r'^([A-Z]:)\\\\+', r'\1\\\\', path)
        return path

# --- End of PathHandler ---
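A short usage sketch for the vanilla-game lookup above (editor's example, not part of this commit):

from jackify.shared.paths import PathHandler

found = PathHandler.find_vanilla_game_paths(["Skyrim Special Edition", "Fallout 4"])
for game, path in found.items():
    print(f"{game}: {path}")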
84
jackify/shared/timing.py
Normal file
@@ -0,0 +1,84 @@
"""
Simple shared timing for consistent progress timestamps across all Jackify services.
"""
import time
import re

# Global state for shared timing
_start_time = None
_base_offset = 0


def initialize_from_console_output(console_text: str = None):
    """Initialize timing, optionally continuing from jackify-engine output"""
    global _start_time, _base_offset

    if _start_time is not None:
        return  # Already initialized

    if console_text:
        # Parse last timestamp from jackify-engine
        timestamp_pattern = r'\[(\d{2}):(\d{2}):(\d{2})\]'
        matches = list(re.finditer(timestamp_pattern, console_text))

        if matches:
            last_match = matches[-1]
            hours = int(last_match.group(1))
            minutes = int(last_match.group(2))
            seconds = int(last_match.group(3))
            _base_offset = hours * 3600 + minutes * 60 + seconds + 1

    _start_time = time.time()


def continue_from_timestamp(timestamp_str: str):
    """Continue timing from a specific timestamp string like '[00:00:31]'"""
    global _start_time, _base_offset

    # Parse timestamp like [00:00:31]
    timestamp_pattern = r'\[(\d{2}):(\d{2}):(\d{2})\]'
    match = re.match(timestamp_pattern, timestamp_str)

    if match:
        hours = int(match.group(1))
        minutes = int(match.group(2))
        seconds = int(match.group(3))
        _base_offset = hours * 3600 + minutes * 60 + seconds + 1
        _start_time = time.time()
    else:
        # Fallback to normal initialization
        initialize_from_console_output()


def start_new_phase():
    """Start a new phase with timing reset to [00:00:00]"""
    global _start_time, _base_offset
    _start_time = time.time()
    _base_offset = 0


def set_base_offset_from_installation_end():
    """Set base offset to continue from where Installation phase typically ends"""
    global _start_time, _base_offset

    # Installation phase typically ends around 1-2 minutes, so start from 1:30
    _base_offset = 90  # 1 minute 30 seconds
    _start_time = time.time()


def get_timestamp():
    """Get current timestamp in [HH:MM:SS] format"""
    global _start_time, _base_offset

    if _start_time is None:
        initialize_from_console_output()

    elapsed = int(time.time() - _start_time)
    total_seconds = _base_offset + elapsed

    hours = total_seconds // 3600
    minutes = (total_seconds % 3600) // 60
    seconds = total_seconds % 60

    return f"[{hours:02d}:{minutes:02d}:{seconds:02d}]"


def reset():
    """Reset timing (for testing)"""
    global _start_time, _base_offset
    _start_time = None
    _base_offset = 0
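An illustrative sketch of the timing helpers (editor's example, not part of this commit): continue the clock from the last jackify-engine timestamp, then stamp subsequent progress lines:

from jackify.shared import timing

timing.continue_from_timestamp('[00:00:31]')
print(f"{timing.get_timestamp()} Post-install configuration started")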
23
jackify/shared/ui_utils.py
Normal file
@@ -0,0 +1,23 @@
"""
UI Utilities for Jackify
Shared UI components and utilities used across frontend interfaces
"""

def print_jackify_banner():
    """Print the Jackify application banner"""
    print("""
╔════════════════════════════════════════════════════════════════════════╗
║                         Jackify CLI (pre-alpha)                          ║
║                                                                          ║
║              A tool for installing and configuring modlists              ║
║                    & associated utilities on Linux                       ║
╚════════════════════════════════════════════════════════════════════════╝
""")

def print_section_header(title):
    """Print a section header with formatting"""
    print(f"\n{'='*30}\n{title}\n{'='*30}\n")

def print_subsection_header(title):
    """Print a subsection header with formatting"""
    print(f"[ {title} ]\n")
283
jackify/shared/validation.py
Normal file
@@ -0,0 +1,283 @@
|
||||
"""
|
||||
ValidationHandler module for managing validation operations.
|
||||
This module handles input validation, path validation, and configuration validation.
|
||||
"""
|
||||
|
||||
import os
|
||||
import logging
|
||||
import re
|
||||
import shutil
|
||||
import vdf
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, List, Tuple, Any
|
||||
|
||||
class ValidationHandler:
|
||||
def __init__(self):
|
||||
self.logger = logging.getLogger(__name__)
|
||||
|
||||
def validate_path(self, path: Path, must_exist: bool = True) -> Tuple[bool, str]:
|
||||
"""Validate a path."""
|
||||
try:
|
||||
if not isinstance(path, Path):
|
||||
return False, "Path must be a Path object"
|
||||
|
||||
if must_exist and not path.exists():
|
||||
return False, f"Path does not exist: {path}"
|
||||
|
||||
if not os.access(path, os.R_OK | os.W_OK):
|
||||
return False, f"Path is not accessible: {path}"
|
||||
|
||||
return True, "Path is valid"
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to validate path {path}: {e}")
|
||||
return False, str(e)
|
||||
|
||||
def validate_input(self, value: Any, rules: Dict) -> Tuple[bool, str]:
|
||||
"""Validate user input against rules."""
|
||||
try:
|
||||
# Check required
|
||||
if rules.get('required', False) and not value:
|
||||
return False, "Value is required"
|
||||
|
||||
# Check type
|
||||
if 'type' in rules and not isinstance(value, rules['type']):
|
||||
return False, f"Value must be of type {rules['type'].__name__}"
|
||||
|
||||
# Check min/max length for strings
|
||||
if isinstance(value, str):
|
||||
if 'min_length' in rules and len(value) < rules['min_length']:
|
||||
return False, f"Value must be at least {rules['min_length']} characters"
|
||||
if 'max_length' in rules and len(value) > rules['max_length']:
|
||||
return False, f"Value must be at most {rules['max_length']} characters"
|
||||
|
||||
# Check min/max value for numbers
|
||||
if isinstance(value, (int, float)):
|
||||
if 'min_value' in rules and value < rules['min_value']:
|
||||
return False, f"Value must be at least {rules['min_value']}"
|
||||
if 'max_value' in rules and value > rules['max_value']:
|
||||
return False, f"Value must be at most {rules['max_value']}"
|
||||
|
||||
# Check pattern for strings
|
||||
if isinstance(value, str) and 'pattern' in rules:
|
||||
if not re.match(rules['pattern'], value):
|
||||
return False, f"Value must match pattern: {rules['pattern']}"
|
||||
|
||||
# Check custom validation function
|
||||
if 'validate' in rules and callable(rules['validate']):
|
||||
result = rules['validate'](value)
|
||||
if isinstance(result, tuple):
|
||||
return result
|
||||
elif not result:
|
||||
return False, "Custom validation failed"
|
||||
|
||||
return True, "Input is valid"
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to validate input: {e}")
|
||||
return False, str(e)
|
||||
|
||||
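validate_input drives every check from a plain rules dict, so callers can compose constraints declaratively. A hedged example of such a rules mapping; the keys ('required', 'type', 'min_length', 'max_length', 'pattern', 'validate') are the ones the method reads, while the concrete values are purely illustrative:

# Illustrative rules dict - keys match those read by validate_input above
from jackify.shared.validation import ValidationHandler

handler = ValidationHandler()

name_rules = {
    'required': True,
    'type': str,
    'min_length': 3,
    'max_length': 64,
    'pattern': r'^[\w .-]+$',              # modlist-style display name
    'validate': lambda v: v.strip() == v,  # no leading/trailing whitespace
}

ok, message = handler.validate_input("Wildlander", name_rules)
print(ok, message)  # True "Input is valid"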
    def validate_config(self, config: Dict, schema: Dict) -> Tuple[bool, List[str]]:
        """Validate configuration against a schema."""
        try:
            errors = []

            # Check required fields
            for field, rules in schema.items():
                if rules.get('required', False) and field not in config:
                    errors.append(f"Missing required field: {field}")

            # Check field types and values
            for field, value in config.items():
                if field not in schema:
                    errors.append(f"Unknown field: {field}")
                    continue

                rules = schema[field]
                if 'type' in rules and not isinstance(value, rules['type']):
                    errors.append(f"Invalid type for {field}: expected {rules['type'].__name__}")

                if isinstance(value, str):
                    if 'min_length' in rules and len(value) < rules['min_length']:
                        errors.append(f"{field} must be at least {rules['min_length']} characters")
                    if 'max_length' in rules and len(value) > rules['max_length']:
                        errors.append(f"{field} must be at most {rules['max_length']} characters")
                    if 'pattern' in rules and not re.match(rules['pattern'], value):
                        errors.append(f"{field} must match pattern: {rules['pattern']}")

                if isinstance(value, (int, float)):
                    if 'min_value' in rules and value < rules['min_value']:
                        errors.append(f"{field} must be at least {rules['min_value']}")
                    if 'max_value' in rules and value > rules['max_value']:
                        errors.append(f"{field} must be at most {rules['max_value']}")

                if 'validate' in rules and callable(rules['validate']):
                    result = rules['validate'](value)
                    if isinstance(result, tuple):
                        if not result[0]:
                            errors.append(f"{field}: {result[1]}")
                    elif not result:
                        errors.append(f"Custom validation failed for {field}")

            return len(errors) == 0, errors
        except Exception as e:
            self.logger.error(f"Failed to validate config: {e}")
            return False, [str(e)]
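validate_config reuses the same per-field rule shape but collects every problem instead of stopping at the first failure. A sketch of a schema/config pair; the field names are purely illustrative, not Jackify's real configuration keys:

# Hypothetical schema and config - field names are examples only
handler = ValidationHandler()

schema = {
    'modlist_name': {'required': True, 'type': str, 'min_length': 1},
    'resolution':   {'type': str, 'pattern': r'^\d+x\d+$'},
    'threads':      {'type': int, 'min_value': 1, 'max_value': 64},
}
config = {'modlist_name': 'Tuxborn', 'resolution': '1280720', 'threads': 0}

ok, errors = handler.validate_config(config, schema)
# ok is False; errors reports the malformed 'resolution' and the out-of-range 'threads'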
    def validate_dependencies(self, dependencies: List[str]) -> Tuple[bool, List[str]]:
        """Validate system dependencies."""
        try:
            missing = []
            for dep in dependencies:
                if not shutil.which(dep):
                    missing.append(dep)
            return len(missing) == 0, missing
        except Exception as e:
            self.logger.error(f"Failed to validate dependencies: {e}")
            return False, [str(e)]
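validate_dependencies is a thin wrapper over shutil.which. The binary names below are only examples of external tools a modlist workflow might need, not a list Jackify actually requires:

# Example call - binary names are illustrative
handler = ValidationHandler()
ok, missing = handler.validate_dependencies(['7z', 'curl', 'protontricks'])
if not ok:
    print(f"Missing tools: {', '.join(missing)}")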
    def validate_game_installation(self, game_type: str, path: Path) -> Tuple[bool, str]:
        """Validate a game installation."""
        try:
            # Check if path exists
            if not path.exists():
                return False, f"Game path does not exist: {path}"

            # Check if path is accessible
            if not os.access(path, os.R_OK | os.W_OK):
                return False, f"Game path is not accessible: {path}"

            # Check for game-specific files
            if game_type == 'skyrim':
                if not (path / 'SkyrimSE.exe').exists():
                    return False, "SkyrimSE.exe not found"
            elif game_type == 'fallout4':
                if not (path / 'Fallout4.exe').exists():
                    return False, "Fallout4.exe not found"
            elif game_type == 'falloutnv':
                if not (path / 'FalloutNV.exe').exists():
                    return False, "FalloutNV.exe not found"
            elif game_type == 'oblivion':
                if not (path / 'Oblivion.exe').exists():
                    return False, "Oblivion.exe not found"
            else:
                return False, f"Unknown game type: {game_type}"

            return True, "Game installation is valid"
        except Exception as e:
            self.logger.error(f"Failed to validate game installation: {e}")
            return False, str(e)
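Each supported game_type maps to the executable expected in its install directory. A quick call using a typical (assumed, not hard-coded) Steam library location:

# The library path below is a common default, used here only as an example
from pathlib import Path

handler = ValidationHandler()
game_dir = Path.home() / '.steam' / 'steam' / 'steamapps' / 'common' / 'Skyrim Special Edition'
ok, message = handler.validate_game_installation('skyrim', game_dir)
print(message)  # "Game installation is valid" or the specific failure reason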
    def validate_modlist(self, modlist_path: Path) -> Tuple[bool, List[str]]:
        """Validate a modlist installation."""
        try:
            errors = []

            # Check if path exists
            if not modlist_path.exists():
                errors.append(f"Modlist path does not exist: {modlist_path}")
                return False, errors

            # Check if path is accessible
            if not os.access(modlist_path, os.R_OK | os.W_OK):
                errors.append(f"Modlist path is not accessible: {modlist_path}")
                return False, errors

            # Check for ModOrganizer.ini
            if not (modlist_path / 'ModOrganizer.ini').exists():
                errors.append("ModOrganizer.ini not found")

            # Check for mods directory
            if not (modlist_path / 'mods').exists():
                errors.append("mods directory not found")

            # Check for profiles directory
            if not (modlist_path / 'profiles').exists():
                errors.append("profiles directory not found")

            return len(errors) == 0, errors
        except Exception as e:
            self.logger.error(f"Failed to validate modlist: {e}")
            return False, [str(e)]

    def validate_wine_prefix(self, app_id: str) -> Tuple[bool, str]:
        """Validate a Wine prefix."""
        try:
            # Check if prefix exists
            prefix_path = Path.home() / '.steam' / 'steam' / 'steamapps' / 'compatdata' / app_id / 'pfx'
            if not prefix_path.exists():
                return False, f"Wine prefix does not exist: {prefix_path}"

            # Check if prefix is accessible
            if not os.access(prefix_path, os.R_OK | os.W_OK):
                return False, f"Wine prefix is not accessible: {prefix_path}"

            # Check for system.reg
            if not (prefix_path / 'system.reg').exists():
                return False, "system.reg not found"

            return True, "Wine prefix is valid"
        except Exception as e:
            self.logger.error(f"Failed to validate Wine prefix: {e}")
            return False, str(e)

    def validate_steam_shortcut(self, app_id: str) -> Tuple[bool, str]:
        """Validate a Steam shortcut."""
        try:
            # Check if shortcuts.vdf exists
            shortcuts_path = Path.home() / '.steam' / 'steam' / 'userdata' / '75424832' / 'config' / 'shortcuts.vdf'
            if not shortcuts_path.exists():
                return False, "shortcuts.vdf not found"

            # Check if shortcuts.vdf is accessible
            if not os.access(shortcuts_path, os.R_OK | os.W_OK):
                return False, "shortcuts.vdf is not accessible"

            # Parse shortcuts.vdf using VDFHandler
            shortcuts_data = VDFHandler.load(str(shortcuts_path), binary=True)

            # Check if shortcut exists
            for shortcut in shortcuts_data.get('shortcuts', {}).values():
                if str(shortcut.get('appid')) == app_id:
                    return True, "Steam shortcut is valid"

            return False, f"Steam shortcut not found: {app_id}"
        except Exception as e:
            self.logger.error(f"Failed to validate Steam shortcut: {e}")
            return False, str(e)
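Note that VDFHandler is not imported in this file (only the vdf package is), so it is presumably provided by another Jackify module; as written, the call would raise NameError, and the userdata ID is hard-coded to a single Steam account. For reference, the same binary file can be parsed with the imported vdf package directly. This is a sketch, not the project's VDFHandler API:

# Sketch using the vdf package imported at the top of this file;
# VDFHandler itself is assumed to live elsewhere in the Jackify codebase.
import vdf
from pathlib import Path

shortcuts_path = Path.home() / '.steam' / 'steam' / 'userdata' / '75424832' / 'config' / 'shortcuts.vdf'
with open(shortcuts_path, 'rb') as f:      # shortcuts.vdf is a binary VDF file
    shortcuts_data = vdf.binary_load(f)    # returns a nested dict

for shortcut in shortcuts_data.get('shortcuts', {}).values():
    # key casing ('AppName' vs 'appname') can vary between Steam client versions
    print(shortcut.get('appid'), shortcut.get('AppName') or shortcut.get('appname'))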
    def validate_resolution(self, resolution: str) -> Tuple[bool, str]:
        """Validate a resolution string."""
        try:
            # Check format
            if not re.match(r'^\d+x\d+$', resolution):
                return False, "Resolution must be in format WIDTHxHEIGHT"

            # Parse dimensions
            width, height = map(int, resolution.split('x'))

            # Check minimum dimensions
            if width < 640 or height < 480:
                return False, "Resolution must be at least 640x480"

            # Check maximum dimensions
            if width > 7680 or height > 4320:
                return False, "Resolution must be at most 7680x4320"

            return True, "Resolution is valid"
        except Exception as e:
            self.logger.error(f"Failed to validate resolution: {e}")
            return False, str(e)

    def validate_permissions(self, path: Path, required_permissions: int) -> Tuple[bool, str]:
        """Validate file or directory permissions."""
        try:
            # Get current permissions
            current_permissions = os.stat(path).st_mode & 0o777

            # Check if current permissions include required permissions
            if current_permissions & required_permissions != required_permissions:
                return False, f"Missing required permissions: {required_permissions:o}"

            return True, "Permissions are valid"
        except Exception as e:
            self.logger.error(f"Failed to validate permissions: {e}")
            return False, str(e)
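Every ValidationHandler method returns a (success, detail) tuple, so both front ends can surface the message or error list directly. A condensed end-to-end sketch; the paths are placeholders, not defaults Jackify assumes:

# Condensed usage sketch - paths are placeholders
from pathlib import Path
from jackify.shared.validation import ValidationHandler

handler = ValidationHandler()

ok_res, res_msg = handler.validate_resolution('1920x1080')
ok_modlist, problems = handler.validate_modlist(Path.home() / 'Games' / 'Tuxborn')
ok_perm, perm_msg = handler.validate_permissions(Path.home() / 'Jackify', 0o700)

for label, ok, detail in [('resolution', ok_res, res_msg),
                          ('modlist', ok_modlist, problems),
                          ('permissions', ok_perm, perm_msg)]:
    if not ok:
        print(f"{label}: {detail}")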