@@ -21,7 +21,7 @@ from pathlib import Path, PurePosixPath
 from threading import RLock
 from typing import Optional, Dict, Any, List, Tuple, Set
 
-from SYS.utils import sha256_file
+from SYS.utils import sha256_file, expand_path
 from SYS.logger import debug as mm_debug
 
 logger = logging.getLogger(__name__)
@@ -208,7 +208,7 @@ class API_folder_store:
         Args:
             library_root: Path to the local library root directory
         """
-        self.library_root = Path(library_root)
+        self.library_root = expand_path(library_root)
         self.db_path = self.library_root / self.DB_NAME
         self.connection: Optional[sqlite3.Connection] = None
         # sqlite3 connections are not safe for concurrent use across threads.
@@ -218,8 +218,13 @@ class API_folder_store:
         self._init_db()
 
     def _normalize_input_path(self, file_path: Path) -> Path:
-        p = Path(file_path).expanduser()
+        p = expand_path(file_path)
         if not p.is_absolute():
+            # Check if it already seems to start with library_root but just wasn't absolute
+            # (e.g. library_root is "C:\foo" and p is "foo\bar" which might happen in some cases)
+            # though usually it's better to just join.
+            # But the recursive case happened because library_root was "$home/files" (not absolute)
+            # and p was "$home/files/..." (not absolute).
             p = self.library_root / p
         return p
 
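The recursion note in the hunk above is easiest to see with a standalone sketch. The values below (library_root_raw, incoming, clip.mp4) are hypothetical and the local expand_path mirrors the helper added in SYS/utils.py; the point is that Path.expanduser() only expands a leading "~", so an unexpanded "$HOME/..." root stayed relative and got re-joined onto itself, while expanding environment variables first yields an absolute path that is kept as-is.

```python
import os
from pathlib import Path


def expand_path(p) -> Path:
    """Local mirror of the helper added in SYS/utils.py: env vars first, then ~."""
    return Path(os.path.expandvars(str(p))).expanduser()


library_root_raw = "$HOME/files"        # hypothetical unexpanded config value
incoming = "$HOME/files/clip.mp4"       # hypothetical incoming file path

# Old behaviour: expanduser() leaves "$HOME" alone, both paths stay relative,
# and the join produces a doubled ".../$HOME/files/$HOME/files/clip.mp4".
old = Path(library_root_raw) / Path(incoming).expanduser()

# New behaviour (assuming $HOME is set): both sides become absolute, so the
# incoming path is already under the root and no join happens.
p = expand_path(incoming)
new = p if p.is_absolute() else expand_path(library_root_raw) / p

print(old)
print(new)
```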
@@ -6,6 +6,7 @@ import re
 from pathlib import Path
 from typing import Any, Dict, Optional
 from SYS.logger import log
+from SYS.utils import expand_path
 
 DEFAULT_CONFIG_FILENAME = "config.conf"
 SCRIPT_DIR = Path(__file__).resolve().parent
@@ -13,6 +14,11 @@ SCRIPT_DIR = Path(__file__).resolve().parent
 _CONFIG_CACHE: Dict[str, Dict[str, Any]] = {}
 
 
+def clear_config_cache() -> None:
+    """Clear the configuration cache."""
+    _CONFIG_CACHE.clear()
+
+
 def _strip_inline_comment(line: str) -> str:
     # Strip comments in a way that's friendly to common .conf usage:
     # - Full-line comments starting with '#' or ';'
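A brief usage sketch of the new hook, assuming load_config() memoizes parsed files in _CONFIG_CACHE (the cache lookup itself is outside this diff): without clearing, a repeat call would hand back the cached dict even after config.conf changes on disk. This is the pattern the TUI.py hunk at the end of this commit adopts.

```python
from SYS.config import load_config, clear_config_cache

cfg = load_config()        # first call: parse config.conf and cache the result
# ... config.conf is edited on disk (e.g. via the TUI config modal) ...
clear_config_cache()       # drop the memoized dict
cfg = load_config()        # re-reads the file instead of returning the stale cache
```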
@@ -438,7 +444,7 @@ def resolve_output_dir(config: Dict[str, Any]) -> Path:
     temp_value = config.get("temp")
     if temp_value:
         try:
-            path = Path(str(temp_value)).expanduser()
+            path = expand_path(temp_value)
             # Verify we can access it (not a system directory with permission issues)
             if path.exists() or path.parent.exists():
                 return path
@@ -449,7 +455,7 @@ def resolve_output_dir(config: Dict[str, Any]) -> Path:
     outfile_value = config.get("outfile")
     if outfile_value:
         try:
-            return Path(str(outfile_value)).expanduser()
+            return expand_path(outfile_value)
         except Exception:
             pass
 
@@ -480,7 +486,7 @@ def get_local_storage_path(config: Dict[str, Any]) -> Optional[Path]:
     if isinstance(default_config, dict):
         path_str = default_config.get("path")
         if path_str:
-            return Path(str(path_str)).expanduser()
+            return expand_path(path_str)
 
     # Fall back to storage.local.path format
     storage = config.get("storage", {})
@@ -489,14 +495,14 @@ def get_local_storage_path(config: Dict[str, Any]) -> Optional[Path]:
     if isinstance(local_config, dict):
         path_str = local_config.get("path")
         if path_str:
-            return Path(str(path_str)).expanduser()
+            return expand_path(path_str)
 
     # Fall back to old Local format
     local_config = config.get("Local", {})
     if isinstance(local_config, dict):
         path_str = local_config.get("path")
         if path_str:
-            return Path(str(path_str)).expanduser()
+            return expand_path(path_str)
 
     return None
 
@@ -606,9 +612,9 @@ def resolve_cookies_path(
     for value in values:
         if not value:
             continue
-        candidate = Path(str(value)).expanduser()
+        candidate = expand_path(value)
         if not candidate.is_absolute():
-            candidate = (base_dir / candidate).expanduser()
+            candidate = expand_path(base_dir / candidate)
         if candidate.is_file():
             return candidate
 
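For the cookies lookup, two effects of the swap are worth spelling out in a rough sketch (the values and the $COOKIES_DIR variable below are hypothetical, not part of the codebase): environment variables in a candidate are now expanded before the absolute-path check, and a relative candidate joined onto base_dir is expanded again, so a "~" or "$VAR" inside base_dir itself is also resolved.

```python
from pathlib import Path
from SYS.utils import expand_path

base_dir = Path("~/.config/medios")                  # hypothetical, still unexpanded
candidate = expand_path("$COOKIES_DIR/cookies.txt")  # absolute only if the variable is set
if not candidate.is_absolute():
    # e.g. the value was just "cookies.txt": join onto base_dir, then expand the "~" in it
    candidate = expand_path(base_dir / candidate)
print(candidate, candidate.is_file())
```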
@@ -622,7 +628,7 @@ def resolve_debug_log(config: Dict[str, Any]) -> Optional[Path]:
     value = config.get("download_debug_log")
     if not value:
         return None
-    path = Path(str(value)).expanduser()
+    path = expand_path(value)
     if not path.is_absolute():
         path = Path.cwd() / path
     return path
SYS/utils.py
@@ -11,11 +11,12 @@ try:
     import ffmpeg  # type: ignore
 except Exception:
     ffmpeg = None  # type: ignore
+import os
 import base64
 import logging
 import time
 from pathlib import Path
-from typing import Any, Iterable
+from typing import Any, Iterable, Optional
 from datetime import datetime
 from dataclasses import dataclass, field
 from fnmatch import fnmatch
@@ -32,6 +33,14 @@ CHUNK_SIZE = 1024 * 1024  # 1 MiB
 _format_logger = logging.getLogger(__name__)
 
 
+def expand_path(p: str | Path | None) -> Path:
+    """Expand ~ and environment variables in path."""
+    if p is None:
+        return None  # type: ignore
+    expanded = os.path.expandvars(str(p))
+    return Path(expanded).expanduser()
+
+
 def ensure_directory(path: Path) -> None:
     """Ensure *path* exists as a directory."""
     try:
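A quick sketch of what the new helper accepts, checked against the definition above. The results assume a Unix-like environment with $HOME set; on Windows, os.path.expandvars also handles %VAR% syntax. Relative inputs stay relative, which is why callers such as _normalize_input_path and resolve_debug_log still join the result onto a library root or Path.cwd().

```python
from pathlib import Path
from SYS.utils import expand_path

assert expand_path("~/media") == Path.home() / "media"
assert expand_path("$HOME/media") == Path.home() / "media"      # $HOME must be set
assert expand_path(Path("clips/today")) == Path("clips/today")  # relative stays relative
assert expand_path(None) is None    # despite the -> Path annotation
```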
@@ -9,7 +9,7 @@ from pathlib import Path
 from typing import Any, Dict, List, Optional, Tuple
 
 from SYS.logger import debug, log
-from SYS.utils import sha256_file
+from SYS.utils import sha256_file, expand_path
 
 from Store._base import Store
 
@@ -73,9 +73,8 @@ class Folder(Store):
         try:
             from API.folder import API_folder_store
             from API.folder import LocalLibraryInitializer
-            from pathlib import Path
 
-            location_path = Path(self._location).expanduser()
+            location_path = expand_path(self._location)
 
             # Use context manager to ensure connection is properly closed
             with API_folder_store(location_path) as db:
@@ -124,9 +123,7 @@ class Folder(Store):
         if not location:
             return
 
-        from pathlib import Path
-
-        location_path = Path(location).expanduser()
+        location_path = expand_path(location)
         location_str = str(location_path)
 
         # Only migrate once per location
@@ -673,7 +670,7 @@ class Folder(Store):
 
         match_all = query == "*" or (not query and bool(ext_filter))
         results = []
-        search_dir = Path(self._location).expanduser()
+        search_dir = expand_path(self._location)
 
         def _url_like_pattern(value: str) -> str:
             # Interpret user patterns as substring matches (with optional glob wildcards).
@@ -1335,10 +1332,10 @@ class Folder(Store):
         of the file path to find a directory with medios-macina.db."""
         candidates: list[Path] = []
         if self._location:
-            candidates.append(Path(self._location).expanduser())
+            candidates.append(expand_path(self._location))
         cfg_root = get_local_storage_path(config) if config else None
         if cfg_root:
-            candidates.append(Path(cfg_root).expanduser())
+            candidates.append(expand_path(cfg_root))
 
         for root in candidates:
             db_path = root / "medios-macina.db"
@@ -1369,7 +1366,7 @@ class Folder(Store):
         if not normalized_hash:
             return None
 
-        search_dir = Path(self._location).expanduser()
+        search_dir = expand_path(self._location)
         from API.folder import API_folder_store
 
         with API_folder_store(search_dir) as db:
@@ -1400,7 +1397,7 @@ class Folder(Store):
         if not normalized_hash:
             return None
 
-        search_dir = Path(self._location).expanduser()
+        search_dir = expand_path(self._location)
         from API.folder import DatabaseAPI
 
         with DatabaseAPI(search_dir) as api:
@@ -1460,7 +1457,7 @@ class Folder(Store):
 
         from API.folder import API_folder_store
 
-        with API_folder_store(Path(self._location).expanduser()) as db:
+        with API_folder_store(expand_path(self._location)) as db:
             db.set_relationship_by_hash(
                 alt_norm,
                 king_norm,
@@ -2150,7 +2147,7 @@ class Folder(Store):
         if not raw:
             return False
 
-        store_root = Path(self._location).expanduser()
+        store_root = expand_path(self._location)
 
         # Support deletion by hash (common for store items where `path` is the hash).
        file_hash = _normalize_hash(raw)
@@ -2159,7 +2156,7 @@ class Folder(Store):
             if file_hash:
                 resolved_path = db.search_hash(file_hash)
             else:
-                p = Path(raw)
+                p = expand_path(raw)
                 resolved_path = p if p.is_absolute() else (store_root / p)
 
             if resolved_path is None:
@@ -19,6 +19,7 @@ from pathlib import Path
 from typing import Any, Dict, Iterable, Optional, Type
 
 from SYS.logger import debug
+from SYS.utils import expand_path
 
 from Store._base import Store as BaseStore
 
@@ -169,8 +170,8 @@ class Store:
         if not path_value:
             return
 
-        temp_path = Path(str(temp_value)).expanduser().resolve()
-        backend_path = Path(str(path_value)).expanduser().resolve()
+        temp_path = expand_path(temp_value).resolve()
+        backend_path = expand_path(path_value).resolve()
         if backend_path != temp_path:
             return
 
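The equality check above is why both config values go through expand_path(...).resolve() rather than a raw string compare. A sketch with hypothetical values (assumes $HOME is set and the path components resolve cleanly, i.e. no symlinks that redirect the ".." step):

```python
from SYS.utils import expand_path

# Two spellings of the same directory compare equal only after expansion
# and resolution; as raw strings they would not match.
temp_path = expand_path("~/downloads").resolve()
backend_path = expand_path("$HOME/store/../downloads").resolve()
assert temp_path == backend_path
```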
@@ -230,7 +231,7 @@ class Store:
         for key in list(kwargs.keys()):
             if _normalize_config_key(key) in {"PATH",
                                               "LOCATION"}:
-                kwargs[key] = str(Path(str(kwargs[key])).expanduser())
+                kwargs[key] = str(expand_path(kwargs[key]))
 
         backend = store_cls(**kwargs)
 
@@ -411,7 +412,7 @@ def list_configured_backend_names(config: Optional[Dict[str, Any]]) -> list[str]
     try:
         temp_value = (config or {}).get("temp")
         if temp_value:
-            temp_path = str(Path(str(temp_value)).expanduser().resolve())
+            temp_path = str(expand_path(temp_value).resolve())
             for raw_store_type, instances in store_cfg.items():
                 if not isinstance(instances, dict):
                     continue
@@ -423,7 +424,7 @@ def list_configured_backend_names(config: Optional[Dict[str, Any]]) -> list[str]
                     path_value = instance_config.get("PATH") or instance_config.get("path")
                     if not path_value:
                         continue
-                    if str(Path(str(path_value)).expanduser().resolve()) == temp_path:
+                    if str(expand_path(path_value).resolve()) == temp_path:
                         if "temp" not in names:
                             names.append("temp")
     except Exception:
TUI.py
@@ -613,9 +613,10 @@ class PipelineHubApp(App):
     def on_config_closed(self, result: Any = None) -> None:
         """Call when the config modal is dismissed to reload session data."""
         try:
-            from SYS.config import load_config
+            from SYS.config import load_config, clear_config_cache
             from cmdlet._shared import SharedArgs
             # Force a fresh load from disk
+            clear_config_cache()
             cfg = load_config()
 
             # Clear UI state to show a "fresh" start