This commit is contained in:
2026-01-23 00:24:00 -08:00
parent a329299b3c
commit 072edb4399
3 changed files with 166 additions and 100 deletions

View File

@@ -22,7 +22,7 @@
"((1fichier\\.com|megadl\\.fr|alterupload\\.com|cjoint\\.net|desfichiers\\.com|dfichiers\\.com|mesfichiers\\.org|piecejointe\\.net|pjointe\\.com|tenvoi\\.com|dl4free\\.com)/\\?[a-zA-Z0-9]{5,30}(&pw=[^&]+)?)" "((1fichier\\.com|megadl\\.fr|alterupload\\.com|cjoint\\.net|desfichiers\\.com|dfichiers\\.com|mesfichiers\\.org|piecejointe\\.net|pjointe\\.com|tenvoi\\.com|dl4free\\.com)/\\?[a-zA-Z0-9]{5,30}(&pw=[^&]+)?)"
], ],
"regexp": "((1fichier\\.com|megadl\\.fr|alterupload\\.com|cjoint\\.net|desfichiers\\.com|dfichiers\\.com|mesfichiers\\.org|piecejointe\\.net|pjointe\\.com|tenvoi\\.com|dl4free\\.com)/\\?[a-zA-Z0-9]{5,30}(&pw=[^&]+)?)", "regexp": "((1fichier\\.com|megadl\\.fr|alterupload\\.com|cjoint\\.net|desfichiers\\.com|dfichiers\\.com|mesfichiers\\.org|piecejointe\\.net|pjointe\\.com|tenvoi\\.com|dl4free\\.com)/\\?[a-zA-Z0-9]{5,30}(&pw=[^&]+)?)",
"status": true "status": false
}, },
"rapidgator": { "rapidgator": {
"name": "rapidgator", "name": "rapidgator",
@@ -92,7 +92,7 @@
"(hitfile\\.net/[a-z0-9A-Z]{4,9})" "(hitfile\\.net/[a-z0-9A-Z]{4,9})"
], ],
"regexp": "(hitf\\.(to|cc)/([a-z0-9A-Z]{4,9}))|(htfl\\.(net|to|cc)/([a-z0-9A-Z]{4,9}))|(hitfile\\.(net)/download/free/([a-z0-9A-Z]{4,9}))|((hitfile\\.net/[a-z0-9A-Z]{4,9}))", "regexp": "(hitf\\.(to|cc)/([a-z0-9A-Z]{4,9}))|(htfl\\.(net|to|cc)/([a-z0-9A-Z]{4,9}))|(hitfile\\.(net)/download/free/([a-z0-9A-Z]{4,9}))|((hitfile\\.net/[a-z0-9A-Z]{4,9}))",
"status": true "status": false
}, },
"mega": { "mega": {
"name": "mega", "name": "mega",
@@ -507,7 +507,7 @@
"mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})" "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})"
], ],
"regexp": "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})", "regexp": "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})",
"status": true "status": false
}, },
"mexashare": { "mexashare": {
"name": "mexashare", "name": "mexashare",

View File

@@ -25,6 +25,7 @@ from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple from typing import Any, Dict, List, Optional, Sequence, Tuple
from SYS import pipeline as ctx from SYS import pipeline as ctx
from SYS.pipeline_progress import PipelineProgress
from . import _shared as sh from . import _shared as sh
normalize_hash = sh.normalize_hash normalize_hash = sh.normalize_hash
@@ -573,6 +574,16 @@ def _emit_tag_payload(
return 0 return 0
def _finalize_pipeline_progress() -> None:
    """Best-effort: mark the current pipeline stage as done in the UI.

    Clears any transient status text and pins the progress bar at 100%.
    All failures are swallowed — finishing the command matters more than
    updating the progress display.
    """
    try:
        ui = PipelineProgress(ctx)
    except Exception:
        return
    try:
        ui.clear_status()
        ui.set_percent(100)
    except Exception:
        pass
def _extract_scrapable_identifiers(tags_list: List[str]) -> Dict[str, str]: def _extract_scrapable_identifiers(tags_list: List[str]) -> Dict[str, str]:
"""Extract scrapable identifiers from tags.""" """Extract scrapable identifiers from tags."""
identifiers = {} identifiers = {}
@@ -979,15 +990,23 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"""Get tags from Hydrus, local sidecar, or URL metadata. """Get tags from Hydrus, local sidecar, or URL metadata.
Usage: Usage:
get-tag [-query "hash:<sha256>"] [--store <key>] [--emit] get-tag [-query "hash:<sha256>"] [--store <key>] [--emit]
get-tag -scrape <url|provider> get-tag -scrape <url|provider>
Options: Options:
-query "hash:<sha256>": Override hash to use instead of result's hash -query "hash:<sha256>": Override hash to use instead of result's hash
--store <key>: Store result to this key for pipeline --store <key>: Store result to this key for pipeline
--emit: Emit result without interactive prompt (quiet mode) --emit: Emit result without interactive prompt (quiet mode)
-scrape <url|provider>: Scrape metadata from URL or provider name (itunes, openlibrary, googlebooks, imdb) -scrape <url|provider>: Scrape metadata from URL or provider name (itunes, openlibrary, googlebooks, imdb)
""" """
try:
return _run_impl(result, args, config)
finally:
_finalize_pipeline_progress()
def _run_impl(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"""Internal implementation details for get-tag."""
emit_mode = False emit_mode = False
is_store_backed = False is_store_backed = False
args_list = [str(arg) for arg in (args or [])] args_list = [str(arg) for arg in (args or [])]

View File

@@ -1149,110 +1149,157 @@ def _download_with_sections_via_cli(
if not sections_list: if not sections_list:
return "", {} return "", {}
pipeline = PipelineProgress(pipeline_context)
class _SectionProgressSimulator:
    """Background ticker that inches the pipeline percent toward a cap.

    yt-dlp gives no per-section progress callbacks here, so this thread
    bumps the displayed percent by one step per interval to keep the UI
    moving while a section downloads. ``stop()`` pins the bar at the cap.
    """

    def __init__(self, start_pct: int, max_pct: int, interval: float = 0.5) -> None:
        # Clamp exactly as callers expect: start in [0, 99], then the cap
        # is min(max_pct, 98) but never below start.
        self._start_pct = max(0, min(int(start_pct), 99))
        self._max_pct = max(self._start_pct, min(int(max_pct), 98))
        self._interval = max(0.1, float(interval))  # never spin faster than 10 Hz
        self._stop_event = threading.Event()
        self._thread: Optional[threading.Thread] = None

    def _run(self) -> None:
        # Tick until stop() signals; hold at the cap rather than overshoot.
        pct = self._start_pct
        while not self._stop_event.wait(self._interval):
            if pct < self._max_pct:
                pct += 1
            try:
                _set_pipe_percent(pct)
            except Exception:
                pass  # progress display is best-effort

    def start(self) -> None:
        # No-op when already running or when there is no room to advance.
        if self._thread is not None or self._start_pct >= self._max_pct:
            return
        worker = threading.Thread(target=self._run, daemon=True)
        self._thread = worker
        worker.start()

    def stop(self) -> None:
        # Signal the ticker, wait briefly for it, then pin the bar at the cap
        # so the UI never ends mid-climb.
        self._stop_event.set()
        worker = self._thread
        if worker is not None:
            worker.join(timeout=0.5)
            self._thread = None
        try:
            _set_pipe_percent(self._max_pct)
        except Exception:
            pass
session_id = hashlib.md5((url + str(time.time()) + "".join(random.choices(string.ascii_letters, k=10))).encode()).hexdigest()[:12] session_id = hashlib.md5((url + str(time.time()) + "".join(random.choices(string.ascii_letters, k=10))).encode()).hexdigest()[:12]
first_section_info = None first_section_info = None
total_sections = len(sections_list) total_sections = len(sections_list)
for section_idx, section in enumerate(sections_list, 1): try:
try: for section_idx, section in enumerate(sections_list, 1):
display_pct = 50
if total_sections > 0: if total_sections > 0:
pct = 50 + int(((section_idx - 1) / max(1, total_sections)) * 49) display_pct = 50 + int(((section_idx - 1) / max(1, total_sections)) * 49)
_set_pipe_percent(pct)
except Exception:
pass
base_outtmpl = ytdl_options.get("outtmpl", "%(title)s.%(ext)s")
output_dir_path = Path(base_outtmpl).parent
filename_tmpl = f"{session_id}_{section_idx}"
if base_outtmpl.endswith(".%(ext)s"):
filename_tmpl += ".%(ext)s"
section_outtmpl = str(output_dir_path / filename_tmpl)
if section_idx == 1:
metadata_cmd = ["yt-dlp", "--dump-json", "--skip-download"]
if ytdl_options.get("cookiefile"):
cookies_path = ytdl_options["cookiefile"].replace("\\", "/")
metadata_cmd.extend(["--cookies", cookies_path])
if ytdl_options.get("noplaylist"):
metadata_cmd.append("--no-playlist")
metadata_cmd.append(url)
try: try:
meta_result = subprocess.run(metadata_cmd, capture_output=True, text=True) _set_pipe_percent(display_pct)
if meta_result.returncode == 0 and meta_result.stdout:
try:
info_dict = json.loads(meta_result.stdout.strip())
first_section_info = info_dict
if not quiet:
debug(f"Extracted title from metadata: {info_dict.get('title')}")
except json.JSONDecodeError:
if not quiet:
debug("Could not parse JSON metadata")
except Exception as exc:
if not quiet:
debug(f"Error extracting metadata: {exc}")
cmd = ["yt-dlp"]
if quiet:
cmd.append("--quiet")
cmd.append("--no-warnings")
cmd.append("--no-progress")
cmd.extend(["--postprocessor-args", "ffmpeg:-hide_banner -loglevel error"])
if ytdl_options.get("ffmpeg_location"):
try:
cmd.extend(["--ffmpeg-location", str(ytdl_options["ffmpeg_location"])])
except Exception: except Exception:
pass pass
if ytdl_options.get("format"):
cmd.extend(["-f", ytdl_options["format"]])
if ytdl_options.get("merge_output_format"):
cmd.extend(["--merge-output-format", str(ytdl_options["merge_output_format"])])
postprocessors = ytdl_options.get("postprocessors") pipeline.set_status(f"Downloading & clipping clip section {section_idx}/{total_sections}")
want_add_metadata = bool(ytdl_options.get("addmetadata"))
want_embed_chapters = bool(ytdl_options.get("embedchapters"))
if isinstance(postprocessors, list):
for pp in postprocessors:
if not isinstance(pp, dict):
continue
if str(pp.get("key") or "") == "FFmpegMetadata":
want_add_metadata = True
if bool(pp.get("add_chapters", True)):
want_embed_chapters = True
if want_add_metadata: base_outtmpl = ytdl_options.get("outtmpl", "%(title)s.%(ext)s")
cmd.append("--add-metadata") output_dir_path = Path(base_outtmpl).parent
if want_embed_chapters: filename_tmpl = f"{session_id}_{section_idx}"
cmd.append("--embed-chapters") if base_outtmpl.endswith(".%(ext)s"):
if ytdl_options.get("writesubtitles"): filename_tmpl += ".%(ext)s"
cmd.append("--write-sub") section_outtmpl = str(output_dir_path / filename_tmpl)
cmd.append("--write-auto-sub")
cmd.extend(["--sub-format", "vtt"])
if ytdl_options.get("force_keyframes_at_cuts"):
cmd.append("--force-keyframes-at-cuts")
cmd.extend(["-o", section_outtmpl])
if ytdl_options.get("cookiefile"):
cookies_path = ytdl_options["cookiefile"].replace("\\", "/")
cmd.extend(["--cookies", cookies_path])
if ytdl_options.get("noplaylist"):
cmd.append("--no-playlist")
cmd.extend(["--download-sections", section]) if section_idx == 1:
metadata_cmd = ["yt-dlp", "--dump-json", "--skip-download"]
if ytdl_options.get("cookiefile"):
cookies_path = ytdl_options["cookiefile"].replace("\\", "/")
metadata_cmd.extend(["--cookies", cookies_path])
if ytdl_options.get("noplaylist"):
metadata_cmd.append("--no-playlist")
metadata_cmd.append(url)
try:
meta_result = subprocess.run(metadata_cmd, capture_output=True, text=True)
if meta_result.returncode == 0 and meta_result.stdout:
try:
info_dict = json.loads(meta_result.stdout.strip())
first_section_info = info_dict
if not quiet:
debug(f"Extracted title from metadata: {info_dict.get('title')}")
except json.JSONDecodeError:
if not quiet:
debug("Could not parse JSON metadata")
except Exception as exc:
if not quiet:
debug(f"Error extracting metadata: {exc}")
cmd.append(url) cmd = ["yt-dlp"]
if not quiet:
debug(f"Running yt-dlp for section: {section}")
try:
if quiet: if quiet:
subprocess.run(cmd, check=True, capture_output=True, text=True) cmd.append("--quiet")
else: cmd.append("--no-warnings")
subprocess.run(cmd, check=True) cmd.append("--no-progress")
except subprocess.CalledProcessError as exc: cmd.extend(["--postprocessor-args", "ffmpeg:-hide_banner -loglevel error"])
stderr_text = exc.stderr or "" if ytdl_options.get("ffmpeg_location"):
tail = "\n".join(stderr_text.splitlines()[-12:]).strip() try:
details = f"\n{tail}" if tail else "" cmd.extend(["--ffmpeg-location", str(ytdl_options["ffmpeg_location"])])
raise DownloadError(f"yt-dlp failed for section {section} (exit {exc.returncode}){details}") from exc except Exception:
except Exception as exc: pass
raise DownloadError(f"yt-dlp failed for section {section}: {exc}") from exc if ytdl_options.get("format"):
cmd.extend(["-f", ytdl_options["format"]])
if ytdl_options.get("merge_output_format"):
cmd.extend(["--merge-output-format", str(ytdl_options["merge_output_format"])])
postprocessors = ytdl_options.get("postprocessors")
want_add_metadata = bool(ytdl_options.get("addmetadata"))
want_embed_chapters = bool(ytdl_options.get("embedchapters"))
if isinstance(postprocessors, list):
for pp in postprocessors:
if not isinstance(pp, dict):
continue
if str(pp.get("key") or "") == "FFmpegMetadata":
want_add_metadata = True
if bool(pp.get("add_chapters", True)):
want_embed_chapters = True
if want_add_metadata:
cmd.append("--add-metadata")
if want_embed_chapters:
cmd.append("--embed-chapters")
if ytdl_options.get("writesubtitles"):
cmd.append("--write-sub")
cmd.append("--write-auto-sub")
cmd.extend(["--sub-format", "vtt"])
if ytdl_options.get("force_keyframes_at_cuts"):
cmd.append("--force-keyframes-at-cuts")
cmd.extend(["-o", section_outtmpl])
if ytdl_options.get("cookiefile"):
cookies_path = ytdl_options["cookiefile"].replace("\\", "/")
cmd.extend(["--cookies", cookies_path])
if ytdl_options.get("noplaylist"):
cmd.append("--no-playlist")
cmd.extend(["--download-sections", section])
cmd.append(url)
if not quiet:
debug(f"Running yt-dlp for section: {section}")
progress_end_pct = min(display_pct + 45, 98)
simulator = _SectionProgressSimulator(display_pct, progress_end_pct)
simulator.start()
try:
if quiet:
subprocess.run(cmd, check=True, capture_output=True, text=True)
else:
subprocess.run(cmd, check=True)
except subprocess.CalledProcessError as exc:
stderr_text = exc.stderr or ""
tail = "\n".join(stderr_text.splitlines()[-12:]).strip()
details = f"\n{tail}" if tail else ""
raise DownloadError(f"yt-dlp failed for section {section} (exit {exc.returncode}){details}") from exc
except Exception as exc:
raise DownloadError(f"yt-dlp failed for section {section}: {exc}") from exc
finally:
simulator.stop()
finally:
pipeline.clear_status()
try: try:
_set_pipe_percent(99) _set_pipe_percent(99)