This commit is contained in:
2026-01-23 00:24:00 -08:00
parent a329299b3c
commit 072edb4399
3 changed files with 166 additions and 100 deletions

View File

@@ -22,7 +22,7 @@
"((1fichier\\.com|megadl\\.fr|alterupload\\.com|cjoint\\.net|desfichiers\\.com|dfichiers\\.com|mesfichiers\\.org|piecejointe\\.net|pjointe\\.com|tenvoi\\.com|dl4free\\.com)/\\?[a-zA-Z0-9]{5,30}(&pw=[^&]+)?)"
],
"regexp": "((1fichier\\.com|megadl\\.fr|alterupload\\.com|cjoint\\.net|desfichiers\\.com|dfichiers\\.com|mesfichiers\\.org|piecejointe\\.net|pjointe\\.com|tenvoi\\.com|dl4free\\.com)/\\?[a-zA-Z0-9]{5,30}(&pw=[^&]+)?)",
"status": true
"status": false
},
"rapidgator": {
"name": "rapidgator",
@@ -92,7 +92,7 @@
"(hitfile\\.net/[a-z0-9A-Z]{4,9})"
],
"regexp": "(hitf\\.(to|cc)/([a-z0-9A-Z]{4,9}))|(htfl\\.(net|to|cc)/([a-z0-9A-Z]{4,9}))|(hitfile\\.(net)/download/free/([a-z0-9A-Z]{4,9}))|((hitfile\\.net/[a-z0-9A-Z]{4,9}))",
"status": true
"status": false
},
"mega": {
"name": "mega",
@@ -507,7 +507,7 @@
"mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})"
],
"regexp": "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})",
"status": true
"status": false
},
"mexashare": {
"name": "mexashare",

View File

@@ -25,6 +25,7 @@ from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple
from SYS import pipeline as ctx
from SYS.pipeline_progress import PipelineProgress
from . import _shared as sh
normalize_hash = sh.normalize_hash
@@ -573,6 +574,16 @@ def _emit_tag_payload(
return 0
def _finalize_pipeline_progress() -> None:
    """Ensure the pipeline UI shows the stage as complete.

    Clears any lingering status text and forces the progress bar to 100%.
    Best-effort only: progress display is cosmetic and must never cause
    the surrounding command to fail.
    """
    try:
        # NOTE(review): `ctx` is the module-level pipeline context imported
        # as `from SYS import pipeline as ctx` — confirm it is the expected
        # argument type for PipelineProgress.
        progress = PipelineProgress(ctx)
        progress.clear_status()
        progress.set_percent(100)
    except Exception:
        # Deliberate swallow: a broken/absent progress UI should not
        # change the command's exit status.
        pass
def _extract_scrapable_identifiers(tags_list: List[str]) -> Dict[str, str]:
"""Extract scrapable identifiers from tags."""
identifiers = {}
@@ -988,6 +999,14 @@ def _run(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
--emit: Emit result without interactive prompt (quiet mode)
-scrape <url|provider>: Scrape metadata from URL or provider name (itunes, openlibrary, googlebooks, imdb)
"""
try:
return _run_impl(result, args, config)
finally:
_finalize_pipeline_progress()
def _run_impl(result: Any, args: Sequence[str], config: Dict[str, Any]) -> int:
"""Internal implementation details for get-tag."""
emit_mode = False
is_store_backed = False
args_list = [str(arg) for arg in (args or [])]

View File

@@ -1149,18 +1149,58 @@ def _download_with_sections_via_cli(
if not sections_list:
return "", {}
pipeline = PipelineProgress(pipeline_context)
class _SectionProgressSimulator:
    """Simulate steady pipeline progress while a section download runs.

    A daemon thread bumps the displayed percent by one every `interval`
    seconds, starting from `start_pct` (clamped to 0-99) and plateauing at
    `max_pct` (clamped to at most 98 so the bar never claims completion on
    its own). `stop()` halts the thread and pins the bar at the ceiling.

    NOTE(review): relies on the module-level `_set_pipe_percent` helper —
    assumed to update the pipeline progress display; confirm in the
    enclosing module.
    """

    def __init__(self, start_pct: int, max_pct: int, interval: float = 0.5) -> None:
        # Clamp inputs: start within [0, 99], ceiling within [start, 98].
        self._start_pct = max(0, min(int(start_pct), 99))
        self._max_pct = max(self._start_pct, min(int(max_pct), 98))
        # Never tick faster than 10x/second, regardless of caller input.
        self._interval = max(0.1, float(interval))
        self._stop_event = threading.Event()
        self._thread: Optional[threading.Thread] = None

    def _run(self) -> None:
        """Thread body: advance the percent once per interval until stopped."""
        current = self._start_pct
        # Event.wait() serves as both the sleep and the stop check; it
        # returns True (ending the loop) as soon as stop() sets the event.
        while not self._stop_event.wait(self._interval):
            if current < self._max_pct:
                current += 1
            try:
                _set_pipe_percent(current)
            except Exception:
                # Progress display is cosmetic; never interrupt the download.
                pass

    def start(self) -> None:
        """Start the ticker thread (no-op if already running or no room to tick)."""
        if self._thread is not None or self._start_pct >= self._max_pct:
            return
        # Daemon thread: must not keep the process alive if stop() is missed.
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()

    def stop(self) -> None:
        """Stop the ticker and settle the bar at the ceiling percent."""
        self._stop_event.set()
        if self._thread is not None:
            # Short timeout: worst case the daemon thread dies with the process.
            self._thread.join(timeout=0.5)
            self._thread = None
        try:
            _set_pipe_percent(self._max_pct)
        except Exception:
            pass
session_id = hashlib.md5((url + str(time.time()) + "".join(random.choices(string.ascii_letters, k=10))).encode()).hexdigest()[:12]
first_section_info = None
total_sections = len(sections_list)
for section_idx, section in enumerate(sections_list, 1):
try:
for section_idx, section in enumerate(sections_list, 1):
display_pct = 50
if total_sections > 0:
pct = 50 + int(((section_idx - 1) / max(1, total_sections)) * 49)
_set_pipe_percent(pct)
display_pct = 50 + int(((section_idx - 1) / max(1, total_sections)) * 49)
try:
_set_pipe_percent(display_pct)
except Exception:
pass
pipeline.set_status(f"Downloading & clipping clip section {section_idx}/{total_sections}")
base_outtmpl = ytdl_options.get("outtmpl", "%(title)s.%(ext)s")
output_dir_path = Path(base_outtmpl).parent
filename_tmpl = f"{session_id}_{section_idx}"
@@ -1237,10 +1277,13 @@ def _download_with_sections_via_cli(
cmd.append("--no-playlist")
cmd.extend(["--download-sections", section])
cmd.append(url)
if not quiet:
debug(f"Running yt-dlp for section: {section}")
progress_end_pct = min(display_pct + 45, 98)
simulator = _SectionProgressSimulator(display_pct, progress_end_pct)
simulator.start()
try:
if quiet:
subprocess.run(cmd, check=True, capture_output=True, text=True)
@@ -1253,6 +1296,10 @@ def _download_with_sections_via_cli(
raise DownloadError(f"yt-dlp failed for section {section} (exit {exc.returncode}){details}") from exc
except Exception as exc:
raise DownloadError(f"yt-dlp failed for section {section}: {exc}") from exc
finally:
simulator.stop()
finally:
pipeline.clear_status()
try:
_set_pipe_percent(99)