This commit is contained in:
2026-03-26 23:00:25 -07:00
parent 562acd809c
commit 37bb4ca685
8 changed files with 368 additions and 123 deletions

View File

@@ -71,7 +71,7 @@
"(wayupload\\.com/[a-z0-9]{12}\\.html)" "(wayupload\\.com/[a-z0-9]{12}\\.html)"
], ],
"regexp": "(turbobit5?a?\\.(net|cc|com)/([a-z0-9]{12}))|(turbobif\\.(net|cc|com)/([a-z0-9]{12}))|(turb[o]?\\.(to|cc|pw)\\/([a-z0-9]{12}))|(turbobit\\.(net|cc)/download/free/([a-z0-9]{12}))|((trbbt|tourbobit|torbobit|tbit|turbobita|trbt)\\.(net|cc|com|to)/([a-z0-9]{12}))|((turbobit\\.cloud/turbo/[a-z0-9]+))|((wayupload\\.com/[a-z0-9]{12}\\.html))", "regexp": "(turbobit5?a?\\.(net|cc|com)/([a-z0-9]{12}))|(turbobif\\.(net|cc|com)/([a-z0-9]{12}))|(turb[o]?\\.(to|cc|pw)\\/([a-z0-9]{12}))|(turbobit\\.(net|cc)/download/free/([a-z0-9]{12}))|((trbbt|tourbobit|torbobit|tbit|turbobita|trbt)\\.(net|cc|com|to)/([a-z0-9]{12}))|((turbobit\\.cloud/turbo/[a-z0-9]+))|((wayupload\\.com/[a-z0-9]{12}\\.html))",
"status": true "status": false
}, },
"hitfile": { "hitfile": {
"name": "hitfile", "name": "hitfile",
@@ -494,7 +494,7 @@
"mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})" "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})"
], ],
"regexp": "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})", "regexp": "mediafire\\.com/(\\?|download/|file/|download\\.php\\?)([0-9a-z]{15})",
"status": true "status": false
}, },
"mixdrop": { "mixdrop": {
"name": "mixdrop", "name": "mixdrop",

View File

@@ -961,6 +961,14 @@ local function _get_selected_store_conf_path()
return utils.join_path(dir, 'medeia.conf') return utils.join_path(dir, 'medeia.conf')
end end
function M._get_selected_store_state_path()
    -- Resolve the JSON file used to persist the user's selected store.
    -- Returns nil when the script-opts directory cannot be determined.
    local dir = _get_script_opts_dir()
    if dir then
        return utils.join_path(dir, 'medeia-selected-store.json')
    end
    return nil
end
function M._get_store_cache_path() function M._get_store_cache_path()
local dir = _get_script_opts_dir() local dir = _get_script_opts_dir()
if not dir then if not dir then
@@ -1030,6 +1038,25 @@ function M._prime_store_cache_from_disk()
end end
local function _load_selected_store_from_disk() local function _load_selected_store_from_disk()
local state_path = M._get_selected_store_state_path()
if state_path then
local fh = io.open(state_path, 'r')
if fh then
local raw = fh:read('*a')
fh:close()
raw = trim(tostring(raw or ''))
if raw ~= '' then
local ok, payload = pcall(utils.parse_json, raw)
if ok and type(payload) == 'table' then
local value = _normalize_store_name(payload.store)
if value ~= '' then
return value
end
end
end
end
end
local path = _get_selected_store_conf_path() local path = _get_selected_store_conf_path()
if not path then if not path then
return nil return nil
@@ -1054,7 +1081,7 @@ local function _load_selected_store_from_disk()
end end
local function _save_selected_store_to_disk(store) local function _save_selected_store_to_disk(store)
local path = _get_selected_store_conf_path() local path = M._get_selected_store_state_path()
if not path then if not path then
return false return false
end end
@@ -1062,8 +1089,7 @@ local function _save_selected_store_to_disk(store)
if not fh then if not fh then
return false return false
end end
fh:write('# Medeia MPV script options\n') fh:write(utils.format_json({ store = _normalize_store_name(store) }))
fh:write('store=' .. tostring(store or '') .. '\n')
fh:close() fh:close()
return true return true
end end
@@ -1095,6 +1121,42 @@ local function _ensure_selected_store_loaded()
if disk ~= '' then if disk ~= '' then
pcall(mp.set_property, SELECTED_STORE_PROP, disk) pcall(mp.set_property, SELECTED_STORE_PROP, disk)
end end
pcall(function()
local legacy_path = _get_selected_store_conf_path()
if not legacy_path then
return
end
local fh = io.open(legacy_path, 'r')
if not fh then
return
end
local raw = fh:read('*a')
fh:close()
raw = tostring(raw or '')
if raw == '' or not raw:lower():find('store%s*=') then
return
end
local lines = {}
for line in raw:gmatch('[^\r\n]+') do
local s = trim(tostring(line or ''))
local k = s:match('^([%w_%-]+)%s*=')
if not (k and k:lower() == 'store') then
lines[#lines + 1] = line
end
end
local out = table.concat(lines, '\n')
if out ~= '' then
out = out .. '\n'
end
local writer = io.open(legacy_path, 'w')
if not writer then
return
end
writer:write(out)
writer:close()
end)
pcall(M._prime_store_cache_from_disk) pcall(M._prime_store_cache_from_disk)
end end
@@ -3662,7 +3724,7 @@ function M._build_web_ytdl_raw_options()
extra[#extra + 1] = 'write-auto-subs=' extra[#extra + 1] = 'write-auto-subs='
end end
if not lower:find('sub%-langs=', 1) then if not lower:find('sub%-langs=', 1) then
extra[#extra + 1] = 'sub-langs=[all,-live_chat]' extra[#extra + 1] = 'sub-langs=[en.*,en,-live_chat]'
end end
if #extra == 0 then if #extra == 0 then
@@ -3697,35 +3759,115 @@ function M._find_subtitle_track_candidate()
return nil, nil, false return nil, nil, false
end end
local first_id = nil local function subtitle_track_blob(track)
local default_id = nil local parts = {}
local selected_id = nil local fields = { 'lang', 'title', 'name', 'external-filename' }
for _, key in ipairs(fields) do
local value = ''
if type(track) == 'table' then
value = trim(tostring(track[key] or '')):lower()
end
if value ~= '' then
parts[#parts + 1] = value
end
end
return table.concat(parts, ' ')
end
local function subtitle_track_is_english(track, blob)
    -- A track counts as English when its language tag is en/eng (optionally
    -- region-qualified, e.g. en-US) or its descriptive text blob contains the
    -- standalone word "english".
    local language = ''
    if type(track) == 'table' then
        language = trim(tostring(track.lang or '')):lower()
    end
    local tagged_english = language == 'en'
        or language == 'eng'
        or language:match('^en[-_]') ~= nil
        or language:match('^eng[-_]') ~= nil
    if tagged_english then
        return true
    end
    local haystack = blob or subtitle_track_blob(track)
    -- %f frontier pattern ensures "english" is matched as a whole word.
    return haystack:match('%f[%a]english%f[%A]') ~= nil
end
local function subtitle_track_is_autogenerated(track, blob)
    -- Detect machine-generated caption tracks by scanning the track's
    -- descriptive text for known marker phrases. Uses plain (non-pattern)
    -- substring search so marker punctuation is matched literally.
    local haystack = blob or subtitle_track_blob(track)
    local markers = {
        'auto-generated',
        'auto generated',
        'autogenerated',
        'automatic captions',
        'automatic subtitles',
        'generated automatically',
        'asr',
    }
    for index = 1, #markers do
        if haystack:find(markers[index], 1, true) then
            return true
        end
    end
    return false
end
local best_id = nil
local best_source = nil
local best_selected = false
local best_score = nil
for _, track in ipairs(tracks) do for _, track in ipairs(tracks) do
if type(track) == 'table' and tostring(track.type or '') == 'sub' and not track.albumart then if type(track) == 'table' and tostring(track.type or '') == 'sub' and not track.albumart then
local id = tonumber(track.id) local id = tonumber(track.id)
if id then if id then
if first_id == nil then local blob = subtitle_track_blob(track)
first_id = id local selected = track.selected and true or false
local source = 'fallback'
local score = 100
if blob:find('medeia-sub', 1, true) then
source = 'medeia-note'
score = 1000
else
local english = subtitle_track_is_english(track, blob)
local autogenerated = subtitle_track_is_autogenerated(track, blob)
if english and not autogenerated then
source = 'english-manual'
score = 800
elseif english and autogenerated then
source = 'english-auto'
score = 700
elseif selected then
source = 'selected'
score = 300
elseif track.default then
source = 'default'
score = 200
else
source = 'first'
score = 100
end
end end
if track.selected then
selected_id = id if selected then
score = score + 50
end end
if default_id == nil and track.default then if track.default then
default_id = id score = score + 25
end
if type(track.external) == 'boolean' and track.external then
score = score + 10
end
if best_score == nil or score > best_score then
best_score = score
best_id = id
best_source = source
best_selected = selected
end end
end end
end end
end end
if selected_id ~= nil then if best_id ~= nil then
return selected_id, 'selected', true return best_id, best_source, best_selected
end
if default_id ~= nil then
return default_id, 'default', false
end
if first_id ~= nil then
return first_id, 'first', false
end end
return nil, nil, false return nil, nil, false
end end
@@ -4302,6 +4444,12 @@ function FileState:fetch_formats(cb)
return return
end end
if not _is_ytdlp_url(url) then
_lua_log('fetch-formats: skipped (yt-dlp unsupported)')
if cb then cb(false, 'yt-dlp unsupported url') end
return
end
local cached = _get_cached_formats_table(url) local cached = _get_cached_formats_table(url)
if type(cached) == 'table' then if type(cached) == 'table' then
_lua_log('fetch-formats: using cached table') _lua_log('fetch-formats: using cached table')
@@ -4373,6 +4521,11 @@ local function _prefetch_formats_for_url(url, attempt)
if url == '' or not _is_http_url(url) then if url == '' or not _is_http_url(url) then
return return
end end
if not _is_ytdlp_url(url) then
_formats_prefetch_retries[url] = nil
_lua_log('prefetch-formats: skipped (yt-dlp unsupported) url=' .. url)
return
end
attempt = tonumber(attempt or 1) or 1 attempt = tonumber(attempt or 1) or 1
local cached = _get_cached_formats_table(url) local cached = _get_cached_formats_table(url)
@@ -5795,6 +5948,11 @@ mp.register_script_message('medios-load-url-event', function(json)
_log_all('INFO', 'Load URL started: ' .. url) _log_all('INFO', 'Load URL started: ' .. url)
_lua_log('[LOAD-URL] Starting to load: ' .. url) _lua_log('[LOAD-URL] Starting to load: ' .. url)
_set_current_web_url(url) _set_current_web_url(url)
_pending_format_change = nil
pcall(mp.set_property, 'options/ytdl-format', '')
pcall(mp.set_property, 'file-local-options/ytdl-format', '')
pcall(mp.set_property, 'ytdl-format', '')
_lua_log('load-url: cleared stale ytdl format reason=load-url')
local function close_menu() local function close_menu()
_lua_log('[LOAD-URL] Closing menu and resetting input state') _lua_log('[LOAD-URL] Closing menu and resetting input state')

View File

@@ -71,6 +71,7 @@ _NOTES_CACHE_VERSION = 1
_DEFAULT_NOTES_CACHE_TTL_S = 900.0 _DEFAULT_NOTES_CACHE_TTL_S = 900.0
_DEFAULT_NOTES_CACHE_WAIT_S = 1.5 _DEFAULT_NOTES_CACHE_WAIT_S = 1.5
_DEFAULT_NOTES_PENDING_WAIT_S = 12.0 _DEFAULT_NOTES_PENDING_WAIT_S = 12.0
_SUBTITLE_NOTE_ALIASES = ("subtitle", "subtitles", "transcript", "transcription")
def _single_instance_lock_path(ipc_path: str) -> Path: def _single_instance_lock_path(ipc_path: str) -> Path:
@@ -794,16 +795,42 @@ def _extract_lrc_from_notes(notes: Dict[str, str]) -> Optional[str]:
return _extract_note_text(notes, "lyric") return _extract_note_text(notes, "lyric")
def _looks_like_subtitle_text(text: str) -> bool:
t = (text or "").lstrip("\ufeff\r\n").lstrip()
if not t:
return False
upper = t.upper()
if upper.startswith("WEBVTT"):
return True
if upper.startswith("[SCRIPT INFO]"):
return True
if "-->" in t:
return True
if re.search(r"(?m)^Dialogue:\s*", t):
return True
return False
def _extract_sub_from_notes(notes: Dict[str, str]) -> Optional[str]: def _extract_sub_from_notes(notes: Dict[str, str]) -> Optional[str]:
"""Return raw subtitle text from the note named 'sub'.""" """Return raw subtitle text from note-backed subtitle/transcript keys."""
return _extract_note_text(notes, "sub") primary = _extract_note_text(notes, "sub")
if primary:
return primary
for note_name in _SUBTITLE_NOTE_ALIASES:
candidate = _extract_note_text(notes, note_name)
if candidate and _looks_like_subtitle_text(candidate):
return candidate
return None
def _infer_sub_extension(text: str) -> str: def _infer_sub_extension(text: str) -> str:
# Best-effort: mpv generally understands SRT/VTT; choose based on content. # Best-effort: mpv generally understands SRT/VTT; choose based on content.
t = (text or "").lstrip("\ufeff\r\n").lstrip() t = (text or "").lstrip("\ufeff\r\n").lstrip()
if t.upper().startswith("WEBVTT"): upper = t.upper()
if upper.startswith("WEBVTT"):
return ".vtt" return ".vtt"
if upper.startswith("[SCRIPT INFO]") or re.search(r"(?m)^Dialogue:\s*", t):
return ".ass"
if "-->" in t: if "-->" in t:
# SRT typically uses commas for milliseconds, VTT uses dots. # SRT typically uses commas for milliseconds, VTT uses dots.
if re.search(r"\d\d:\d\d:\d\d,\d\d\d\s*-->\s*\d\d:\d\d:\d\d,\d\d\d", t): if re.search(r"\d\d:\d\d:\d\d,\d\d\d\s*-->\s*\d\d:\d\d:\d\d,\d\d\d", t):

View File

@@ -769,76 +769,6 @@ class MPV:
**kwargs, **kwargs,
) )
# Start the persistent pipeline helper eagerly so MPV Lua can issue
# non-blocking requests (e.g., format list prefetch) without needing
# to spawn the helper on-demand from inside mpv.
try:
helper_path = (repo_root / "MPV" / "pipeline_helper.py").resolve()
if helper_path.exists():
py = sys.executable or "python"
if platform.system() == "Windows":
py = _windows_pythonw_exe(py) or py
helper_cmd = [
py,
str(helper_path),
"--ipc",
str(self.ipc_path),
"--timeout",
"30",
]
helper_env = os.environ.copy()
try:
existing_pp = helper_env.get("PYTHONPATH")
helper_env["PYTHONPATH"] = (
str(repo_root) if not existing_pp else
(str(repo_root) + os.pathsep + str(existing_pp))
)
except Exception:
pass
helper_kwargs: Dict[str,
Any] = {}
if platform.system() == "Windows":
flags = 0
try:
flags |= int(
getattr(subprocess,
"DETACHED_PROCESS",
0x00000008)
)
except Exception:
flags |= 0x00000008
try:
flags |= int(
getattr(subprocess,
"CREATE_NO_WINDOW",
0x08000000)
)
except Exception:
flags |= 0x08000000
helper_kwargs["creationflags"] = flags
helper_kwargs.update(
{
k: v
for k, v in _windows_hidden_subprocess_kwargs().items()
if k != "creationflags"
}
)
helper_kwargs["cwd"] = str(repo_root)
helper_kwargs["env"] = helper_env
subprocess.Popen(
helper_cmd,
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
**helper_kwargs,
)
except Exception:
pass
def get_ipc_pipe_path() -> str: def get_ipc_pipe_path() -> str:
"""Get the fixed IPC pipe/socket path for persistent MPV connection. """Get the fixed IPC pipe/socket path for persistent MPV connection.

View File

@@ -1172,6 +1172,11 @@ def _get_ipc_lock_path(ipc_path: str) -> Path:
return lock_dir / f"medeia-mpv-helper-{safe}.lock" return lock_dir / f"medeia-mpv-helper-{safe}.lock"
def _get_ipc_lock_meta_path(ipc_path: str) -> Path:
    """Return the JSON metadata sidecar path stored next to the IPC lock file.

    The sidecar name is the lock file name with ".json" appended after its
    existing suffix (e.g. "foo.lock" -> "foo.lock.json").
    """
    base = _get_ipc_lock_path(ipc_path)
    return base.with_suffix(f"{base.suffix}.json")
def _read_lock_file_pid(ipc_path: str) -> Optional[int]: def _read_lock_file_pid(ipc_path: str) -> Optional[int]:
"""Return the PID recorded in the lock file by the current holder, or None. """Return the PID recorded in the lock file by the current holder, or None.
@@ -1181,7 +1186,7 @@ def _read_lock_file_pid(ipc_path: str) -> Optional[int]:
avoiding the race where concurrent sibling helpers all kill each other. avoiding the race where concurrent sibling helpers all kill each other.
""" """
try: try:
lock_path = _get_ipc_lock_path(ipc_path) lock_path = _get_ipc_lock_meta_path(ipc_path)
with open(str(lock_path), "r", encoding="utf-8", errors="replace") as fh: with open(str(lock_path), "r", encoding="utf-8", errors="replace") as fh:
content = fh.read().strip() content = fh.read().strip()
if not content: if not content:
@@ -1193,6 +1198,57 @@ def _read_lock_file_pid(ipc_path: str) -> Optional[int]:
return None return None
def _write_lock_file_metadata(ipc_path: str) -> None:
    """Record the current holder's identity in the lock metadata sidecar.

    Writes a small JSON document (pid, helper version, ipc path, start time)
    so sibling helpers can discover who holds the lock without parsing the
    lock file itself. Raises on I/O failure; callers wrap in try/except.
    """
    payload = json.dumps(
        {
            "pid": os.getpid(),
            "version": MEDEIA_MPV_HELPER_VERSION,
            "ipc": str(ipc_path),
            "started_at": int(time.time()),
        },
        ensure_ascii=False,
    )
    _get_ipc_lock_meta_path(ipc_path).write_text(
        payload,
        encoding="utf-8",
        errors="replace",
    )
def _release_ipc_lock(fh: Any, ipc_path: Optional[str] = None) -> None:
    # Best-effort release of the per-IPC singleton lock: unlock the handle,
    # close it, and delete the JSON metadata sidecar. Every step is wrapped
    # so a failure in one never prevents the later cleanup steps.
    if fh is None:
        # No lock handle was ever acquired; still remove a stale sidecar
        # so other helpers do not read outdated holder metadata.
        if ipc_path:
            try:
                _get_ipc_lock_meta_path(ipc_path).unlink(missing_ok=True)
            except Exception:
                pass
        return
    try:
        if os.name == "nt":
            import msvcrt  # type: ignore
            try:
                # msvcrt.locking unlocks bytes starting at the current file
                # position, so rewind to offset 0 where the lock was taken.
                fh.seek(0)
            except Exception:
                pass
            msvcrt.locking(fh.fileno(), msvcrt.LK_UNLCK, 1)
        else:
            import fcntl  # type: ignore
            fcntl.flock(fh.fileno(), fcntl.LOCK_UN)
    except Exception:
        pass
    try:
        fh.close()
    except Exception:
        pass
    # Remove the sidecar last, after the lock itself has been released.
    if ipc_path:
        try:
            _get_ipc_lock_meta_path(ipc_path).unlink(missing_ok=True)
        except Exception:
            pass
def _acquire_ipc_lock(ipc_path: str) -> Optional[Any]: def _acquire_ipc_lock(ipc_path: str) -> Optional[Any]:
"""Best-effort singleton lock per IPC path. """Best-effort singleton lock per IPC path.
@@ -1238,6 +1294,11 @@ def _acquire_ipc_lock(ipc_path: str) -> Optional[Any]:
pass pass
return None return None
try:
_write_lock_file_metadata(ipc_path)
except Exception:
pass
return fh return fh
except Exception: except Exception:
return None return None
@@ -1433,20 +1494,7 @@ def main(argv: Optional[list[str]] = None) -> int:
f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}" f"[helper] version={MEDEIA_MPV_HELPER_VERSION} started ipc={args.ipc}"
) )
try: try:
_lock_fh.seek(0) _write_lock_file_metadata(str(args.ipc))
_lock_fh.truncate()
_lock_fh.write(
json.dumps(
{
"pid": os.getpid(),
"version": MEDEIA_MPV_HELPER_VERSION,
"ipc": str(args.ipc),
"started_at": int(time.time()),
},
ensure_ascii=False,
)
)
_lock_fh.flush()
except Exception: except Exception:
pass pass
try: try:
@@ -2103,6 +2151,10 @@ def main(argv: Optional[list[str]] = None) -> int:
client.disconnect() client.disconnect()
except Exception: except Exception:
pass pass
try:
_release_ipc_lock(_lock_fh, str(args.ipc))
except Exception:
pass
return 0 return 0

View File

@@ -5,9 +5,15 @@ ytdl=yes
# uosc will draw its own window controls and border if you disable window border # uosc will draw its own window controls and border if you disable window border
border=no border=no
cache=yes cache=yes
cache-secs=30 # Give HTTP store streams more room to absorb Hydrus/network jitter before
demuxer-max-bytes=200MiB # mpv restarts audio after an underrun.
demuxer-max-back-bytes=100MiB cache-secs=90
cache-pause=yes
cache-pause-wait=12
demuxer-readahead-secs=90
demuxer-max-bytes=512MiB
demuxer-max-back-bytes=256MiB
audio-buffer=1.0
# Ensure uosc texture/icon fonts are discoverable by libass. # Ensure uosc texture/icon fonts are discoverable by libass.
osd-fonts-dir=~~/scripts/uosc/fonts osd-fonts-dir=~~/scripts/uosc/fonts

View File

@@ -1,2 +1 @@
# Medeia MPV script options # Medeia MPV script options
store=local

View File

@@ -47,6 +47,60 @@ import logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _normalize_detail_tags(tags: Any) -> List[str]:
if not tags:
return []
if isinstance(tags, str):
source = [part.strip() for part in tags.split(",")]
elif isinstance(tags, (list, tuple, set)):
source = [str(part or "").strip() for part in tags]
else:
source = [str(tags).strip()]
seen: set[str] = set()
normalized: List[str] = []
for tag in source:
text = str(tag or "").strip()
if not text:
continue
key = text.casefold()
if key in seen:
continue
seen.add(key)
normalized.append(text)
return normalized
def _partition_detail_tags(tags: Any) -> tuple[List[str], List[str]]:
    """Split tags into (namespace_tags, freeform_tags), each sorted.

    A tag is namespaced when it has the form "namespace:value" with
    non-blank text on both sides of the first colon. Namespaced tags sort
    case-insensitively by (namespace, value, full tag); freeform tags sort
    case-insensitively by their full text.
    """
    namespaced: List[str] = []
    freeform: List[str] = []
    for tag in _normalize_detail_tags(tags):
        head, colon, tail = str(tag).partition(":")
        if colon and head.strip() and tail.strip():
            namespaced.append(tag)
        else:
            freeform.append(tag)

    def _namespace_key(value: Any) -> tuple[str, str, str]:
        # Mirror the display grouping: namespace first, then value, then
        # the whole tag as a tiebreaker.
        text = str(value)
        head, _, tail = text.partition(":")
        return (head.casefold(), tail.casefold(), text.casefold())

    namespaced.sort(key=_namespace_key)
    freeform.sort(key=lambda value: str(value).casefold())
    return namespaced, freeform
def _chunk_detail_tags(tags: List[str], columns: int) -> List[List[str]]:
column_count = max(1, int(columns or 1))
rows: List[List[str]] = []
for index in range(0, len(tags), column_count):
rows.append(tags[index:index + column_count])
return rows
_RESULT_TABLE_ROW_STYLE_LOOP: List[tuple[str, str]] = [ _RESULT_TABLE_ROW_STYLE_LOOP: List[tuple[str, str]] = [
("#ff0000", "#8f00ff"), ("#ff0000", "#8f00ff"),
("#ffa500", "#800080"), ("#ffa500", "#800080"),
@@ -2213,7 +2267,6 @@ class ItemDetailView(Table):
from rich.table import Table as RichTable from rich.table import Table as RichTable
from rich.panel import Panel from rich.panel import Panel
from rich.console import Group from rich.console import Group
from rich.columns import Columns
from rich.text import Text from rich.text import Text
# 1. Create Detail Grid (matching rich_display.py style) # 1. Create Detail Grid (matching rich_display.py style)
@@ -2236,6 +2289,28 @@ class ItemDetailView(Table):
tag_text.append(raw, style="green") tag_text.append(raw, style="green")
return tag_text return tag_text
def _build_tag_renderable(tags: Any) -> Optional[Any]:
namespace_tags, freeform_tags = _partition_detail_tags(tags)
if not namespace_tags and not freeform_tags:
return None
renderables: List[Any] = []
for tag in namespace_tags:
renderables.append(_render_tag_text(tag))
if freeform_tags:
freeform_grid = RichTable.grid(expand=True, padding=(0, 2))
for _ in range(3):
freeform_grid.add_column(ratio=1)
for row_values in _chunk_detail_tags(freeform_tags, 3):
cells = [_render_tag_text(tag) for tag in row_values]
while len(cells) < 3:
cells.append(Text(""))
freeform_grid.add_row(*cells)
renderables.append(freeform_grid)
return Group(*renderables)
# Canonical display order for metadata # Canonical display order for metadata
order = ["Title", "Hash", "Store", "Path", "Ext", "Size", "Duration", "Url", "Relations"] order = ["Title", "Hash", "Store", "Path", "Ext", "Size", "Duration", "Url", "Relations"]
@@ -2280,13 +2355,11 @@ class ItemDetailView(Table):
# Tags Summary # Tags Summary
tags = self.item_metadata.get("Tags") or self.item_metadata.get("tags") or self.item_metadata.get("tag") tags = self.item_metadata.get("Tags") or self.item_metadata.get("tags") or self.item_metadata.get("tag")
if not self.exclude_tags and tags and isinstance(tags, (list, str)): if not self.exclude_tags and tags and isinstance(tags, (list, str)):
if isinstance(tags, str): tag_renderable = _build_tag_renderable(tags)
tags = [t.strip() for t in tags.split(",") if t.strip()] if tag_renderable is not None:
tags_sorted = sorted(map(str, tags)) details_table.add_row("", "")
tag_cols = Columns([_render_tag_text(t) for t in tags_sorted], equal=True, expand=True) details_table.add_row("Tags:", tag_renderable)
details_table.add_row("", "") # Spacer has_details = True
details_table.add_row("Tags:", tag_cols)
has_details = True
# 2. Get the standard table render (if there are rows or a specific title) # 2. Get the standard table render (if there are rows or a specific title)
original_title = self.title original_title = self.title