Rename known_urls to url across MPVfile state, serialisation, and sidecar handling
@@ -96,7 +96,7 @@ class MPVfile:
     relationship_metadata: Dict[str, Any] = field(default_factory=dict)
     tags: List[str] = field(default_factory=list)
     original_tags: Dict[str, str] = field(default_factory=dict)
-    known_urls: List[str] = field(default_factory=list)
+    url: List[str] = field(default_factory=list)
     title: Optional[str] = None
     source_url: Optional[str] = None
     clip_time: Optional[str] = None
@@ -128,7 +128,7 @@ class MPVfile:
             "relationship_metadata": self.relationship_metadata,
             "tags": self.tags,
             "original_tags": self.original_tags,
-            "known_urls": self.known_urls,
+            "url": self.url,
             "title": self.title,
             "source_url": self.source_url,
             "clip_time": self.clip_time,
@@ -293,10 +293,10 @@ class MPVFileBuilder:
         if s.tags:
             s.original_tags = {tag: tag for tag in s.tags}

-        # known URLs + last_url
-        s.known_urls = _normalise_string_list(p.get("known_urls"))
-        if self.last_url and self.last_url not in s.known_urls:
-            s.known_urls.append(self.last_url)
+        # known url + last_url
+        s.url = _normalise_string_list(p.get("url"))
+        if self.last_url and self.last_url not in s.url:
+            s.url.append(self.last_url)

         # source URL (explicit or fallback to last_url)
         explicit_source = p.get("source_url")
@@ -500,8 +500,8 @@ class MPVFileBuilder:
         self._apply_hydrus_result(result)
         self.state.type = "hydrus"
         matched_url = result.get("matched_url") or result.get("url")
-        if matched_url and matched_url not in self.state.known_urls:
-            self.state.known_urls.append(matched_url)
+        if matched_url and matched_url not in self.state.url:
+            self.state.url.append(matched_url)
         # Enrich relationships once we know the hash
         if self.include_relationships and self.state.hash and self.hydrus_settings.base_url:
             self._enrich_relationships_from_api(self.state.hash)
@@ -527,7 +527,7 @@ class MPVFileBuilder:
         metadata_payload["type"] = "other"
         self.state.metadata = metadata_payload
         # Do NOT overwrite MPVfile.type with metadata.type
-        self._merge_known_urls(metadata_payload.get("known_urls") or metadata_payload.get("known_urls_set"))
+        self._merge_url(metadata_payload.get("url") or metadata_payload.get("url_set"))
         source_url = metadata_payload.get("original_url") or metadata_payload.get("source_url")
         if source_url and not self.state.source_url:
             self.state.source_url = self._normalise_url(source_url)
@@ -722,7 +722,7 @@ class MPVFileBuilder:
                 include_service_keys_to_tags=True,
                 include_duration=True,
                 include_size=True,
-                include_file_urls=False,
+                include_file_url=False,
                 include_mime=False,
             )
         except HydrusRequestError as hre:  # pragma: no cover
@@ -801,11 +801,11 @@ class MPVFileBuilder:
             if tag not in self.state.original_tags:
                 self.state.original_tags[tag] = tag

-    def _merge_known_urls(self, urls: Optional[Iterable[Any]]) -> None:
-        if not urls:
+    def _merge_url(self, url: Optional[Iterable[Any]]) -> None:
+        if not url:
             return
-        combined = list(self.state.known_urls or []) + _normalise_string_list(urls)
-        self.state.known_urls = unique_preserve_order(combined)
+        combined = list(self.state.url or []) + _normalise_string_list(url)
+        self.state.url = unique_preserve_order(combined)

     def _load_sidecar_tags(self, local_path: str) -> None:
         try:
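
Note: the merge above relies on two helpers that are referenced but not shown in this diff. The following is a minimal standalone sketch, not the project's code, of the behaviour they are assumed to have: _normalise_string_list coerces an iterable to stripped, non-empty strings, and unique_preserve_order drops later duplicates while keeping first-seen order.

from typing import Any, Iterable, List, Optional

def _normalise_string_list(values: Optional[Iterable[Any]]) -> List[str]:
    # Assumed behaviour: keep non-empty strings, stripped of whitespace.
    if not values:
        return []
    return [str(v).strip() for v in values if str(v).strip()]

def unique_preserve_order(items: Iterable[str]) -> List[str]:
    # Assumed behaviour: first occurrence wins, later duplicates are dropped.
    seen: set = set()
    out: List[str] = []
    for item in items:
        if item not in seen:
            seen.add(item)
            out.append(item)
    return out

def merge_url(existing: List[str], incoming: Optional[Iterable[Any]]) -> List[str]:
    # Mirrors the renamed _merge_url: append new URLs, dedupe, keep order.
    if not incoming:
        return list(existing)
    return unique_preserve_order(list(existing) + _normalise_string_list(incoming))

# Example: the duplicate URL is merged away and order is preserved.
print(merge_url(["https://a.example/1"], ["https://a.example/1", "https://b.example/2"]))
# ['https://a.example/1', 'https://b.example/2']
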
@@ -821,7 +821,7 @@ class MPVFileBuilder:
             if hash_value and not self.state.hash and _looks_like_hash(hash_value):
                 self.state.hash = hash_value.lower()
             self._merge_tags(tags)
-            self._merge_known_urls(known)
+            self._merge_url(known)
             break

     def _read_sidecar(self, sidecar_path: Path) -> tuple[Optional[str], List[str], List[str]]:
@@ -831,7 +831,7 @@ class MPVFileBuilder:
             return None, [], []
         hash_value: Optional[str] = None
         tags: List[str] = []
-        known_urls: List[str] = []
+        url: List[str] = []
         for line in raw.splitlines():
             trimmed = line.strip()
             if not trimmed:
@@ -841,13 +841,13 @@ class MPVFileBuilder:
                 candidate = trimmed.split(":", 1)[1].strip() if ":" in trimmed else ""
                 if candidate:
                     hash_value = candidate
-            elif lowered.startswith("known_url:") or lowered.startswith("url:"):
+            elif lowered.startswith("url:"):
                 candidate = trimmed.split(":", 1)[1].strip() if ":" in trimmed else ""
                 if candidate:
-                    known_urls.append(candidate)
+                    url.append(candidate)
             else:
                 tags.append(trimmed)
-        return hash_value, tags, known_urls
+        return hash_value, tags, url

     def _compute_local_hash(self, local_path: str) -> None:
         try:
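
For clarity, a self-contained sketch of the sidecar parsing after this change, trimmed from the method above: only hash: and url: prefixes are special-cased now, so a line still written with the old known_url: prefix falls through to the else branch and is kept as a plain tag.

from typing import List, Optional, Tuple

def read_sidecar_text(raw: str) -> Tuple[Optional[str], List[str], List[str]]:
    # Trimmed stand-in for _read_sidecar: returns (hash, tags, url).
    hash_value: Optional[str] = None
    tags: List[str] = []
    url: List[str] = []
    for line in raw.splitlines():
        trimmed = line.strip()
        if not trimmed:
            continue
        lowered = trimmed.lower()
        if lowered.startswith("hash:"):
            candidate = trimmed.split(":", 1)[1].strip()
            if candidate:
                hash_value = candidate
        elif lowered.startswith("url:"):
            candidate = trimmed.split(":", 1)[1].strip()
            if candidate:
                url.append(candidate)
        else:
            tags.append(trimmed)
    return hash_value, tags, url

# A legacy "known_url:" line is no longer recognised, so it lands in tags.
print(read_sidecar_text("hash: abc123\nurl: https://example.com/v\nknown_url: https://old.example/v\ncreator:someone"))
# ('abc123', ['known_url: https://old.example/v', 'creator:someone'], ['https://example.com/v'])
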
@@ -864,8 +864,8 @@ class MPVFileBuilder:
     def _finalise(self) -> None:
         if self.state.tags:
             self.state.tags = unique_preserve_order(self.state.tags)
-        if self.state.known_urls:
-            self.state.known_urls = unique_preserve_order(self.state.known_urls)
+        if self.state.url:
+            self.state.url = unique_preserve_order(self.state.url)
         # Ensure metadata.type is always present for Lua, but do NOT overwrite MPVfile.type
         if not self.state.title:
             if self.state.metadata.get("title"):
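
Downstream effect, sketched under the assumption that consumers read MPVfile.to_dict(): the URL list is now exposed under "url" and the "known_urls" key is gone. The class below is a trimmed hypothetical stand-in, not the real dataclass; only the renamed field and one untouched field are shown.

from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional

@dataclass
class MPVfileSketch:
    # Stand-in for MPVfile: renamed url field plus one unchanged field.
    url: List[str] = field(default_factory=list)
    title: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        return {"url": self.url, "title": self.title}

state = MPVfileSketch(url=["https://example.com/watch?v=abc"], title="demo")
payload = state.to_dict()
assert "known_urls" not in payload
print(payload["url"])  # ['https://example.com/watch?v=abc']
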