This commit is contained in:
2026-02-11 19:06:38 -08:00
parent 1d0de1118b
commit ba623cb992
20 changed files with 848 additions and 247 deletions

View File

@@ -34,6 +34,7 @@ logger = logging.getLogger(__name__)
from API.ssl_certs import resolve_verify_value as _resolve_verify_value
from API.ssl_certs import get_requests_verify_value
from API.httpx_shared import get_shared_httpx_client
# Default configuration
DEFAULT_TIMEOUT = 30.0
@@ -504,17 +505,18 @@ class HTTPClient:
if ("certificate verify failed" in msg or "unable to get local issuer certificate" in msg):
logger.info("Certificate verification failed; attempting to retry with a system-aware CA bundle")
try:
import httpx as _httpx
# Use the client's precomputed verify argument (set at init)
verify_override = self._httpx_verify
with _httpx.Client(timeout=self.timeout, verify=verify_override, headers=self._get_headers()) as temp_client:
try:
response = temp_client.request(method, url, **kwargs)
if raise_for_status:
response.raise_for_status()
return response
except Exception as e2:
last_exception = e2
temp_client = get_shared_httpx_client(
timeout=self.timeout,
verify_ssl=self._httpx_verify,
headers=self._get_headers(),
)
try:
response = temp_client.request(method, url, **kwargs)
if raise_for_status:
response.raise_for_status()
return response
except Exception as e2:
last_exception = e2
except Exception:
# certifi/pip-system-certs/httpx not available; fall back to existing retry behavior
pass
@@ -530,17 +532,18 @@ class HTTPClient:
if ("certificate verify failed" in msg or "unable to get local issuer certificate" in msg):
logger.info("Certificate verification failed; attempting to retry with a system-aware CA bundle")
try:
import httpx as _httpx
# Use the client's precomputed verify argument (set at init)
verify_override = self._httpx_verify
with _httpx.Client(timeout=self.timeout, verify=verify_override, headers=self._get_headers()) as temp_client:
try:
response = temp_client.request(method, url, **kwargs)
if raise_for_status:
response.raise_for_status()
return response
except Exception as e2:
last_exception = e2
temp_client = get_shared_httpx_client(
timeout=self.timeout,
verify_ssl=self._httpx_verify,
headers=self._get_headers(),
)
try:
response = temp_client.request(method, url, **kwargs)
if raise_for_status:
response.raise_for_status()
return response
except Exception as e2:
last_exception = e2
except Exception:
# certifi/pip-system-certs/httpx not available; fall back to existing retry behavior
pass

View File

@@ -1,14 +1,16 @@
"""Shared `httpx.Client` helper.
Creating short-lived httpx clients disables connection pooling and costs extra CPU.
This module provides a small singleton client for callers that just need basic
This module provides a small keyed client cache for callers that just need basic
GETs without the full HTTPClient wrapper.
"""
from __future__ import annotations
import atexit
from collections import OrderedDict
import threading
from typing import Dict, Optional
from typing import Any, Dict, Optional, Tuple
import httpx
@@ -20,39 +22,85 @@ _DEFAULT_USER_AGENT = (
)
_lock = threading.Lock()
_shared_client: Optional[httpx.Client] = None
_MAX_SHARED_CLIENTS = 8
_shared_clients: "OrderedDict[Tuple[float, Tuple[str, str], Tuple[Tuple[str, str], ...]], httpx.Client]" = OrderedDict()
def _normalize_headers(headers: Optional[Dict[str, str]]) -> Dict[str, str]:
    """Merge caller-supplied headers over the default User-Agent header.

    Keys and values are coerced to ``str``; caller entries override defaults.
    """
    merged: Dict[str, str] = {"User-Agent": _DEFAULT_USER_AGENT}
    for name, value in (headers or {}).items():
        merged[str(name)] = str(value)
    return merged
def _verify_key(verify_value: Any) -> Tuple[str, str]:
if isinstance(verify_value, bool):
return ("bool", "1" if verify_value else "0")
if isinstance(verify_value, str):
return ("str", verify_value)
return ("obj", str(id(verify_value)))
def _client_key(
    *,
    timeout: float,
    verify_value: Any,
    merged_headers: Dict[str, str],
) -> Tuple[float, Tuple[str, str], Tuple[Tuple[str, str], ...]]:
    """Build the hashable cache key identifying one client configuration."""
    # HTTP header names are case-insensitive, so lowercase them and sort so
    # the key is independent of insertion order and name casing.
    pairs = [(str(name).lower(), str(value)) for name, value in merged_headers.items()]
    pairs.sort()
    return (float(timeout), _verify_key(verify_value), tuple(pairs))
def get_shared_httpx_client(
    *,
    timeout: float = 30.0,
    verify_ssl: bool | str = True,
    headers: Optional[Dict[str, str]] = None,
) -> httpx.Client:
    """Return a shared synchronous httpx.Client for a specific config key.

    Clients are cached per (timeout, verify, headers) key in a small LRU
    ordered dict guarded by ``_lock``; once the cache exceeds
    ``_MAX_SHARED_CLIENTS`` the least recently used client is closed and
    evicted. Callers must NOT close the returned client — it is shared.

    Args:
        timeout: Request timeout (seconds) configured on the pooled client.
        verify_ssl: ``True``/``False`` or a CA-bundle path; resolved through
            ``resolve_verify_value`` before use.
        headers: Extra default headers merged over the default User-Agent.

    Returns:
        A cached (or newly created and cached) ``httpx.Client``.
    """
    verify_value = resolve_verify_value(verify_ssl)
    merged_headers = _normalize_headers(headers)
    key = _client_key(
        timeout=timeout,
        verify_value=verify_value,
        merged_headers=merged_headers,
    )
    with _lock:
        existing = _shared_clients.get(key)
        if existing is not None:
            # Cache hit: refresh the LRU position and reuse the client.
            _shared_clients.move_to_end(key)
            return existing
        client = httpx.Client(
            timeout=timeout,
            verify=verify_value,
            headers=merged_headers,
        )
        _shared_clients[key] = client
        # Evict and close least-recently-used clients beyond the cap.
        while len(_shared_clients) > _MAX_SHARED_CLIENTS:
            _, old_client = _shared_clients.popitem(last=False)
            try:
                old_client.close()
            except Exception:
                # Best-effort close: an unhealthy evicted client must not
                # break the caller's request path.
                pass
        return client
def close_shared_httpx_client() -> None:
    """Close and drop every cached shared httpx client (registered via atexit).

    Snapshots and clears the cache under ``_lock``, then closes each client
    outside the lock so a slow/failing close cannot block other callers.
    """
    with _lock:
        clients = list(_shared_clients.values())
        _shared_clients.clear()
    for client in clients:
        try:
            client.close()
        except Exception:
            # Shutdown must not raise because one client failed to close.
            pass
atexit.register(close_shared_httpx_client)

View File

@@ -9,8 +9,11 @@ This module intentionally avoids importing the heavy httpx-based stack.
from __future__ import annotations
import atexit
from collections import OrderedDict
import threading
from typing import Any, Dict, Optional
from typing import Any, Dict, Optional, Tuple
from weakref import WeakSet
import requests
from requests.adapters import HTTPAdapter
@@ -23,6 +26,40 @@ _DEFAULT_USER_AGENT = (
)
_local = threading.local()
_MAX_SESSIONS_PER_THREAD = 4
_session_registry_lock = threading.Lock()
_all_sessions: "WeakSet[requests.Session]" = WeakSet()
def _session_key(
    *,
    user_agent: str,
    verify_ssl: bool,
    pool_connections: int,
    pool_maxsize: int,
) -> Tuple[str, Any, int, int]:
    """Derive the per-thread cache key for one requests.Session configuration."""
    agent = str(user_agent or _DEFAULT_USER_AGENT)
    # NOTE(review): assumes resolve_verify_value() returns a hashable value
    # (bool or CA-bundle path) since this tuple is used as a dict key —
    # confirm against API.ssl_certs.resolve_verify_value.
    verify = resolve_verify_value(verify_ssl)
    return (agent, verify, int(pool_connections), int(pool_maxsize))
def _get_thread_session_cache() -> "OrderedDict[Tuple[str, Any, int, int], requests.Session]":
    """Return this thread's session cache, creating it on first access."""
    existing = getattr(_local, "session_cache", None)
    if existing is not None:
        return existing
    fresh: "OrderedDict[Tuple[str, Any, int, int], requests.Session]" = OrderedDict()
    _local.session_cache = fresh
    return fresh
def _register_session(session: requests.Session) -> None:
    """Best-effort: add *session* to the process-wide weak registry.

    The WeakSet lets shutdown cleanup find sessions created on any thread
    without keeping them alive. Registration failures are swallowed so
    bookkeeping can never break the caller's request path.
    """
    try:
        with _session_registry_lock:
            _all_sessions.add(session)
    except Exception:
        pass
def get_requests_session(
@@ -32,24 +69,44 @@ def get_requests_session(
pool_connections: int = 16,
pool_maxsize: int = 16,
) -> requests.Session:
"""Return a thread-local shared Session configured for pooling."""
"""Return a thread-local pooled Session keyed by config values."""
session: Optional[requests.Session] = getattr(_local, "session", None)
if session is not None:
return session
key = _session_key(
user_agent=user_agent,
verify_ssl=verify_ssl,
pool_connections=pool_connections,
pool_maxsize=pool_maxsize,
)
cache = _get_thread_session_cache()
existing = cache.get(key)
if existing is not None:
cache.move_to_end(key)
return existing
session = requests.Session()
session.headers.update({"User-Agent": str(user_agent or _DEFAULT_USER_AGENT)})
session.headers.update({"User-Agent": key[0]})
# Expand connection pool; keep max_retries=0 to avoid semantic changes.
adapter = HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, max_retries=0)
adapter = HTTPAdapter(
pool_connections=pool_connections,
pool_maxsize=pool_maxsize,
max_retries=0,
)
session.mount("http://", adapter)
session.mount("https://", adapter)
# Configure verify once.
session.verify = resolve_verify_value(verify_ssl)
session.verify = key[1]
_register_session(session)
_local.session = session
cache[key] = session
while len(cache) > _MAX_SESSIONS_PER_THREAD:
_, old_session = cache.popitem(last=False)
try:
old_session.close()
except Exception:
pass
return session
@@ -66,3 +123,36 @@ def request(
sess = get_requests_session()
return sess.request(method, url, params=params, headers=headers, timeout=timeout, **kwargs)
def close_requests_sessions() -> None:
    """Close pooled Sessions: this thread's cache, then the global registry."""
    doomed = []
    # Drain the calling thread's keyed cache first (best effort).
    thread_cache = getattr(_local, "session_cache", None)
    if thread_cache:
        try:
            doomed.extend(thread_cache.values())
            thread_cache.clear()
        except Exception:
            pass
    # Then drain the process-wide weak registry under its lock.
    try:
        with _session_registry_lock:
            doomed.extend(_all_sessions)
            _all_sessions.clear()
    except Exception:
        pass
    for sess in doomed:
        try:
            sess.close()
        except Exception:
            # One session failing to close must not block closing the rest.
            pass
atexit.register(close_requests_sessions)