fix: thread api_key through ollama num_ctx probe + author map

Follow-up for salvaged PR #3185:
- run_agent.py: pass self.api_key to query_ollama_num_ctx() so Ollama
  behind an auth proxy (same issue class as the LM Studio fix) can be
  probed successfully.
- scripts/release.py AUTHOR_MAP: map @tannerfokkens-maker's local-hostname
  commit email.
This commit is contained in:
Teknium
2026-04-20 20:49:49 -07:00
committed by Teknium
parent cde7283821
commit e00d9630c5
2 changed files with 2 additions and 1 deletion

View File

@@ -1766,7 +1766,7 @@ class AIAgent:
logger.debug("Invalid ollama_num_ctx config value: %r", _ollama_num_ctx_override)
if self._ollama_num_ctx is None and self.base_url and is_local_endpoint(self.base_url):
try:
_detected = query_ollama_num_ctx(self.model, self.base_url)
_detected = query_ollama_num_ctx(self.model, self.base_url, api_key=self.api_key or "")
if _detected and _detected > 0:
self._ollama_num_ctx = _detected
except Exception as exc:

View File

@@ -293,6 +293,7 @@ AUTHOR_MAP = {
"ywt000818@gmail.com": "OwenYWT",
"dhandhalyabhavik@gmail.com": "v1k22",
"rucchizhao@zhaochenfeideMacBook-Pro.local": "RucchiZ",
"tannerfokkens@Mac.attlocal.net": "tannerfokkens-maker",
"lehaolin98@outlook.com": "LehaoLin",
"yuewang1@microsoft.com": "imink",
"1736355688@qq.com": "hedgeho9X",