mirror of
https://github.com/codeflash-ai/codeflash-internal.git
synced 2026-05-04 18:25:18 +00:00
fix: resolve mypy type errors in llm.py
- Replace Lock._loop private attribute access with explicit loop tracking
- Add explicit type annotation for clients_to_close list in shutdown()

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
cada44b526
commit
7457b89631
1 changed file with 4 additions and 2 deletions
|
|
@@ -76,6 +76,7 @@ class LLMClient:
|
|||
self.client_loop: asyncio.AbstractEventLoop | None = None
|
||||
self.background_tasks: set[asyncio.Task[Any]] = set()
|
||||
self._client_lock: asyncio.Lock | None = None
|
||||
self._client_lock_loop: asyncio.AbstractEventLoop | None = None
|
||||
|
||||
async def call(
|
||||
self,
|
||||
|
|
@@ -96,8 +97,9 @@ class LLMClient:
|
|||
loop = asyncio.get_running_loop()
|
||||
|
||||
# Create lock for this event loop if it doesn't exist
|
||||
if self._client_lock is None or self._client_lock._loop is not loop:
|
||||
if self._client_lock is None or self._client_lock_loop is not loop:
|
||||
self._client_lock = asyncio.Lock()
|
||||
self._client_lock_loop = loop
|
||||
|
||||
if loop is not self.client_loop:
|
||||
async with self._client_lock:
|
||||
|
|
@@ -243,7 +245,7 @@ class LLMClient:
|
|||
This method ensures all clients are explicitly closed during graceful shutdown,
|
||||
before the event loop is torn down.
|
||||
"""
|
||||
clients_to_close = []
|
||||
clients_to_close: list[tuple[str, AsyncAzureOpenAI | AsyncAnthropicBedrock]] = []
|
||||
|
||||
if self.openai_client is not None:
|
||||
clients_to_close.append(("OpenAI", self.openai_client))
|
||||
|
|
|
|||
Loading…
Reference in a new issue