Merge branch 'main' of github.com:codeflash-ai/codeflash-internal into v4a-diff-patches-for-optimizer-context

ali 2025-12-29 22:33:36 +02:00
commit 14491f63ce
80 changed files with 5254 additions and 1275 deletions

@@ -0,0 +1,44 @@
name: Claude Code Review

on:
  pull_request:
    types: [opened, synchronize]

jobs:
  claude-review:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
      issues: read
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run Claude Code Review
        id: claude-review
        uses: anthropics/claude-code-action@v1
        with:
          use_foundry: "true"
          use_sticky_comment: true
          prompt: |
            REPO: ${{ github.repository }}
            PR NUMBER: ${{ github.event.pull_request.number }}

            Please review this pull request and provide feedback on:
            - Code quality and best practices
            - Potential bugs or issues
            - Performance considerations
            - Security concerns
            - Test coverage

            Use the repository's CLAUDE.md for guidance on style and conventions. Be constructive and helpful in your feedback.
          claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"'
        env:
          ANTHROPIC_FOUNDRY_API_KEY: ${{ secrets.AZURE_ANTHROPIC_API_KEY }}
          ANTHROPIC_FOUNDRY_BASE_URL: ${{ secrets.AZURE_ANTHROPIC_ENDPOINT }}

.github/workflows/claude.yml
@@ -0,0 +1,42 @@
name: Claude Code

on:
  issue_comment:
    types: [created]
  pull_request_review_comment:
    types: [created]
  issues:
    types: [opened, assigned]
  pull_request_review:
    types: [submitted]

jobs:
  claude:
    if: |
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: read
      issues: read
      id-token: write
      actions: read
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run Claude Code
        id: claude
        uses: anthropics/claude-code-action@v1
        with:
          use_foundry: "true"
          additional_permissions: |
            actions: read
        env:
          ANTHROPIC_FOUNDRY_API_KEY: ${{ secrets.AZURE_ANTHROPIC_API_KEY }}
          ANTHROPIC_FOUNDRY_BASE_URL: ${{ secrets.AZURE_ANTHROPIC_ENDPOINT }}

@@ -1,20 +0,0 @@
on:
  pull_request:
    types: [opened, reopened, ready_for_review]
  issue_comment:
jobs:
  pr_agent_job:
    if: ${{ github.event.sender.type != 'Bot' }}
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
      contents: write
    name: Run pr agent on every pull request, respond to user comments
    steps:
      - name: PR Agent action step
        id: pragent
        uses: qodo-ai/pr-agent@main
        env:
          OPENAI_KEY: ${{ secrets.OPENAI_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -1,5 +1,6 @@
{
  "python.analysis.typeCheckingMode": "basic",
  "python.defaultInterpreterPath": "${workspaceFolder}/django/aiservice/.venv/bin/python",
  "editor.formatOnSave": true,
  "editor.defaultFormatter": "esbenp.prettier-vscode",
  "editor.codeActionsOnSave": {

AGENTS.md
@@ -0,0 +1,5 @@
# Agent Rules <!-- tessl-managed -->
@.tessl/RULES.md follow the [instructions](.tessl/RULES.md)

@@ -8,3 +8,5 @@ AZURE_API_BASE=
# To run in local mode, set OPENAI_API_TYPE anything other than azure
OPENAI_API_TYPE=
OPENAI_API_KEY=
AZURE_ANTHROPIC_API_KEY=
AZURE_ANTHROPIC_ENDPOINT=https://codeflash-anthropic-resource.openai.azure.com/anthropic

@@ -1,12 +1,13 @@
"""Environment-specific configuration and utilities."""
from __future__ import annotations
import logging
import os
import sys
from typing import TYPE_CHECKING, Literal
from typing import TYPE_CHECKING
from dotenv import load_dotenv
from openai import AsyncOpenAI
if TYPE_CHECKING:
from collections.abc import Callable
@@ -20,54 +21,25 @@ logging.getLogger("parso").setLevel(logging.WARNING)
def load_env() -> None:
"""Load environment variables from .env file in non-production."""
if not IS_PRODUCTION:
load_dotenv()
def set_logging_level() -> None:
"""Set logging level based on environment."""
if IS_PRODUCTION:
logging.basicConfig(level=logging.INFO, format=LOGGING_FORMAT, stream=sys.stdout)
else:
logging.basicConfig(level=logging.DEBUG, format=LOGGING_FORMAT, stream=sys.stdout)
def debug_log_sensitive_data(message: str) -> None:
"""Log sensitive data only in non-production environments."""
if not IS_PRODUCTION:
logging.debug(message)
def debug_log_sensitive_data_from_callable(message: Callable[[], str | None]) -> None:
"""Log sensitive data from callable only in non-production environments."""
if not IS_PRODUCTION:
logging.debug(message())
def create_llm_client(model_type: Literal["openai", "anthropic", "google"]) -> AsyncOpenAI | None:
# use azure or openai
openai_api_type = os.environ.get("OPENAI_API_TYPE")
openai_api_base_url = os.environ.get(
"OPENAI_API_BASE"
) # for us it is https://codeflash-openai-service-eastus2-0.openai.azure.com/openai/v1/
# we need both of the above to run on azure
azure_api_key, openai_key, anthropic_key, google_key = (
os.environ.get("AZURE_OPENAI_API_KEY"),
os.environ.get("OPENAI_API_KEY"),
os.environ.get("ANTHROPIC_API_KEY"),
os.environ.get("GEMINI_API_KEY"),
)
if model_type == "openai" and azure_api_key and openai_api_type == "azure" and openai_api_base_url:
# check for azure first
return AsyncOpenAI(api_key=azure_api_key, base_url=openai_api_base_url)
if model_type == "openai" and openai_key:
return AsyncOpenAI(api_key=openai_key) # baseurl not needed for regular openai
if model_type == "anthropic" and anthropic_key:
return AsyncOpenAI(api_key=anthropic_key, base_url="https://api.anthropic.com/v1/")
# # for future use : gemini supported only via GEMINI_API_KEY at the moment, todo for vertex ai
if model_type == "google" and google_key:
return AsyncOpenAI(api_key=google_key, base_url="https://generativelanguage.googleapis.com/v1beta/openai/")
return None
llm_clients = {
"openai": create_llm_client("openai"),
"anthropic": create_llm_client("anthropic"),
# "google": create_llm_client("google"), # no need to instantiate right now as we're not using it
}

@@ -0,0 +1,230 @@
"""Unified LLM module for all model definitions, clients, and API calls."""
from __future__ import annotations
import os
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Literal
from anthropic import AsyncAnthropicFoundry
from openai import AsyncOpenAI
from pydantic.dataclasses import dataclass as pydantic_dataclass
if TYPE_CHECKING:
from anthropic.types import Message as AnthropicMessage
from openai.types.chat import ChatCompletion
# =============================================================================
# Model Definitions
# =============================================================================
# Pricing is in USD per 1M tokens. See:
# https://docs.anthropic.com/en/docs/about-claude/pricing
# https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/
@pydantic_dataclass
class LLM:
"""Base LLM configuration with pricing info."""
name: str # On Azure OpenAI Service, this is the deployment name
max_tokens: int
model_type: Literal["openai", "anthropic", "google"]
input_cost: float | None = None # USD per 1M tokens
output_cost: float | None = None # USD per 1M tokens
@pydantic_dataclass
class OpenAI_GPT_4_1(LLM):
"""OpenAI GPT-4.1 model."""
name: str = "gpt-4.1"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 100000
input_cost: float = 2.00
output_cost: float = 8.00
@pydantic_dataclass
class Anthropic_Claude_Sonnet_4_5(LLM):
"""Anthropic Claude 4.5 Sonnet via Azure Foundry."""
name: str = "claude-sonnet-4-5"
model_type: Literal["openai", "anthropic", "google"] = "anthropic"
max_tokens: int = 200000
input_cost: float = 3.00
output_cost: float = 15.00
# =============================================================================
# LLM Client Setup
# =============================================================================
# Read environment variables once at module load
_AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
_AZURE_OPENAI_ENDPOINT = os.environ.get(
"AZURE_OPENAI_ENDPOINT", "https://codeflash-openai-resource.openai.azure.com/openai/v1/"
)
_AZURE_ANTHROPIC_API_KEY = os.environ.get("AZURE_ANTHROPIC_API_KEY")
_AZURE_ANTHROPIC_ENDPOINT = os.environ.get(
"AZURE_ANTHROPIC_ENDPOINT", "https://codeflash-anthropic-resource.openai.azure.com/anthropic"
)
_OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
def _create_openai_client() -> AsyncOpenAI | None:
if _AZURE_OPENAI_API_KEY:
return AsyncOpenAI(api_key=_AZURE_OPENAI_API_KEY, base_url=_AZURE_OPENAI_ENDPOINT)
if _OPENAI_API_KEY:
return AsyncOpenAI(api_key=_OPENAI_API_KEY)
return None
def _create_anthropic_client() -> AsyncAnthropicFoundry | None:
if _AZURE_ANTHROPIC_API_KEY:
return AsyncAnthropicFoundry(api_key=_AZURE_ANTHROPIC_API_KEY, base_url=_AZURE_ANTHROPIC_ENDPOINT)
return None
_openai_client = _create_openai_client()
_anthropic_client = _create_anthropic_client()
llm_clients: dict[str, AsyncOpenAI | AsyncAnthropicFoundry | None] = {
"openai": _openai_client,
"anthropic": _anthropic_client,
}
# =============================================================================
# Response Types
# =============================================================================
@dataclass
class LLMUsage:
"""Unified usage stats for both OpenAI and Anthropic responses."""
input_tokens: int
output_tokens: int
@dataclass
class LLMResponse:
"""Unified response wrapper for both OpenAI and Anthropic API responses."""
content: str
usage: LLMUsage
raw_response: ChatCompletion | AnthropicMessage
# =============================================================================
# LLM API Call
# =============================================================================
async def call_llm(
model_name: str,
model_type: Literal["openai", "anthropic", "google"],
messages: list[dict[str, Any]],
max_tokens: int = 8192,
temperature: float | None = None,
) -> LLMResponse:
"""Call LLM with OpenAI or Anthropic client."""
client = llm_clients[model_type]
if client is None:
msg = f"LLM client for model type '{model_type}' is not available"
raise ValueError(msg)
if model_type == "anthropic":
assert isinstance(client, AsyncAnthropicFoundry)
system_prompt = next((m["content"] for m in messages if m["role"] == "system"), None)
anthropic_messages = [{"role": m["role"], "content": m["content"]} for m in messages if m["role"] != "system"]
kwargs: dict[str, Any] = {"model": model_name, "messages": anthropic_messages, "max_tokens": max_tokens}
if system_prompt:
kwargs["system"] = system_prompt
if temperature is not None:
kwargs["temperature"] = temperature
response = await client.messages.create(**kwargs)
content = "".join(block.text for block in response.content if hasattr(block, "text"))
return LLMResponse(
content=content,
usage=LLMUsage(input_tokens=response.usage.input_tokens, output_tokens=response.usage.output_tokens),
raw_response=response,
)
# OpenAI / Google (OpenAI-compatible)
assert isinstance(client, AsyncOpenAI)
openai_kwargs: dict[str, Any] = {"model": model_name, "messages": messages}
if temperature is not None:
openai_kwargs["temperature"] = temperature
response = await client.chat.completions.create(**openai_kwargs)
return LLMResponse(
content=response.choices[0].message.content or "",
usage=LLMUsage(
input_tokens=response.usage.prompt_tokens if response.usage else 0,
output_tokens=response.usage.completion_tokens if response.usage else 0,
),
raw_response=response,
)
# =============================================================================
# Cost Calculation
# =============================================================================
def calculate_llm_cost(response: Any, llm: LLM) -> float:
"""Calculate the cost of an LLM API call.
Args:
response: The raw response from the LLM API call.
llm: The LLM model configuration with pricing info.
Returns:
The total cost in USD, or 0.0 if cost cannot be calculated.
"""
try:
usage = response.usage
if hasattr(usage, "prompt_tokens"): # OpenAI
prompt_tokens = usage.prompt_tokens
completion_tokens = usage.completion_tokens
else: # Anthropic
prompt_tokens = usage.input_tokens
completion_tokens = usage.output_tokens
prompt_cost = (prompt_tokens / 1_000_000) * llm.input_cost
completion_cost = (completion_tokens / 1_000_000) * llm.output_cost
return prompt_cost + completion_cost
except Exception:
return 0.0
# =============================================================================
# Model Selection (based on available clients)
# =============================================================================
# Prefer OpenAI: use OpenAI if available, fall back to Anthropic
_OPENAI_MODEL: LLM = OpenAI_GPT_4_1() if _openai_client else Anthropic_Claude_Sonnet_4_5()
# Prefer Anthropic: use Anthropic (Azure Foundry) if available, fall back to OpenAI
_ANTHROPIC_MODEL: LLM = Anthropic_Claude_Sonnet_4_5() if _anthropic_client else OpenAI_GPT_4_1()
# Model assignments
EXPLAIN_MODEL: LLM = _OPENAI_MODEL
PLAN_MODEL: LLM = _OPENAI_MODEL
EXECUTE_MODEL: LLM = _OPENAI_MODEL
OPTIMIZE_MODEL: LLM = _OPENAI_MODEL
RANKING_MODEL: LLM = _OPENAI_MODEL
REFINEMENT_MODEL: LLM = _ANTHROPIC_MODEL
EXPLANATIONS_MODEL: LLM = _ANTHROPIC_MODEL
OPTIMIZATION_REVIEW_MODEL: LLM = _ANTHROPIC_MODEL
CODE_REPAIR_MODEL: LLM = _ANTHROPIC_MODEL
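
The new module above routes every provider through a single `call_llm` entry point that returns a normalized `LLMResponse`. A minimal usage sketch, assuming the module is importable as `aiservice.llm` and one of the API keys is set in the environment (the prompt text is illustrative):

import asyncio

from aiservice.llm import EXPLAIN_MODEL, calculate_llm_cost, call_llm


async def main() -> None:
    # call_llm hides the OpenAI/Anthropic split; the caller only passes the
    # model's name and type and gets back a normalized LLMResponse.
    response = await call_llm(
        model_name=EXPLAIN_MODEL.name,
        model_type=EXPLAIN_MODEL.model_type,
        messages=[
            {"role": "system", "content": "You are a concise assistant."},
            {"role": "user", "content": "Summarize what a race condition is."},
        ],
        max_tokens=512,
    )
    print(response.content)
    print(f"tokens: {response.usage.input_tokens} in / {response.usage.output_tokens} out")
    # raw_response keeps the provider object, so calculate_llm_cost can
    # branch on prompt_tokens (OpenAI) vs input_tokens (Anthropic).
    print(f"cost: ${calculate_llm_cost(response.raw_response, EXPLAIN_MODEL):.4f}")


asyncio.run(main())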

@@ -23,6 +23,7 @@ RATE_LIMIT_MAX = int(os.getenv("RATE_LIMIT_MAX", "40"))
# - Consider using asyncio.PriorityQueue or celery with priority tasks
# - Reference: authapp/models.py:Subscriptions.plan_type, CFAPIKeys.tier
@async_only_middleware
class RateLimitMiddleware:
def __init__(self, get_response) -> None:

@@ -4,6 +4,7 @@ import logging
import sentry_sdk
from asgiref.sync import iscoroutinefunction, markcoroutinefunction
from django.db import IntegrityError
from django.db.models import F
from django.http import JsonResponse
from django.utils.decorators import async_only_middleware
from django.utils.timezone import now
@@ -140,16 +141,28 @@ class TrackUsageMiddleware:
status=403,
)
# Increment usage
subscription.optimizations_used = current_used + cost
subscription.total_lifetime_optimizations += cost
await subscription.asave(update_fields=["optimizations_used", "total_lifetime_optimizations"])
# Atomically increment usage using F() expressions to prevent race conditions.
# This ensures concurrent requests each increment the counter correctly
# instead of overwriting each other's updates (lost update problem).
await Subscriptions.objects.filter(user_id=user_id).aupdate(
optimizations_used=F("optimizations_used") + cost,
total_lifetime_optimizations=F("total_lifetime_optimizations") + cost,
)
# Re-read to get the actual updated value for the response
updated_subscription = await Subscriptions.objects.filter(user_id=user_id).afirst()
new_used = updated_subscription.optimizations_used if updated_subscription else current_used + cost
logging.debug(
f"track_usage_middleware.py|__call__|Atomic update completed: "
f"user_id={user_id}, endpoint={endpoint}, cost={cost}, new_used={new_used}"
)
# Attach subscription info to request
request.subscription_info = {
"userId": user_id,
"tier": subscription.plan_type,
"used": current_used + cost,
"used": new_used,
"limit": subscription.optimizations_limit,
}
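
The switch to `F()` expressions turns the read-modify-write above into a single SQL `UPDATE`, so concurrent requests cannot overwrite each other. A standalone sketch of the pattern, using a hypothetical `Counter` model that is not part of this codebase:

from django.db import models
from django.db.models import F


class Counter(models.Model):
    used = models.IntegerField(default=0)


def unsafe_increment(pk: int) -> None:
    # Read-modify-write: two concurrent requests can both read used=5
    # and both write used=6, losing one increment.
    counter = Counter.objects.get(pk=pk)
    counter.used = counter.used + 1
    counter.save(update_fields=["used"])


def atomic_increment(pk: int) -> None:
    # The increment runs inside the UPDATE statement ("used = used + 1"),
    # so the database serializes concurrent writers and none are lost.
    Counter.objects.filter(pk=pk).update(used=F("used") + 1)
    # F() values are resolved by the database; re-read to see the result,
    # exactly as the middleware does with afirst() above.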

@@ -1,222 +0,0 @@
import os
from typing import Any, Literal
from pydantic.dataclasses import dataclass
# The following pricing information is based on public OpenAI and Claude documentation
# as of August 2025. Prices can change, so always check the official:
# https://docs.anthropic.com/en/docs/about-claude/pricing
# https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/
# The pricing is in USD per 1M tokens.
# Some of the pricing are placeholder from Open AI https://platform.openai.com/docs/pricing?latest-pricing=flex.
@dataclass
class LLM:
name: str # On Azure OpenAI Service, this is the deployment name
max_tokens: int
model_type: Literal["openai", "anthropic", "google"]
# Add new pricing attributes in USD per 1M tokens
input_cost: float | None = None
output_cost: float | None = None
# name of the model deployment on Azure OpenAI Service
@dataclass
class GPT_4_OMNI(LLM):
name: str = "gpt-4o-2" if os.environ.get("OPENAI_API_TYPE") == "azure" else "gpt-4o"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 128000
input_cost: float = 2.50
output_cost: float = 10.00
@dataclass
class GPT_4_128k(LLM):
name: str = "gpt-4-1106-preview"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 128000
input_cost: float = 10.00
output_cost: float = 30.00
@dataclass
class GPT_4_32k(LLM):
name: str = "gpt4-32k"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 32768
input_cost: float = 60.00
output_cost: float = 120.00
@dataclass
class GPT_4(LLM):
name: str = "gpt-4-0613"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 8192
input_cost: float = 30.00
output_cost: float = 60.00
@dataclass
class GPT_3_5_Turbo_16k(LLM):
name: str = "gpt-3.5-turbo-16k"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 16384
input_cost: float = 3.00
output_cost: float = 4.00
@dataclass
class GPT_3_5_Turbo(LLM):
name: str = "gpt-3.5-turbo"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 4096
input_cost: float = 0.50
output_cost: float = 1.50
@dataclass
class Antropic_Claude_3_7(LLM):
name: str = "claude-3-7-sonnet-20250219"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 100000
input_cost: float = 3.00
output_cost: float = 15.00
@dataclass
class Anthropic_Claude_4(LLM):
name: str = "claude-sonnet-4-20250514"
model_type: Literal["openai", "anthropic", "google"] = "anthropic"
max_tokens: int = 100000
input_cost: float = 3.00
output_cost: float = 15.00
@dataclass
class OpenAI_GPT_4_1(LLM):
# name: str = "azure/gpt-4.1"
name: str = "gpt-4.1"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 100000
input_cost: float = 2.00
output_cost: float = 8.00
@dataclass
class Gemini_2_5(LLM):
name: str = "gemini/gemini-2.5-pro-preview-03-25"
model_type: Literal["openai", "anthropic", "google"] = "google"
max_tokens: int = 100000
@dataclass
class OpenAI_GPT_O_3(LLM):
name: str = "azure/o3"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 100000
input_cost: float = 2.00
output_cost: float = 8.00
@dataclass
class OpenAI_GPT_O_4_MINI(LLM):
name: str = "azure/o4-mini"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 100000
input_cost: float = 1.10
output_cost: float = 4.40
@dataclass
class GPT_5(LLM): # IT IS TOO SLOW AT THE MOMENT, just here for documentation
name: str = "gpt-5-codex"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 100000
input_cost: float = 1.25
output_cost: float = 10.00
@dataclass
class GPT_4_1_Nano(LLM):
name: str = "gpt-4.1-nano"
model_type: Literal["openai", "anthropic", "google"] = "openai"
max_tokens: int = 100000
input_cost: float = 0.10
output_cost: float = 0.40
def calculate_llm_cost(response: Any, llm: LLM) -> float | None:
"""Calculates the cost of an OpenAI API chat completion call.
Args:
response (dict): The JSON response from the OpenAI API call.
Returns:
float: The total cost in USD, or None if the cost cannot be calculated.
"""
try:
usage = response.usage
if hasattr(usage, "prompt_tokens"): # for openai
prompt_tokens = usage.prompt_tokens
completion_tokens = usage.completion_tokens
else: # for claude
prompt_tokens = usage.input_tokens
completion_tokens = usage.output_tokens
prompt_cost = (prompt_tokens / 1_000_000) * llm.input_cost
completion_cost = (completion_tokens / 1_000_000) * llm.output_cost
total_cost: float = prompt_cost + completion_cost
return total_cost
except Exception as e:
print(f"An error occurred: {e}")
return None
def _get_openai_model() -> LLM:
"""Return OpenAI GPT-4.1 if available, otherwise falls back to Anthropic Claude 4.
Returns:
LLM: The appropriate model instance based on available API keys.
"""
if os.environ.get("AZURE_OPENAI_API_KEY") or os.environ.get("OPENAI_API_KEY"):
return OpenAI_GPT_4_1()
# Fall back to Anthropic if OpenAI not available
if os.environ.get("ANTHROPIC_API_KEY"):
return Anthropic_Claude_4()
# Default to OpenAI (will fail gracefully with clear error from env_specific.py)
return OpenAI_GPT_4_1()
def _get_anthropic_model() -> LLM:
"""Returns Anthropic Claude 4 if available, otherwise falls back to OpenAI GPT-4.1.
Returns:
LLM: The appropriate model instance based on available API keys.
""" # noqa: D401
if os.environ.get("ANTHROPIC_API_KEY"):
return Anthropic_Claude_4()
# Fall back to OpenAI if Anthropic not available
if os.environ.get("AZURE_OPENAI_API_KEY") or os.environ.get("OPENAI_API_KEY"):
return OpenAI_GPT_4_1()
# Default to Claude (will fail gracefully with clear error from env_specific.py)
return Anthropic_Claude_4()
# Dynamically select models based on available API keys
EXPLAIN_MODEL: LLM = _get_openai_model()
PLAN_MODEL: LLM = _get_openai_model()
EXECUTE_MODEL: LLM = _get_openai_model()
OPTIMIZE_MODEL: LLM = _get_openai_model()
RANKING_MODEL: LLM = _get_openai_model()
REFINEMENT_MODEL: LLM = _get_anthropic_model()
EXPLANATIONS_MODEL: LLM = _get_anthropic_model()
OPTIMIZATION_REVIEW_MODEL: LLM = _get_anthropic_model()
CODE_REPAIR_MODEL: LLM = _get_anthropic_model()

@@ -0,0 +1,31 @@
"""Observability module for tracking LLM calls and errors.
This module provides utilities for comprehensive observability in the optimization pipeline:
- Structured JSON logging
- LLM call monitoring with full prompt/response capture
- Error tracking and classification
- Database persistence for historical analysis
Main components:
- models: Django models for observability tables
- logger: Structured logging utilities
- decorators: Decorators for automatic LLM call tracking
- database: Database recorders for persisting observability data
"""
# Models
# Logging utilities
from .logger import EventType, LogContext, StructuredFormatter, get_observability_logger, log_with_context
from .models import LLMCall, OptimizationError
__all__ = [
# Models
"LLMCall",
"OptimizationError",
# Logging
"StructuredFormatter",
"get_observability_logger",
"log_with_context",
"LogContext",
"EventType",
]
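
A small sketch of consuming the package's public surface as exported above (the trace ID is illustrative):

from aiservice.observability import EventType, get_observability_logger

logger = get_observability_logger(__name__, "optimizer")
logger.info(
    "Optimization started",
    extra={"trace_id": "abc-123", "event_type": EventType.OPTIMIZATION_STARTED},
)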

@@ -0,0 +1,264 @@
"""Database recorders for persisting observability data.
Simple recorders for manually tracking LLM calls, steps, and errors.
No decorators - just call the methods directly where needed.
"""
import uuid
from typing import Any
from asgiref.sync import sync_to_async
from .models import LLMCall, OptimizationError
class LLMCallRecorder:
"""Records LLM calls to database.
Usage:
recorder = LLMCallRecorder()
# Start tracking
llm_call_id = await recorder.record_llm_call_start(
trace_id="abc-123",
call_type="optimization",
model_name="gpt-4",
system_prompt="You are an optimizer...",
user_prompt="Optimize this code...",
messages=[...],
n_candidates=5
)
# Record completion
await recorder.record_llm_call_completion(
llm_call_id=llm_call_id,
status="success",
raw_response=response.choices[0].message.content,
prompt_tokens=response.usage.prompt_tokens,
completion_tokens=response.usage.completion_tokens,
llm_cost=0.15,
latency_ms=1250,
candidates_generated=5,
candidates_valid=4
)
"""
@sync_to_async
def record_llm_call_start(
self,
trace_id: str,
call_type: str,
model_name: str,
system_prompt: str,
user_prompt: str,
messages: list[dict],
temperature: float | None = None,
n_candidates: int | None = None,
max_tokens: int | None = None,
user_id: str | None = None,
python_version: str | None = None,
is_async: bool = False,
context: dict[str, Any] | None = None,
) -> str:
"""Record LLM call start to database.
Returns:
llm_call_id: UUID of the created record
"""
llm_call_id = str(uuid.uuid4())
LLMCall.objects.create(
id=llm_call_id,
trace_id=trace_id,
call_type=call_type,
model_name=model_name,
system_prompt=system_prompt,
user_prompt=user_prompt,
messages=messages,
temperature=temperature,
n_candidates=n_candidates,
max_tokens=max_tokens,
status="in_progress",
user_id=user_id,
python_version=python_version,
is_async=is_async,
context=context,
)
return llm_call_id
@sync_to_async
def record_llm_call_completion(
self,
llm_call_id: str,
status: str | None = None,
raw_response: str | None = None,
parsed_response: dict | None = None,
prompt_tokens: int | None = None,
completion_tokens: int | None = None,
total_tokens: int | None = None,
llm_cost: float | None = None,
latency_ms: int | None = None,
parsing_status: str | None = None,
candidates_generated: int | None = None,
candidates_valid: int | None = None,
parsing_errors: dict | None = None,
error_type: str | None = None,
error_message: str | None = None,
) -> None:
"""Update LLM call record with completion data.
Only updates fields that are explicitly provided (not None).
Use this for both full completions and partial updates (e.g., parsing status only).
"""
# Build update dict with only provided values
update_fields: dict[str, Any] = {}
if status is not None:
update_fields["status"] = status
if raw_response is not None:
update_fields["raw_response"] = raw_response
if parsed_response is not None:
update_fields["parsed_response"] = parsed_response
if prompt_tokens is not None:
update_fields["prompt_tokens"] = prompt_tokens
if completion_tokens is not None:
update_fields["completion_tokens"] = completion_tokens
if total_tokens is not None:
update_fields["total_tokens"] = total_tokens
if llm_cost is not None:
update_fields["llm_cost"] = llm_cost
if latency_ms is not None:
update_fields["latency_ms"] = latency_ms
if parsing_status is not None:
update_fields["parsing_status"] = parsing_status
if candidates_generated is not None:
update_fields["candidates_generated"] = candidates_generated
if candidates_valid is not None:
update_fields["candidates_valid"] = candidates_valid
if parsing_errors is not None:
update_fields["parsing_errors"] = parsing_errors
if error_type is not None:
update_fields["error_type"] = error_type
if error_message is not None:
update_fields["error_message"] = error_message
LLMCall.objects.filter(id=llm_call_id).update(**update_fields)
@sync_to_async
def record_llm_call_failure(
self, llm_call_id: str, error_type: str, error_message: str, latency_ms: int | None = None, retry_count: int = 0
) -> None:
"""Update LLM call record with failure information."""
LLMCall.objects.filter(id=llm_call_id).update(
status="failed",
error_type=error_type,
error_message=error_message,
latency_ms=latency_ms,
retry_count=retry_count,
)
class ErrorRecorder:
"""Records optimization errors to database.
Usage:
recorder = ErrorRecorder()
error_id = await recorder.record_error(
trace_id="abc-123",
error_type="test_failure",
error_category="system_error",
severity="error",
error_message="Test suite failed",
context={"test_name": "test_optimization"}
)
"""
@sync_to_async
def record_error(
self,
trace_id: str,
error_type: str,
error_category: str,
severity: str,
error_message: str,
llm_call_id: str | None = None,
error_code: str | None = None,
stack_trace: str | None = None,
context: dict[str, Any] | None = None,
) -> str:
"""Record error to database.
Returns:
error_id: UUID of the created error record
"""
error_id = str(uuid.uuid4())
OptimizationError.objects.create(
id=error_id,
trace_id=trace_id,
llm_call_id=llm_call_id,
error_type=error_type,
error_category=error_category,
severity=severity,
error_message=error_message,
error_code=error_code,
stack_trace=stack_trace,
context=context,
)
return error_id
# Convenience function for quick LLM call tracking
async def track_llm_call_simple(
trace_id: str,
call_type: str,
model_name: str,
system_prompt: str,
user_prompt: str,
messages: list[dict],
response: Any, # OpenAI response object
llm_cost: float,
latency_ms: int,
candidates_generated: int = 0,
candidates_valid: int = 0,
) -> str:
"""Simple helper to track an LLM call in one shot.
Use this after the LLM call completes successfully.
Returns:
llm_call_id: UUID of the created record
"""
recorder = LLMCallRecorder()
# Record start
llm_call_id = await recorder.record_llm_call_start(
trace_id=trace_id,
call_type=call_type,
model_name=model_name,
system_prompt=system_prompt,
user_prompt=user_prompt,
messages=messages,
)
# Record completion
await recorder.record_llm_call_completion(
llm_call_id=llm_call_id,
status="success",
raw_response=response.choices[0].message.content if response.choices else None,
prompt_tokens=response.usage.prompt_tokens if response.usage else None,
completion_tokens=response.usage.completion_tokens if response.usage else None,
total_tokens=response.usage.total_tokens if response.usage else None,
llm_cost=llm_cost,
latency_ms=latency_ms,
candidates_generated=candidates_generated,
candidates_valid=candidates_valid,
)
return llm_call_id
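
A sketch of the one-shot helper in context, assuming an already-configured async OpenAI-style `client` (a stand-in here) and that the cost is computed elsewhere:

import time

from aiservice.observability.database import track_llm_call_simple


async def optimize_and_track(trace_id: str, client, messages: list[dict]) -> str:
    start = time.time()
    response = await client.chat.completions.create(model="gpt-4.1", messages=messages)
    # One call records both the start and completion rows.
    return await track_llm_call_simple(
        trace_id=trace_id,
        call_type="optimization",
        model_name="gpt-4.1",
        system_prompt=messages[0]["content"],
        user_prompt=messages[1]["content"],
        messages=messages,
        response=response,
        llm_cost=0.0,  # in real use, compute via calculate_llm_cost
        latency_ms=int((time.time() - start) * 1000),
        candidates_generated=len(response.choices),
    )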

@@ -0,0 +1,325 @@
"""Observability decorators for automatic LLM call tracking.
Usage:
@observe_llm_call(call_type="optimization")
async def my_llm_function(trace_id: str, model: AIModel, ...):
# Your LLM call here
response = await llm_client.chat.completions.create(...)
return response
The decorator automatically:
- Records call start (non-blocking)
- Captures timing and response
- Records completion (non-blocking)
- Handles errors gracefully
"""
import asyncio
import logging
import time
from collections.abc import Callable
from functools import wraps
from typing import Any
from aiservice.llm import Anthropic_Claude_Sonnet_4_5, OpenAI_GPT_4_1, calculate_llm_cost
from aiservice.observability.database import ErrorRecorder, LLMCallRecorder
logger = logging.getLogger(__name__)
def observe_llm_call(
call_type: str, extract_trace_id: Callable | None = None, extract_model_name: Callable | None = None
):
"""Decorator to automatically observe LLM calls.
Args:
call_type: Type of LLM call ('optimization', 'test_generation', etc.)
extract_trace_id: Optional function to extract trace_id from args/kwargs
extract_model_name: Optional function to extract model name from args/kwargs
Example:
@observe_llm_call("optimization")
async def call_optimization_llm(
trace_id: str,
model: AIModel,
system_prompt: str,
user_prompt: str,
n: int = 1,
):
# Just the LLM call - observability is automatic
return await llm_client.chat.completions.create(...)
"""
def decorator(func: Callable) -> Callable:
@wraps(func)
async def wrapper(*args, **kwargs) -> Any:
# Extract trace_id (try multiple strategies)
trace_id = _extract_trace_id(args, kwargs, extract_trace_id)
# Extract model name
model_name = _extract_model_name(args, kwargs, extract_model_name)
# Extract prompts and messages
messages = kwargs.get("messages", [])
system_prompt = kwargs.get("system_prompt", "")
user_prompt = kwargs.get("user_prompt", "")
# If prompts not provided directly, extract from messages
if not system_prompt and not user_prompt and messages:
for msg in messages:
if msg.get("role") == "system" and not system_prompt:
system_prompt = msg.get("content", "")
elif msg.get("role") == "user" and not user_prompt:
user_prompt = msg.get("content", "")
# Extract additional params
temperature = kwargs.get("temperature")
n_candidates = kwargs.get("n", kwargs.get("n_candidates", 1))
user_id = kwargs.get("user_id")
python_version = kwargs.get("python_version")
context = kwargs.get("context")
# Initialize recorders
llm_recorder = LLMCallRecorder()
error_recorder = ErrorRecorder()
llm_call_id = None
# Record start (non-blocking)
if trace_id and model_name:
llm_call_id = await _record_start_background(
llm_recorder=llm_recorder,
trace_id=trace_id,
call_type=call_type,
model_name=model_name,
system_prompt=system_prompt,
user_prompt=user_prompt,
messages=messages,
temperature=temperature,
n_candidates=n_candidates,
user_id=user_id,
python_version=python_version,
context=context,
)
# Execute the actual function
start_time = time.time()
try:
result = await func(*args, **kwargs)
latency_ms = int((time.time() - start_time) * 1000)
# Record success (non-blocking)
if llm_call_id:
_record_completion_background(
llm_recorder=llm_recorder,
llm_call_id=llm_call_id,
status="success",
result=result,
latency_ms=latency_ms,
model_name=model_name,
)
return result
except Exception as e:
latency_ms = int((time.time() - start_time) * 1000)
# Record error (non-blocking)
if trace_id:
_record_error_background(
error_recorder=error_recorder,
llm_recorder=llm_recorder,
trace_id=trace_id,
llm_call_id=llm_call_id,
error=e,
latency_ms=latency_ms,
model_name=model_name,
n_candidates=n_candidates,
)
raise
return wrapper
return decorator
async def _record_start_background(
llm_recorder: LLMCallRecorder,
trace_id: str,
call_type: str,
model_name: str,
system_prompt: str,
user_prompt: str,
messages: list,
temperature: float | None,
n_candidates: int,
user_id: str | None,
python_version: str | None,
context: dict | None,
) -> str | None:
"""Record LLM call start in background (non-blocking)."""
try:
# If messages not provided, construct from prompts
if not messages and (system_prompt or user_prompt):
messages = []
if system_prompt:
messages.append({"role": "system", "content": system_prompt})
if user_prompt:
messages.append({"role": "user", "content": user_prompt})
llm_call_id = await llm_recorder.record_llm_call_start(
trace_id=trace_id,
call_type=call_type,
model_name=model_name,
system_prompt=system_prompt,
user_prompt=user_prompt,
messages=messages,
temperature=temperature,
n_candidates=n_candidates,
user_id=user_id,
python_version=python_version,
context=context,
)
return llm_call_id
except Exception as e:
logger.warning(f"Observability: Failed to record LLM start ({call_type}): {e}")
return None
def _record_completion_background(
llm_recorder: LLMCallRecorder, llm_call_id: str, status: str, result: Any, latency_ms: int, model_name: str
):
"""Record LLM call completion in background (non-blocking)."""
async def _record():
try:
# Extract data from result (handles OpenAI response format)
raw_response = None
prompt_tokens = None
completion_tokens = None
total_tokens = None
llm_cost = None
candidates_generated = None
# Handle OpenAI ChatCompletion response
if hasattr(result, "model_dump_json"):
raw_response = result.model_dump_json(indent=2)
if hasattr(result, "usage"):
prompt_tokens = getattr(result.usage, "prompt_tokens", None)
completion_tokens = getattr(result.usage, "completion_tokens", None)
total_tokens = getattr(result.usage, "total_tokens", None)
if hasattr(result, "choices"):
candidates_generated = len(result.choices)
# Calculate cost if we have a result with the right structure
try:
# Map model names to model objects
model_map = {"gpt-4.1": OpenAI_GPT_4_1(), "claude-sonnet-4-5": Anthropic_Claude_Sonnet_4_5()}
if model_name in model_map:
llm_cost = calculate_llm_cost(result, model_map[model_name])
except Exception:
pass
await llm_recorder.record_llm_call_completion(
llm_call_id=llm_call_id,
status=status,
raw_response=raw_response,
prompt_tokens=prompt_tokens,
completion_tokens=completion_tokens,
total_tokens=total_tokens,
llm_cost=llm_cost,
latency_ms=latency_ms,
candidates_generated=candidates_generated,
)
except Exception as e:
logger.warning(f"Observability: Failed to record completion: {e}")
# Fire and forget
asyncio.create_task(_record())
def _record_error_background(
error_recorder: ErrorRecorder,
llm_recorder: LLMCallRecorder,
trace_id: str,
llm_call_id: str | None,
error: Exception,
latency_ms: int,
model_name: str,
n_candidates: int,
):
"""Record error in background (non-blocking)."""
async def _record():
try:
# Record error to optimization_errors table
await error_recorder.record_error(
trace_id=trace_id,
error_type="llm_api",
error_category="llm_error",
severity="error",
error_message=str(error),
error_code=type(error).__name__,
context={"model": model_name, "n_candidates": n_candidates},
)
# Update LLM call status to failed
if llm_call_id:
await llm_recorder.record_llm_call_completion(
llm_call_id=llm_call_id,
status="failed",
error_type=type(error).__name__,
error_message=str(error),
latency_ms=latency_ms,
)
except Exception as e:
logger.warning(f"Observability: Failed to record error: {e}")
# Fire and forget
asyncio.create_task(_record())
def _extract_trace_id(args: tuple, kwargs: dict, custom_extractor: Callable | None) -> str:
"""Extract trace_id from function arguments."""
if custom_extractor:
return custom_extractor(args, kwargs)
# Try common patterns
if "trace_id" in kwargs:
return kwargs["trace_id"]
# Check if first arg might be trace_id (string UUID format)
if args and isinstance(args[0], str) and len(args[0]) == 36:
return args[0]
# Check in data objects
if "data" in kwargs and hasattr(kwargs["data"], "trace_id"):
return kwargs["data"].trace_id
logger.warning("Observability: Could not extract trace_id from function arguments")
return "unknown"
def _extract_model_name(args: tuple, kwargs: dict, custom_extractor: Callable | None) -> str:
"""Extract model name from function arguments."""
if custom_extractor:
return custom_extractor(args, kwargs)
# Try common patterns
if "model_name" in kwargs:
return kwargs["model_name"]
if "model" in kwargs:
model = kwargs["model"]
# Handle AIModel object
if hasattr(model, "name"):
return model.name
# Handle string
if isinstance(model, str):
return model
logger.warning("Observability: Could not extract model_name from function arguments")
return "unknown"

@@ -0,0 +1,257 @@
"""Structured JSON logging utilities for observability.
Provides JSON-formatted logging with consistent schema for machine-readable logs
that can be easily ingested into log analysis systems.
"""
import json
import logging
from datetime import UTC, datetime
from typing import Any
class StructuredFormatter(logging.Formatter):
"""JSON formatter for structured logging.
Outputs logs in JSON format with consistent fields:
- timestamp: ISO 8601 timestamp in UTC
- log_level: Log level (INFO, ERROR, etc.)
- logger_name: Logger name
- message: Log message
- module: Python module name
- function: Function name
- line: Line number
- trace_id: Optimization trace ID (if available)
- event_type: Event type for categorization (if available)
- metadata: Additional structured data (if available)
"""
def format(self, record: logging.LogRecord) -> str:
"""Format log record as JSON."""
log_data: dict[str, Any] = {
"timestamp": datetime.now(UTC).isoformat(),
"log_level": record.levelname,
"logger_name": record.name,
"message": record.getMessage(),
"module": record.module,
"function": record.funcName,
"line": record.lineno,
}
# Add trace_id if present (critical for correlating logs)
if hasattr(record, "trace_id"):
log_data["trace_id"] = record.trace_id
# Add event_type if present (for categorization)
if hasattr(record, "event_type"):
log_data["event_type"] = record.event_type
# Add metadata if present (for additional context)
if hasattr(record, "metadata"):
log_data["metadata"] = record.metadata
# Add user_id if present
if hasattr(record, "user_id"):
log_data["user_id"] = record.user_id
# Add exception info if present
if record.exc_info:
log_data["exception"] = self.formatException(record.exc_info)
return json.dumps(log_data)
def get_observability_logger(name: str, component: str = "aiservice") -> logging.Logger:
"""Get a logger configured for observability.
Args:
name: Logger name (typically __name__ of the calling module)
component: Component name for organization (default: "aiservice")
Returns:
Configured logger instance
Usage:
logger = get_observability_logger(__name__, "optimizer")
logger.info(
"Optimization started",
extra={
"trace_id": "abc-123",
"event_type": "optimization.started",
"metadata": {"model": "gpt-4", "n_candidates": 5}
}
)
"""
logger_name = f"{component}.{name}"
logger = logging.getLogger(logger_name)
# Logger configuration will be set globally in settings.py
# This just returns the logger with the correct name hierarchy
return logger
def log_with_context(
logger: logging.Logger,
level: str,
message: str,
trace_id: str | None = None,
event_type: str | None = None,
user_id: str | None = None,
metadata: dict[str, Any] | None = None,
) -> None:
"""Log a message with structured context.
Helper function for logging with consistent structured context fields.
Args:
logger: Logger instance
level: Log level ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')
message: Log message
trace_id: Optimization trace ID for correlation
event_type: Event type for categorization
user_id: User ID for user tracking
metadata: Additional structured data
Usage:
log_with_context(
logger,
'INFO',
'LLM call completed',
trace_id='abc-123',
event_type='llm.call_success',
metadata={'model': 'gpt-4', 'latency_ms': 1250}
)
"""
extra: dict[str, Any] = {}
if trace_id:
extra["trace_id"] = trace_id
if event_type:
extra["event_type"] = event_type
if user_id:
extra["user_id"] = user_id
if metadata:
extra["metadata"] = metadata
log_level = getattr(logging, level.upper())
logger.log(log_level, message, extra=extra)
class LogContext:
"""Context manager for logging with persistent context fields.
Allows setting context fields (like trace_id) that will be included
in all log messages within the context.
Usage:
with LogContext(logger, trace_id="abc-123", user_id="user-456") as log:
log.info("Step 1 started")
# ... do work ...
log.info("Step 1 completed")
# All logs will include trace_id and user_id
"""
def __init__(self, logger: logging.Logger, trace_id: str | None = None, user_id: str | None = None, **kwargs: Any):
"""Initialize log context.
Args:
logger: Logger instance
trace_id: Optimization trace ID
user_id: User ID
**kwargs: Additional context fields to include in all logs
"""
self.logger = logger
self.context: dict[str, Any] = {}
if trace_id:
self.context["trace_id"] = trace_id
if user_id:
self.context["user_id"] = user_id
self.context.update(kwargs)
def __enter__(self) -> "LogContext":
"""Enter context."""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Exit context."""
# Context is automatically released
def log(
self, level: str, message: str, event_type: str | None = None, metadata: dict[str, Any] | None = None
) -> None:
"""Log message with context."""
extra = dict(self.context)
if event_type:
extra["event_type"] = event_type
if metadata:
extra["metadata"] = metadata
log_level = getattr(logging, level.upper())
self.logger.log(log_level, message, extra=extra)
def debug(self, message: str, **kwargs):
"""Log debug message."""
self.log("DEBUG", message, **kwargs)
def info(self, message: str, **kwargs):
"""Log info message."""
self.log("INFO", message, **kwargs)
def warning(self, message: str, **kwargs):
"""Log warning message."""
self.log("WARNING", message, **kwargs)
def error(self, message: str, **kwargs):
"""Log error message."""
self.log("ERROR", message, **kwargs)
def critical(self, message: str, **kwargs):
"""Log critical message."""
self.log("CRITICAL", message, **kwargs)
# Event type constants for consistent categorization
class EventType:
"""Standard event types for optimization flow."""
# Validation events
VALIDATION_STARTED = "validation.started"
VALIDATION_SUCCESS = "validation.success"
VALIDATION_FAILED = "validation.failed"
# LLM events
LLM_CALL_STARTED = "llm.call_started"
LLM_CALL_SUCCESS = "llm.call_success"
LLM_CALL_FAILED = "llm.call_failed"
LLM_PARSE_ERROR = "llm.parse_error"
# Testing events
TEST_GENERATION_STARTED = "test.generation_started"
TEST_GENERATION_SUCCESS = "test.generation_success"
TEST_GENERATION_FAILED = "test.generation_failed"
TEST_EXECUTION_STARTED = "test.execution_started"
TEST_EXECUTION_SUCCESS = "test.execution_success"
TEST_EXECUTION_FAILED = "test.execution_failed"
# Correctness events
CORRECTNESS_VERIFIED = "correctness.verified"
CORRECTNESS_FAILED = "correctness.failed"
# Performance events
BENCHMARK_STARTED = "benchmark.started"
BENCHMARK_SUCCESS = "benchmark.success"
BENCHMARK_FAILED = "benchmark.failed"
# Ranking events
RANKING_STARTED = "ranking.started"
RANKING_SUCCESS = "ranking.success"
RANKING_FAILED = "ranking.failed"
# Optimization flow events
OPTIMIZATION_STARTED = "optimization.started"
OPTIMIZATION_SUCCESS = "optimization.success"
OPTIMIZATION_FAILED = "optimization.failed"
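
The formatter only takes effect once attached to a handler, which per the module happens globally in settings.py; a minimal wiring sketch for local experimentation:

import logging
import sys

from aiservice.observability.logger import StructuredFormatter, log_with_context

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(StructuredFormatter())
logger = logging.getLogger("aiservice.demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Emits one JSON line carrying timestamp, trace_id, event_type, and metadata.
log_with_context(
    logger,
    "INFO",
    "LLM call completed",
    trace_id="abc-123",
    event_type="llm.call_success",
    metadata={"model": "gpt-4.1", "latency_ms": 1250},
)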

@@ -0,0 +1,114 @@
"""Django models for observability tables.
These models map to Prisma-managed tables for LLM tracking, step tracing,
and error monitoring in the optimization pipeline.
"""
import uuid
from django.db import models
class LLMCall(models.Model):
"""Track LLM API calls with full prompt/response data for prompt engineering analysis."""
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
trace_id = models.CharField(max_length=36)
call_type = models.CharField(max_length=50) # 'optimization', 'test_generation', 'ranking', 'refinement'
model_name = models.CharField(max_length=100) # 'gpt-4', 'gpt-3.5-turbo', etc.
# Prompt capture (full content for prompt engineering analysis)
system_prompt = models.TextField()
user_prompt = models.TextField()
messages = models.JSONField() # Full messages array sent to LLM
# Request configuration
temperature = models.FloatField(null=True, blank=True)
n_candidates = models.IntegerField(null=True, blank=True) # Number of completions requested
max_tokens = models.IntegerField(null=True, blank=True)
# Response capture
raw_response = models.TextField(null=True, blank=True) # Full LLM response before parsing
parsed_response = models.JSONField(null=True, blank=True) # Structured parsed response
# Token usage and cost
prompt_tokens = models.IntegerField(null=True, blank=True)
completion_tokens = models.IntegerField(null=True, blank=True)
total_tokens = models.IntegerField(null=True, blank=True)
llm_cost = models.FloatField(null=True, blank=True)
# Timing
latency_ms = models.IntegerField(null=True, blank=True)
# Status and errors
status = models.CharField(max_length=20) # 'success', 'failed', 'partial_success'
retry_count = models.IntegerField(default=0)
error_type = models.CharField(max_length=50, null=True, blank=True)
error_message = models.TextField(null=True, blank=True)
# Parsing results
parsing_status = models.CharField(max_length=20, null=True, blank=True) # 'success', 'failed', 'partial'
candidates_generated = models.IntegerField(null=True, blank=True, default=0)
candidates_valid = models.IntegerField(null=True, blank=True, default=0) # Passed syntax validation
parsing_errors = models.JSONField(null=True, blank=True) # Details of parsing failures
# Context
user_id = models.CharField(max_length=255, null=True, blank=True)
python_version = models.CharField(max_length=20, null=True, blank=True)
is_async = models.BooleanField(default=False)
context = models.JSONField(null=True, blank=True) # Additional metadata
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
managed = False
db_table = "llm_calls"
indexes = [
models.Index(fields=["trace_id"]),
models.Index(fields=["call_type"]),
models.Index(fields=["model_name"]),
models.Index(fields=["status"]),
models.Index(fields=["-created_at"]),
models.Index(fields=["call_type", "status"]),
models.Index(fields=["parsing_status"]),
]
def __str__(self):
return f"LLMCall({self.call_type}, {self.model_name}, {self.status})"
class OptimizationError(models.Model):
"""Comprehensive error tracking with classification and recovery attempts."""
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
trace_id = models.CharField(max_length=36)
llm_call_id = models.UUIDField(null=True, blank=True)
# Error classification
error_type = models.CharField(
max_length=50
) # 'validation', 'llm_api', 'llm_parsing', 'test_failure', 'compilation'
error_category = models.CharField(max_length=50) # 'user_error', 'system_error', 'llm_error', 'infrastructure'
severity = models.CharField(max_length=20) # 'critical', 'error', 'warning', 'info'
# Error details
error_message = models.TextField()
error_code = models.CharField(max_length=50, null=True, blank=True)
stack_trace = models.TextField(null=True, blank=True)
context = models.JSONField(null=True, blank=True) # Additional error context including test failure details
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
managed = False
db_table = "optimization_errors"
indexes = [
models.Index(fields=["trace_id"]),
models.Index(fields=["error_type"]),
models.Index(fields=["error_category"]),
models.Index(fields=["severity"]),
models.Index(fields=["llm_call_id"]),
]
def __str__(self):
return f"OptimizationError({self.error_type}, {self.severity})"

@@ -13,8 +13,8 @@ from pydantic import ValidationError
from aiservice.analytics.posthog import ph
from aiservice.common_utils import validate_trace_id
from aiservice.env_specific import debug_log_sensitive_data, llm_clients
from aiservice.models.aimodels import CODE_REPAIR_MODEL, calculate_llm_cost
from aiservice.env_specific import debug_log_sensitive_data
from aiservice.llm import CODE_REPAIR_MODEL, calculate_llm_cost, call_llm
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
from optimizer.models import OptimizedCandidateSource
@@ -33,7 +33,7 @@ if TYPE_CHECKING:
ChatCompletionToolMessageParam,
)
from aiservice.models.aimodels import LLM
from aiservice.llm import LLM
code_repair_api = NinjaAPI(urls_namespace="code_repair")
@@ -58,9 +58,11 @@ async def code_repair(  # noqa: D417
- original code
- optimized code
- behaviour test diffs
Returns
-------
CodeRepairIntermediateResponseItemschema or CodeRepairErrorResponseSchema
"""
system_prompt = ctx.get_system_prompt()
user_prompt = ctx.get_user_prompt()
@@ -76,28 +78,31 @@
| ChatCompletionFunctionMessageParam
] = [system_message, user_message]
debug_log_sensitive_data(f"This was the user prompt\n {user_prompt}\n")
llm_client = llm_clients[optimize_model.model_type]
try:
output = await llm_client.with_options(max_retries=2).chat.completions.create(
model=optimize_model.name, messages=messages, n=1
)
llm_cost = calculate_llm_cost(output, optimize_model)
output = await call_llm(model_name=optimize_model.name, model_type=optimize_model.model_type, messages=messages)
llm_cost = calculate_llm_cost(output.raw_response, optimize_model)
except Exception as e:
logging.exception("Claude Code Generation error in code_repair")
sentry_sdk.capture_exception(e)
debug_log_sensitive_data(f"Failed to generate code for source:\n{ctx.data.original_source_code}")
return CodeRepairErrorResponseSchema(error=str(e))
debug_log_sensitive_data(f"ClaudeClient optimization response:\n{output.model_dump_json(indent=2)}")
debug_log_sensitive_data(f"ClaudeClient optimization response:\n{output.content}")
if output.usage is not None:
ph(user_id, "code_repair-usage", properties={"model": optimize_model.name, "usage": output.usage.json()})
results = [content for op in output.choices if (content := op.message.content)] # will be of size 1
ph(
user_id,
"code_repair-usage",
properties={
"model": optimize_model.name,
"usage": {"input_tokens": output.usage.input_tokens, "output_tokens": output.usage.output_tokens},
},
)
# Regex can't yet extract anything other than the search/replace block
explanation = results[0]
explanation = output.content
repaired_optimization = ""
try:
repaired_optimization = ctx.apply_patches_to_optimized_code(results[0])
repaired_optimization = ctx.apply_patches_to_optimized_code(output.content)
except (ValueError, ValidationError) as exc:
sentry_sdk.capture_exception(exc)
debug_log_sensitive_data(f"{type(exc).__name__} for source:\n{ctx.data.modified_source_code}")
@@ -194,7 +199,7 @@ async def repair(
}
},
)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost, user_id=request.user)
return 200, CodeRepairResponseItemschema(
source_code=code_repair_data.source_code,
optimization_id=code_repair_data.optimization_id,

@@ -16,6 +16,8 @@ from testgen.instrumentation.edit_generated_test import parse_module_to_cst
if TYPE_CHECKING:
from optimizer.diff_patches_utils.diff import Diff
class TestDiffScope(str, Enum):
RETURN_VALUE = "return_value"
STDOUT = "stdout"
@@ -110,7 +112,6 @@ class CodeRepairContext:
file_to_code = diff.run()
return group_code(file_to_code)
def is_valid(self, new_refined_code: str) -> bool:
if is_markdown_structure_changed(new_refined_code, self.data.modified_source_code):
return False

@@ -1,22 +1,24 @@
from __future__ import annotations
import logging
import re
import time
from typing import TYPE_CHECKING
import sentry_sdk
from aiservice.analytics.posthog import ph
from aiservice.common_utils import validate_trace_id
from aiservice.env_specific import create_llm_client, debug_log_sensitive_data, llm_clients
from aiservice.models.aimodels import EXPLANATIONS_MODEL, LLM, calculate_llm_cost
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
from ninja import NinjaAPI, Schema
from openai import OpenAIError
from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam
from packaging import version
from aiservice.analytics.posthog import ph
from aiservice.common_utils import validate_trace_id
from aiservice.env_specific import debug_log_sensitive_data
from aiservice.llm import EXPLANATIONS_MODEL, LLM, calculate_llm_cost, call_llm
from aiservice.observability.database import ErrorRecorder, LLMCallRecorder
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
if TYPE_CHECKING:
from aiservice.models.aimodels import LLM
from openai.types.chat import (
ChatCompletionAssistantMessageParam,
ChatCompletionFunctionMessageParam,
@@ -213,24 +215,100 @@ async def explain_optimizations(  # noqa: D417
| ChatCompletionToolMessageParam
| ChatCompletionFunctionMessageParam
] = [system_message, user_message]
llm_client = llm_clients[explanations_model.model_type]
llm_recorder = LLMCallRecorder()
error_recorder = ErrorRecorder()
llm_call_id = None
obs_context: dict = {"optimization_id": data.optimization_id, "speedup": data.speedup}
if data.call_sequence:
obs_context["call_sequence"] = data.call_sequence
try:
output = await llm_client.with_options(max_retries=2).chat.completions.create(
model=explanations_model.name, messages=messages, n=1
llm_call_id = await llm_recorder.record_llm_call_start(
trace_id=data.trace_id,
call_type="explanation",
model_name=explanations_model.name,
system_prompt=system_prompt,
user_prompt=user_prompt,
messages=[{"role": m["role"], "content": m["content"]} for m in messages],
temperature=None,
n_candidates=1,
user_id=user_id,
context=obs_context,
)
await update_optimization_cost(trace_id=data.trace_id, cost=calculate_llm_cost(output, explanations_model))
except OpenAIError as e:
except Exception as obs_error:
logging.warning(f"Observability recording failed (explain start): {obs_error}")
start_time = time.time()
try:
output = await call_llm(
model_name=explanations_model.name, model_type=explanations_model.model_type, messages=messages
)
await update_optimization_cost(
trace_id=data.trace_id, cost=calculate_llm_cost(output.raw_response, explanations_model), user_id=user_id
)
except Exception as e:
sentry_sdk.capture_exception(e)
debug_log_sensitive_data(f"Failed to generate new explanation, Error message: {e}")
try:
error_context = {"model": explanations_model.name, "optimization_id": data.optimization_id}
if data.call_sequence:
error_context["call_sequence"] = data.call_sequence
await error_recorder.record_error(
trace_id=data.trace_id,
error_type="llm_api",
error_category="explanation_error",
severity="error",
error_message=str(e),
error_code=type(e).__name__,
context=error_context,
)
if llm_call_id:
await llm_recorder.record_llm_call_completion(
llm_call_id=llm_call_id,
status="failed",
error_type=type(e).__name__,
error_message=str(e),
latency_ms=int((time.time() - start_time) * 1000),
)
except Exception as obs_error:
logging.warning(f"Observability recording failed (explain error): {obs_error}")
return ExplanationsErrorResponseSchema(error=str(e))
debug_log_sensitive_data(f"AIClient optimization response:\n{output}")
cost = calculate_llm_cost(output.raw_response, explanations_model)
latency_ms = int((time.time() - start_time) * 1000)
try:
if llm_call_id:
await llm_recorder.record_llm_call_completion(
llm_call_id=llm_call_id,
status="success",
raw_response=output.content,
prompt_tokens=output.usage.input_tokens if output.usage else None,
completion_tokens=output.usage.output_tokens if output.usage else None,
total_tokens=(output.usage.input_tokens + output.usage.output_tokens) if output.usage else None,
llm_cost=cost,
latency_ms=latency_ms,
candidates_generated=1,
)
except Exception as obs_error:
logging.warning(f"Observability recording failed (explain completion): {obs_error}")
debug_log_sensitive_data(f"AIClient optimization response:\n{output.content}")
if output.usage is not None:
ph(
user_id,
"aiservice-optimize-openai-usage",
properties={"model": explanations_model.name, "n": 1, "usage": output.usage.json()},
properties={
"model": explanations_model.name,
"n": 1,
"usage": {"input_tokens": output.usage.input_tokens, "output_tokens": output.usage.output_tokens},
},
)
return ExplanationsResponseSchema(explanation=output.choices[0].message.content)
return ExplanationsResponseSchema(explanation=output.content)
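# The fields consumed above (content, raw_response, usage.input_tokens /
# usage.output_tokens) suggest a wrapper shaped roughly like this sketch; the
# real LLMResponse lives in aiservice.llm, which this diff does not show, so
# the names and types below are assumptions.
from dataclasses import dataclass
from typing import Any

@dataclass
class LLMUsageSketch:
    input_tokens: int
    output_tokens: int

@dataclass
class LLMResponseSketch:
    content: str                    # text of the (single) candidate
    raw_response: Any               # provider-native response, kept for cost math
    usage: LLMUsageSketch | None    # None when the provider omits usage data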
class ExplanationsSchema(Schema):
@ -252,6 +330,7 @@ class ExplanationsSchema(Schema):
python_version: str | None = None
function_references: str | None = None
codeflash_version: str = "0.18.2"
call_sequence: int | None = None
class ExplanationsResponseSchema(Schema):

View file

@ -1,8 +1,9 @@
from datetime import UTC, datetime
from typing import Any
from uuid import UUID
from uuid import UUID, uuid4
from django.db.models import F
from django.db.models.functions import Coalesce
from log_features.models import OptimizationEvents, OptimizationFeatures, Repositories
@ -34,6 +35,7 @@ async def log_optimization_event(
repository_id = repository.id if repository else None
return await OptimizationEvents.objects.acreate(
id=str(uuid4()),
event_type=event_type,
user_id=user_id,
repository_id=repository_id,
@ -46,6 +48,47 @@ async def log_optimization_event(
)
async def get_or_create_optimization_event(
trace_id: str,
event_type: str = "no-pr",
user_id: str | None = None,
repository_id: str | None = None,
repo_owner: str | None = None,
repo_name: str | None = None,
api_key_id: int | None = None,
metadata: dict[str, Any] | None = None,
current_username: str | None = None,
llm_cost: float | None = None,
) -> tuple[OptimizationEvents, bool]:
"""Get existing optimization event or create a new one.
Uses aupdate_or_create to handle race conditions atomically.
Returns:
A tuple of (event, created) where created is True if a new event was created.
"""
if repository_id is None and repo_owner and repo_name:
repository = await get_repository(repo_owner, repo_name)
repository_id = repository.id if repository else None
event, created = await OptimizationEvents.objects.aupdate_or_create(
trace_id=trace_id,
defaults={},
create_defaults={
"id": str(uuid4()),
"event_type": event_type,
"user_id": user_id,
"repository_id": repository_id,
"api_key_id": api_key_id,
"metadata": metadata,
"created_at": datetime.now(UTC),
"current_username": current_username,
"llm_cost": llm_cost,
},
)
return event, created
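# A minimal usage sketch with hypothetical ids, assuming the async ORM is set up:
async def _example_get_or_create() -> None:
    event, created = await get_or_create_optimization_event(
        trace_id="trace-123",  # hypothetical trace id
        user_id="github|42",   # hypothetical user id
        llm_cost=0.0,
    )
    # created is False when a concurrent caller won the race; the row is reused.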
async def update_optimization_features_review(
trace_id: str,
review_quality: str | None = None,
@ -59,6 +102,25 @@ async def update_optimization_features_review(
)
async def update_optimization_cost(trace_id: str, cost: float) -> None:
"""Atomically increment llm_cost for the given trace_id and return new total."""
await OptimizationEvents.objects.filter(trace_id=trace_id).aupdate(llm_cost=F("llm_cost") + float(cost))
async def update_optimization_cost(trace_id: str, cost: float, user_id: str | None = None) -> None:
"""Atomically increment llm_cost for the given trace_id.
Creates a minimal event if one doesn't exist, then updates the cost.
Uses aupdate_or_create to handle race conditions atomically.
"""
_, created = await OptimizationEvents.objects.aupdate_or_create(
trace_id=trace_id,
defaults={},
create_defaults={
"id": str(uuid4()),
"event_type": "no-pr",
"user_id": user_id,
"created_at": datetime.now(UTC),
"llm_cost": cost,
},
)
# If the record already existed, increment the cost atomically
if not created:
await OptimizationEvents.objects.filter(trace_id=trace_id).aupdate(
llm_cost=Coalesce(F("llm_cost"), 0.0) + float(cost)
)
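# Semantics sketch with hypothetical values: the first call seeds the row, the
# second increments it, and Coalesce guards legacy rows whose llm_cost is NULL.
async def _example_cost_updates() -> None:
    await update_optimization_cost(trace_id="trace-123", cost=0.02)  # creates row at 0.02
    await update_optimization_cost(trace_id="trace-123", cost=0.03)  # increments to 0.05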

View file

@ -51,7 +51,7 @@ class OptimizationFeatures(models.Model):
class OptimizationEvents(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
id = models.CharField(max_length=36, primary_key=True, editable=False)
event_type = models.CharField(max_length=64) # 'pr_created', 'pr_merged', 'pr_closed'
user_id = models.CharField(max_length=255, null=True, blank=True)
repository_id = models.CharField(max_length=255, null=True, blank=True)
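# With id now a 36-char CharField rather than a UUIDField, creation sites must
# stringify explicitly, which is why the callers elsewhere in this change pass
# id=str(uuid4()):
import uuid
assert len(str(uuid.uuid4())) == 36  # canonical hyphenated form is exactly 36 chars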

View file

@ -25,6 +25,5 @@ aiservice/management/commands/__init__.py
aiservice/__init__.py
aiservice/models/__init__.py
aiservice/models/functions_to_optimize.py
aiservice/models/aimodels.py
aiservice/analytics/__init__.py
gunicorn.conf.py

View file

@ -4,19 +4,21 @@ import json
import logging
import re
from enum import Enum
from typing import TYPE_CHECKING, cast
from typing import TYPE_CHECKING
import sentry_sdk
from aiservice.env_specific import debug_log_sensitive_data, llm_clients
from aiservice.models.aimodels import OPTIMIZATION_REVIEW_MODEL, calculate_llm_cost
from log_features.log_event import update_optimization_cost, update_optimization_features_review
from ninja import NinjaAPI, Schema
from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam
from packaging import version
if TYPE_CHECKING:
from aiservice.models.aimodels import LLM
from aiservice.analytics.posthog import ph
from aiservice.env_specific import debug_log_sensitive_data
from aiservice.llm import OPTIMIZATION_REVIEW_MODEL, LLMResponse, calculate_llm_cost, call_llm
from aiservice.observability.decorators import observe_llm_call
from log_features.log_event import update_optimization_cost, update_optimization_features_review
if TYPE_CHECKING:
from aiservice.llm import LLM
optimization_review_api = NinjaAPI(urls_namespace="optimization_review")
@ -52,6 +54,7 @@ class OptimizationReviewSchema(Schema):
calling_fn_details: str
python_version: str | None = None
codeflash_version: str = "0.18.2"
call_sequence: int | None = None
def _build_optimization_review_messages(
@ -141,28 +144,40 @@ Output as a json markdown block with the key named as 'rating' and value being o
return [system_message, user_message]
@observe_llm_call("optimization_review")
async def call_optimization_review_llm(
trace_id: str, model: LLM, messages: list[dict[str, str]], user_id: str | None = None, context: dict | None = None
) -> LLMResponse:
return await call_llm(model_name=model.name, model_type=model.model_type, messages=messages)
async def get_optimization_review(
request, data: OptimizationReviewSchema, optimization_review_model: LLM = OPTIMIZATION_REVIEW_MODEL
) -> tuple[int, OptimizationReviewResponseSchema | OptimizationReviewErrorSchema]:
"""Compute optimization review via Claude."""
ph(request.user, "aiservice-optimization-review-called")
try:
messages = _build_optimization_review_messages(data)
debug_log_sensitive_data(f"{messages[0]}{messages[1]}")
llm_client = llm_clients[optimization_review_model.model_type]
# Call Claude API with retries
response = await llm_client.with_options(max_retries=2).chat.completions.create(
model=optimization_review_model.name, messages=messages
)
# Calculate and update cost
cost = calculate_llm_cost(response, optimization_review_model)
if cost:
await update_optimization_cost(data.trace_id, cost)
obs_context: dict = {"speedup": data.speedup}
if data.call_sequence:
obs_context["call_sequence"] = data.call_sequence
# Extract review text from Claude response
review_text = cast("str", response.choices[0].message.content).strip()
response = await call_optimization_review_llm(
trace_id=data.trace_id,
model=optimization_review_model,
messages=messages,
user_id=request.user,
context=obs_context,
)
cost = calculate_llm_cost(response.raw_response, optimization_review_model)
await update_optimization_cost(data.trace_id, cost, user_id=request.user)
review_text = response.content.strip()
match = re.match(r"(.*?)```json(?:\n|\\n)(.*?)```(.*)", review_text, re.DOTALL | re.MULTILINE)
if match:
try:

View file

@ -118,7 +118,11 @@ class SingleRefinerContext(BaseRefinerContext):
)
def apply_patches_to_optimized_code(self, llm_res: str) -> str:
diff: Diff = SearchAndReplaceDiff(content=llm_res, source_code={"file": self.data.optimized_source_code}, match_files_when_having_single_patch=False)
diff: Diff = SearchAndReplaceDiff(
content=llm_res,
source_code={"file": self.data.optimized_source_code},
match_files_when_having_single_patch=False,
)
file_to_code = diff.run()
return file_to_code["file"]

View file

@ -49,17 +49,13 @@ def parse_diff(diff: str) -> list[SearchReplaceBlock]:
replace_end = idx
if idx >= n:
raise ValueError(
"Invalid diff format: Missing '>>>>>>> REPLACE' marker"
)
raise ValueError("Invalid diff format: Missing '>>>>>>> REPLACE' marker")
search_content = "".join(lines[search_start:search_end]).rstrip()
replace_content = "".join(lines[replace_start:replace_end]).rstrip()
try:
block = SearchReplaceBlock.from_block(
search=search_content, replace=replace_content
)
block = SearchReplaceBlock.from_block(search=search_content, replace=replace_content)
blocks.append(block)
except ValidationError as ve:
raise ValueError(f"Invalid block format: {ve}")
@ -126,9 +122,7 @@ def apply_patches(diff_str: str, content: str) -> str:
start_char_idx = content.find(block.search)
if start_char_idx != -1:
end_char_idx = start_char_idx + len(block.search)
content = (
f"{content[:start_char_idx]}{block.replace}{content[end_char_idx:]}"
)
content = f"{content[:start_char_idx]}{block.replace}{content[end_char_idx:]}"
return content
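# The accepted input shape, assuming the conventional SEARCH/======= markers
# that pair with the '>>>>>>> REPLACE' marker validated above:
_example_diff = """\
<<<<<<< SEARCH
def double(xs):
    return [x * 2 for x in xs]
=======
def double(xs):
    return [x << 1 for x in xs]
>>>>>>> REPLACE
"""
# apply_patches(_example_diff, content) swaps the first exact occurrence of the
# SEARCH text inside `content` for the REPLACE text.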

View file

@ -1,10 +1,16 @@
from __future__ import annotations
import enum
from typing import TYPE_CHECKING
import libcst
from ninja import Schema
from pydantic import field_validator
from pydantic.dataclasses import dataclass
if TYPE_CHECKING:
from aiservice.llm import LLM
class OptimizedCandidateSource(str, enum.Enum):
OPTIMIZE = "OPTIMIZE"
@ -63,4 +69,15 @@ class OptimizeSchema(Schema):
repo_owner: str | None = None
repo_name: str | None = None
is_async: bool | None = False
n_candidates: int | None = 5
model: str | None = None
call_sequence: int | None = None
def get_model_from_name(model_name: str | None) -> LLM:
from aiservice.llm import OPTIMIZE_MODEL, Anthropic_Claude_Sonnet_4_5, OpenAI_GPT_4_1 # noqa: PLC0415
if model_name == "gpt-4.1":
return OpenAI_GPT_4_1()
if model_name == "claude-sonnet-4-5":
return Anthropic_Claude_Sonnet_4_5()
return OPTIMIZE_MODEL
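# Selection behavior in brief: known aliases map to concrete models and
# anything else, including None, falls back to OPTIMIZE_MODEL.
#     get_model_from_name("gpt-4.1")           -> OpenAI_GPT_4_1()
#     get_model_from_name("claude-sonnet-4-5") -> Anthropic_Claude_Sonnet_4_5()
#     get_model_from_name(None)                -> OPTIMIZE_MODEL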

View file

@ -10,15 +10,15 @@ import libcst as cst
import sentry_sdk
from ninja import NinjaAPI
from ninja.errors import HttpError
from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam
from pydantic import ValidationError
from aiservice.analytics.posthog import ph
from aiservice.common_utils import parse_python_version, should_hack_for_demo, validate_trace_id
from aiservice.env_specific import debug_log_sensitive_data, debug_log_sensitive_data_from_callable, llm_clients
from aiservice.models.aimodels import OPTIMIZE_MODEL, calculate_llm_cost
from aiservice.env_specific import debug_log_sensitive_data, debug_log_sensitive_data_from_callable
from aiservice.llm import LLM, OPTIMIZE_MODEL, LLMResponse, calculate_llm_cost, call_llm
from aiservice.observability.decorators import observe_llm_call
from authapp.user import get_user_by_id
from log_features.log_event import log_optimization_event
from log_features.log_event import get_or_create_optimization_event
from log_features.log_features import log_features
from optimizer.context_utils.context_helpers import group_code
from optimizer.context_utils.optimizer_context import (
@ -28,17 +28,10 @@ from optimizer.context_utils.optimizer_context import (
OptimizeResponseSchema,
)
from optimizer.diff_patches_utils.diff import DiffMethod
from optimizer.models import OptimizedCandidateSource, OptimizeSchema # noqa: TC001
from optimizer.models import OptimizedCandidateSource, OptimizeSchema, get_model_from_name  # noqa: TC001
if TYPE_CHECKING:
from django.http import HttpRequest
from openai.types.chat import (
ChatCompletionAssistantMessageParam,
ChatCompletionFunctionMessageParam,
ChatCompletionToolMessageParam,
)
from aiservice.models.aimodels import LLM
optimizations_json = [
@ -119,37 +112,44 @@ ASYNC_SYSTEM_PROMPT = (current_dir / "async_system_prompt.md").read_text()
ASYNC_USER_PROMPT = (current_dir / "async_user_prompt.md").read_text()
@observe_llm_call("optimization")
async def call_optimization_llm(
trace_id: str,
model: LLM,
system_prompt: str,
user_prompt: str,
user_id: str | None = None,
python_version: str | None = None,
context: dict | None = None,
) -> LLMResponse:
"""Call LLM for code optimization with automatic observability.
This function is decorated with @observe_llm_call which automatically:
- Records call start (non-blocking)
- Captures timing and token usage
- Records completion (non-blocking)
- Handles errors automatically
All observability runs in the background without blocking the LLM call.
Args:
context: Additional context for observability (e.g., call_sequence for multi-model tracking)
"""
messages = [{"role": "system", "content": system_prompt}, {"role": "user", "content": user_prompt}]
return await call_llm(model_name=model.name, model_type=model.model_type, messages=messages)
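# A minimal sketch of the contract the docstring above describes; the real
# decorator lives in aiservice.observability.decorators, which this diff does
# not show, so everything below is an assumption about its shape.
import asyncio
import functools
import time
from typing import Any, Callable

def observe_llm_call_sketch(call_type: str) -> Callable:
    def decorator(fn: Callable) -> Callable:
        @functools.wraps(fn)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            async def record(event: str, **fields: Any) -> None:
                # placeholder sink; the real recorder writes to the observability DB
                logging.debug("obs[%s] %s %s", call_type, event, fields)

            start = time.time()
            asyncio.create_task(record("start"))  # fire-and-forget, never blocks the call
            try:
                response = await fn(*args, **kwargs)
            except Exception as e:
                asyncio.create_task(record("failed", error=type(e).__name__,
                                           latency_ms=int((time.time() - start) * 1000)))
                raise
            asyncio.create_task(record("success",
                                       latency_ms=int((time.time() - start) * 1000)))
            return response
        return wrapper
    return decorator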
async def optimize_python_code(
user_id: str,
ctx: BaseOptimizerContext,
trace_id: str,
dependency_code: str | None = None,
n: int = 1,
optimize_model: LLM = OPTIMIZE_MODEL,
python_version: tuple[int, int, int] = (3, 12, 9),
call_sequence: int | None = None,
) -> tuple[list[OptimizeResponseItemSchema], float | None]:
"""Optimize the given python code for performance using LLMs.
Parameters
----------
user_id : str
The ID of the user requesting the optimization.
ctx : BaseOptimizerContext
The optimizer context containing source code and configuration.
dependency_code : str | None, optional
Additional dependency code for context. Default is None.
n : int, optional
Number of optimization variants to generate. Default is 1.
optimize_model : LLM, optional
The LLM model to use for optimization. Default is OPTIMIZE_MODEL.
python_version : tuple[int, int, int], optional
The python version to use. Default is (3, 12, 9).
Returns
-------
tuple[list[OptimizeResponseItemSchema], float | None]
A tuple containing a list of optimization response items and the LLM cost.
"""
"""Optimize the given python code for performance using LLMs."""
logging.info("/optimize: Optimizing python code.")
debug_log_sensitive_data(f"Optimizing python code for user {user_id}:\n{ctx.source_code}")
# TODO: Experiment with iterative approaches to optimization. Take the learnings from the testing phase into the
@ -161,51 +161,48 @@ async def optimize_python_code(
system_prompt = ctx.get_system_prompt(python_version_str)
user_prompt = ctx.get_user_prompt(dependency_code, None)
system_message = ChatCompletionSystemMessageParam(role="system", content=system_prompt)
user_message = ChatCompletionUserMessageParam(role="user", content=user_prompt)
messages: list[
ChatCompletionSystemMessageParam
| ChatCompletionUserMessageParam
| ChatCompletionAssistantMessageParam
| ChatCompletionToolMessageParam
| ChatCompletionFunctionMessageParam
] = [system_message, user_message]
llm_client = llm_clients[optimize_model.model_type]
# Build context for observability (includes call_sequence for multi-model tracking)
obs_context = {"call_sequence": call_sequence} if call_sequence else None
# Call LLM with automatic observability (decorator handles everything)
try:
output = await llm_client.with_options(max_retries=3).chat.completions.create(
model=optimize_model.name, messages=messages, n=n
output = await call_optimization_llm(
trace_id=trace_id,
model=optimize_model,
system_prompt=system_prompt,
user_prompt=user_prompt,
user_id=user_id,
python_version=python_version_str,
context=obs_context,
)
except Exception as e:
logging.exception("OpenAI Code Generation error in optimizer")
sentry_sdk.capture_exception(e)
debug_log_sensitive_data(f"Failed to generate code for source:\n{ctx.source_code}")
return []
llm_cost = calculate_llm_cost(output, optimize_model)
debug_log_sensitive_data(f"OpenAIClient optimization response:\n{output.model_dump_json(indent=2)}")
return [], None
if output.usage is not None:
llm_cost = calculate_llm_cost(output.raw_response, optimize_model)
debug_log_sensitive_data(f"OpenAIClient optimization response:\n{output.raw_response.model_dump_json(indent=2)}")
if output.raw_response.usage is not None:
ph(
user_id,
"aiservice-optimize-openai-usage",
properties={"model": optimize_model.name, "n": n, "usage": output.usage.json()},
properties={"model": optimize_model.name, "usage": output.raw_response.usage.json()},
)
results = [content for op in output.choices if (content := op.message.content)]
optimization_response_items: list[OptimizeResponseItemSchema] = []
for result in results:
ctx.extract_code_and_explanation_from_llm_res(result)
try:
res = ctx.parse_and_generate_candidate_schema()
if res is not None and ctx.is_valid_code():
optimization_response_items.append(res)
ctx.extracted_code_and_expl = None
ctx.parsed_code_and_explanation = None
except (ValueError, ValidationError, cst.ParserSyntaxError) as e:
sentry_sdk.capture_message(f"Error parsing optimization result: {e}")
debug_log_sensitive_data(f"error for source:\n{ctx.source_code}")
debug_log_sensitive_data(f"Traceback: {e}")
continue
return optimization_response_items, llm_cost
ctx.extract_code_and_explanation_from_llm_res(output.content)
try:
res = ctx.parse_and_generate_candidate_schema()
if res is not None and ctx.is_valid_code():
return [res], llm_cost
except (ValueError, ValidationError, cst.ParserSyntaxError) as e:
sentry_sdk.capture_message(f"Error parsing optimization result: {e}")
debug_log_sensitive_data(f"error for source:\n{ctx.source_code}")
debug_log_sensitive_data(f"Traceback: {e}")
return [], llm_cost
def validate_request_data(data: OptimizeSchema, ctx: BaseOptimizerContext) -> tuple[int, int, int]:
@ -240,7 +237,9 @@ async def optimize(
system_prompt = ASYNC_SYSTEM_PROMPT if data.is_async else SYSTEM_PROMPT
user_prompt = ASYNC_USER_PROMPT if data.is_async else USER_PROMPT
ctx: BaseOptimizerContext = BaseOptimizerContext.get_dynamic_context(system_prompt, user_prompt, data.source_code, DiffMethod.NO_DIFF)
ctx: BaseOptimizerContext = BaseOptimizerContext.get_dynamic_context(
system_prompt, user_prompt, data.source_code, DiffMethod.NO_DIFF
)
ph(request.user, "aiservice-optimize-called")
try:
@ -258,7 +257,7 @@ async def optimize(
response = await hack_for_demo_gsq(ctx)
async with asyncio.TaskGroup() as tg:
event_task = tg.create_task(
log_optimization_event(
get_or_create_optimization_event(
event_type="no-pr",
user_id=request.user,
current_username=data.current_username,
@ -274,7 +273,7 @@ async def optimize(
llm_cost=0,
)
)
event = event_task.result()
event, _created = event_task.result()
for item in response.optimizations:
item.optimization_event_id = str(event.id) if event else None
return response_code, response
@ -284,9 +283,11 @@ async def optimize(
optimize_python_code(
request.user,
ctx,
data.trace_id,
data.dependency_code,
n=min(data.n_candidates or 5, 5),
optimize_model=get_model_from_name(data.model),
python_version=python_version,
call_sequence=data.call_sequence,
)
)
user_task = None
@ -315,7 +316,7 @@ async def optimize(
async with asyncio.TaskGroup() as tg:
event_task = tg.create_task(
log_optimization_event(
get_or_create_optimization_event(
event_type="no-pr",
user_id=request.user,
current_username=data.current_username,
@ -347,11 +348,14 @@ async def optimize(
},
explanations_post={cei.optimization_id: cei.explanation for cei in optimization_response_items},
experiment_metadata=data.experiment_metadata if data.experiment_metadata else None,
optimizations_origin={cei.optimization_id: {"source": OptimizedCandidateSource.OPTIMIZE, "parent": None} for cei in optimization_response_items},
optimizations_origin={
cei.optimization_id: {"source": OptimizedCandidateSource.OPTIMIZE, "parent": None}
for cei in optimization_response_items
},
)
)
event = event_task.result()
event, _created = event_task.result()
for item in optimization_response_items:
item.optimization_event_id = str(event.id) if event else None

View file

@ -10,8 +10,9 @@ from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUs
from aiservice.analytics.posthog import ph
from aiservice.common_utils import parse_python_version, validate_trace_id
from aiservice.env_specific import debug_log_sensitive_data, debug_log_sensitive_data_from_callable, llm_clients
from aiservice.models.aimodels import OPTIMIZE_MODEL, calculate_llm_cost
from aiservice.env_specific import debug_log_sensitive_data, debug_log_sensitive_data_from_callable
from aiservice.llm import OPTIMIZE_MODEL, LLMResponse, calculate_llm_cost, call_llm
from aiservice.observability.decorators import observe_llm_call
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
from optimizer.context_utils.optimizer_context import (
@ -20,7 +21,7 @@ from optimizer.context_utils.optimizer_context import (
OptimizeResponseSchema,
)
from optimizer.diff_patches_utils.diff import DiffMethod
from optimizer.models import OptimizedCandidateSource
from optimizer.models import OptimizedCandidateSource, get_model_from_name
if TYPE_CHECKING:
from openai.types.chat import (
@ -29,7 +30,7 @@ if TYPE_CHECKING:
ChatCompletionToolMessageParam,
)
from aiservice.models.aimodels import LLM
from aiservice.llm import LLM
from optimizer.context_utils.optimizer_context import OptimizeResponseItemSchema
@ -42,33 +43,42 @@ SYSTEM_PROMPT = (current_dir / "system_prompt.md").read_text()
USER_PROMPT = (current_dir / "user_prompt.md").read_text()
async def optimize_python_code_line_profiler( # noqa: D417
@observe_llm_call("line_profiler")
async def call_line_profiler_llm(
trace_id: str,
model: LLM,
messages: list[dict[str, str]],
user_id: str | None = None,
python_version: str | None = None,
context: dict | None = None,
) -> LLMResponse:
"""Call LLM for line profiler optimization with automatic observability.
This function is decorated with @observe_llm_call which automatically:
- Records call start (non-blocking)
- Captures timing and token usage
- Records completion (non-blocking)
- Handles errors automatically
All observability runs in the background without blocking the LLM call.
"""
return await call_llm(model_name=model.name, model_type=model.model_type, messages=messages)
async def optimize_python_code_line_profiler(
user_id: str,
trace_id: str,
line_profiler_results: str,
ctx: BaseOptimizerContext,
dependency_code: str | None = None,
n: int = 1,
optimize_model: LLM = OPTIMIZE_MODEL,
lsp_mode: bool = False, # noqa: FBT001, FBT002
python_version: tuple[int, int, int] = (3, 12, 9),
call_sequence: int | None = None,
) -> list[OptimizeResponseItemSchema]:
"""Optimize the given python code for performance using OpenAI's GPT-4o model.
Parameters
----------
- source_code (str): The python code to optimize.
- n (int): Number of optimization variants to generate. Default is 1.
Returns: - List[Tuple[Union[str, None], Union[str, None]]]: A list of tuples where the first element is the
optimized code and the second is the explanation.
"""
"""Optimize the given python code for performance using LLMs."""
logging.info("/optimize: Optimizing python code line profile.")
debug_log_sensitive_data(f"Optimizing python code for user {user_id}:\n{ctx.source_code}")
if user_id in ["github|1235813", "github|1100399"] or lsp_mode:
# for Galileo and LSP mode, we only generate 5 LP optimizations
n = 5
python_version_str = ".".join(str(x) for x in python_version)
@ -90,38 +100,46 @@ async def optimize_python_code_line_profiler( # noqa: D417
] = [system_message, user_message]
debug_log_sensitive_data(f"This was the user prompt\n {user_prompt}\n")
# TODO: Verify if the context window length is within the model capability
llm_client = llm_clients[optimize_model.model_type]
# Build context for observability (includes call_sequence for multi-model tracking)
obs_context: dict = {"lsp_mode": lsp_mode}
if call_sequence:
obs_context["call_sequence"] = call_sequence
# Call LLM with automatic observability (decorator handles everything)
try:
output = await llm_client.with_options(max_retries=3).chat.completions.create(
model=optimize_model.name, messages=messages, n=n
output = await call_line_profiler_llm(
trace_id=trace_id,
model=optimize_model,
messages=messages,
user_id=user_id,
python_version=python_version_str,
context=obs_context,
)
await update_optimization_cost(
trace_id=trace_id, cost=calculate_llm_cost(output.raw_response, optimize_model), user_id=user_id
)
await update_optimization_cost(trace_id=trace_id, cost=calculate_llm_cost(output, optimize_model))
except Exception as e:
logging.exception("OpenAI Code Generation error in optimizer-line-profiler")
sentry_sdk.capture_exception(e)
debug_log_sensitive_data(f"Failed to generate code for source:\n{ctx.source_code}")
return []
debug_log_sensitive_data(f"OpenAIClient optimization response:\n{output.model_dump_json(indent=2)}")
debug_log_sensitive_data(f"OpenAIClient optimization response:\n{output.raw_response.model_dump_json(indent=2)}")
if output.usage is not None:
if output.raw_response.usage is not None:
ph(
user_id,
"aiservice-optimize-line-profiler-openai-usage",
properties={"model": optimize_model.name, "n": n, "usage": output.usage.json()},
properties={"model": optimize_model.name, "usage": output.raw_response.usage.json()},
)
results = [content for op in output.choices if (content := op.message.content)]
optimization_response_items: list[OptimizeResponseItemSchema] = []
for result in results:
ctx.extract_code_and_explanation_from_llm_res(result)
res = ctx.parse_and_generate_candidate_schema()
if res is not None and ctx.is_valid_code():
optimization_response_items.append(res)
ctx.extracted_code_and_expl = None
ctx.parsed_code_and_explanation = None
ctx.extract_code_and_explanation_from_llm_res(output.content)
res = ctx.parse_and_generate_candidate_schema()
if res is not None and ctx.is_valid_code():
return [res]
return optimization_response_items
return []
class OptimizeSchemaLP(Schema):
@ -133,7 +151,8 @@ class OptimizeSchemaLP(Schema):
experiment_metadata: dict[str, str] | None = None
codeflash_version: str | None = None
lsp_mode: bool = False
n_candidates_lp: int | None = 6
model: str | None = None
call_sequence: int | None = None
@optimize_line_profiler_api.post(
@ -141,7 +160,9 @@ class OptimizeSchemaLP(Schema):
)
async def optimize(request, data: OptimizeSchemaLP) -> tuple[int, OptimizeResponseSchema | OptimizeErrorResponseSchema]: # noqa: ANN001
ph(request.user, "aiservice-optimize-called")
ctx: BaseOptimizerContext = BaseOptimizerContext.get_dynamic_context(SYSTEM_PROMPT, USER_PROMPT, data.source_code, DiffMethod.NO_DIFF)
ctx: BaseOptimizerContext = BaseOptimizerContext.get_dynamic_context(
SYSTEM_PROMPT, USER_PROMPT, data.source_code, DiffMethod.NO_DIFF
)
try:
python_version: tuple[int, int, int] = parse_python_version(data.python_version)
except: # noqa: E722
@ -159,12 +180,13 @@ async def optimize(request, data: OptimizeSchemaLP) -> tuple[int, OptimizeRespon
optimization_response_items = await optimize_python_code_line_profiler(
user_id=request.user,
trace_id=data.trace_id,
line_profiler_results=data.line_profiler_results,
ctx=ctx,
dependency_code=data.dependency_code,
line_profiler_results=data.line_profiler_results,
n=min(data.n_candidates_lp or 6, 8),
optimize_model=get_model_from_name(data.model),
lsp_mode=data.lsp_mode,
python_version=python_version,
call_sequence=data.call_sequence,
)
if len(optimization_response_items) == 0:
ph(request.user, "aiservice-optimize-no-optimizations-found")
@ -192,8 +214,10 @@ async def optimize(request, data: OptimizeSchemaLP) -> tuple[int, OptimizeRespon
},
explanations_post={cei.optimization_id: cei.explanation for cei in optimization_response_items},
experiment_metadata=data.experiment_metadata if data.experiment_metadata else None,
optimizations_origin={cei.optimization_id: {"source": OptimizedCandidateSource.OPTIMIZE_LP, "parent": None} for cei in optimization_response_items},
optimizations_origin={
cei.optimization_id: {"source": OptimizedCandidateSource.OPTIMIZE_LP, "parent": None}
for cei in optimization_response_items
},
)
response = OptimizeResponseSchema(optimizations=optimization_response_items)

View file

@ -274,7 +274,6 @@ def _strip_comments_from_code(code: str) -> str:
The same code with all comments removed, preserving string content
"""
try:
lines = code.splitlines(keepends=True)
tokens = tokenize.generate_tokens(io.StringIO(code).readline)
@ -437,91 +436,94 @@ def clean_extraneous_comments(original_module: cst.Module, optimized_module: cst
elif is_comment_only and not is_near_change:
# Comment line not near a code change - skip it
pass
# Code line - check if it changed or not
# If changed, use optimized version. If unchanged, use original version.
elif opt_idx in code_changed_lines:
# Code changed - just use the optimized line as-is, no restoration
result_lines.append(opt_line)
else:
# Code line - check if it changed or not
# If changed, use optimized version. If unchanged, use original version.
if opt_idx in code_changed_lines:
# Code changed - just use the optimized line as-is, no restoration
result_lines.append(opt_line)
else:
# Code didn't change - find and use original (including any preceding comments)
found_orig = None
orig_line_idx = None
for orig_idx_search in range(orig_idx, len(orig_lines)):
orig_code = orig_code_only[orig_idx_search] if orig_idx_search < len(orig_code_only) else ""
if orig_code == opt_code:
found_orig = orig_lines[orig_idx_search]
orig_line_idx = orig_idx_search
orig_idx = orig_idx_search + 1
break
# Code didn't change - find and use original (including any preceding comments)
found_orig = None
orig_line_idx = None
for orig_idx_search in range(orig_idx, len(orig_lines)):
orig_code = orig_code_only[orig_idx_search] if orig_idx_search < len(orig_code_only) else ""
if orig_code == opt_code:
found_orig = orig_lines[orig_idx_search]
orig_line_idx = orig_idx_search
orig_idx = orig_idx_search + 1
break
if found_orig:
# Check if there are comment-only/blank lines in the original that come before this line
# BUT ONLY restore them if this code line is UNCHANGED (not in code_changed_lines)
# Don't restore original comments before CHANGED code lines
if orig_line_idx is not None and orig_line_idx > 0 and opt_idx not in code_changed_lines:
# Look backwards for ALL consecutive comment-only or blank lines
# Collect them all, then decide which ones to restore
preceding_lines = []
check_idx = orig_line_idx - 1
if found_orig:
# Check if there are comment-only/blank lines in the original that come before this line
# BUT ONLY restore them if this code line is UNCHANGED (not in code_changed_lines)
# Don't restore original comments before CHANGED code lines
if orig_line_idx is not None and orig_line_idx > 0 and opt_idx not in code_changed_lines:
# Look backwards for ALL consecutive comment-only or blank lines
# Collect them all, then decide which ones to restore
preceding_lines = []
check_idx = orig_line_idx - 1
while check_idx >= 0:
check_code = orig_code_only[check_idx] if check_idx < len(orig_code_only) else ""
if not check_code.strip():
# This is a comment-only or blank line in the original
# Add it if not already restored and if it was removed from optimized
if check_idx not in orig_to_opt_mapping and check_idx not in restored_orig_indices:
preceding_lines.insert(0, orig_lines[check_idx])
restored_orig_indices.add(check_idx)
check_idx -= 1
else:
# Hit a line with actual code - stop looking backwards
break
while check_idx >= 0:
check_code = orig_code_only[check_idx] if check_idx < len(orig_code_only) else ""
if not check_code.strip():
# This is a comment-only or blank line in the original
# Add it if not already restored and if it was removed from optimized
if check_idx not in orig_to_opt_mapping and check_idx not in restored_orig_indices:
preceding_lines.insert(0, orig_lines[check_idx])
restored_orig_indices.add(check_idx)
check_idx -= 1
else:
# Hit a line with actual code - stop looking backwards
break
# Add the restored comments/blank lines before the actual line
result_lines.extend(preceding_lines)
# Add the restored comments/blank lines before the actual line
result_lines.extend(preceding_lines)
# Use the original line (preserves original comments or lack thereof)
result_lines.append(found_orig)
restored_orig_indices.add(orig_line_idx)
# Use the original line (preserves original comments or lack thereof)
result_lines.append(found_orig)
restored_orig_indices.add(orig_line_idx)
# Also check for trailing blank/comment lines after this line that were removed
# BUT: only restore them if this code line is UNCHANGED and they don't come before a changed line
# (in that case, the new comment from optimized should be kept)
if orig_line_idx is not None and orig_line_idx < len(orig_lines) - 1 and opt_idx not in code_changed_lines:
trailing_lines = []
check_idx = orig_line_idx + 1
found_changed_line = False
# Also check for trailing blank/comment lines after this line that were removed
# BUT: only restore them if this code line is UNCHANGED and they don't come before a changed line
# (in that case, the new comment from optimized should be kept)
if (
orig_line_idx is not None
and orig_line_idx < len(orig_lines) - 1
and opt_idx not in code_changed_lines
):
trailing_lines = []
check_idx = orig_line_idx + 1
found_changed_line = False
while check_idx < len(orig_lines):
check_code = orig_code_only[check_idx] if check_idx < len(orig_code_only) else ""
if not check_code.strip():
# This is a comment-only or blank line in the original
# Check if it was removed (not in the optimized version)
if check_idx not in orig_to_opt_mapping and check_idx not in restored_orig_indices:
trailing_lines.append(orig_lines[check_idx])
restored_orig_indices.add(check_idx)
check_idx += 1
else:
# Hit a line with actual code
# Check if this code line was changed in the optimized version
if check_idx in orig_to_opt_mapping:
next_opt_idx = orig_to_opt_mapping[check_idx]
if next_opt_idx in code_changed_lines:
found_changed_line = True
else:
# This original line is not in the mapping, which means
# it was either deleted or modified. In either case,
# this is a changed line.
while check_idx < len(orig_lines):
check_code = orig_code_only[check_idx] if check_idx < len(orig_code_only) else ""
if not check_code.strip():
# This is a comment-only or blank line in the original
# Check if it was removed (not in the optimized version)
if check_idx not in orig_to_opt_mapping and check_idx not in restored_orig_indices:
trailing_lines.append(orig_lines[check_idx])
restored_orig_indices.add(check_idx)
check_idx += 1
else:
# Hit a line with actual code
# Check if this code line was changed in the optimized version
if check_idx in orig_to_opt_mapping:
next_opt_idx = orig_to_opt_mapping[check_idx]
if next_opt_idx in code_changed_lines:
found_changed_line = True
break
else:
# This original line is not in the mapping, which means
# it was either deleted or modified. In either case,
# this is a changed line.
found_changed_line = True
break
# Only add trailing comments if they're NOT immediately before a changed line
if not found_changed_line:
result_lines.extend(trailing_lines)
else:
# Keep it (shouldn't happen but be safe)
result_lines.append(opt_line)
# Only add trailing comments if they're NOT immediately before a changed line
if not found_changed_line:
result_lines.extend(trailing_lines)
else:
# Keep it (shouldn't happen but be safe)
result_lines.append(opt_line)
# Parse the cleaned code back into a CST module
cleaned_code = "".join(result_lines)
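# Behavior in brief on a hypothetical two-line snippet:
#   original:   x = load()  # cached
#               y = x * 2
#   optimized:  x = load()
#               y = x << 1  # bit shift
#   cleaned:    x = load()  # cached     <- unchanged code line, original comment restored
#               y = x << 1  # bit shift  <- changed code line, optimizer's comment kept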

View file

@ -14,8 +14,9 @@ from pydantic import ValidationError
from aiservice.analytics.posthog import ph
from aiservice.common_utils import validate_trace_id
from aiservice.env_specific import debug_log_sensitive_data, llm_clients
from aiservice.models.aimodels import REFINEMENT_MODEL, calculate_llm_cost
from aiservice.env_specific import debug_log_sensitive_data
from aiservice.llm import REFINEMENT_MODEL, LLMResponse, calculate_llm_cost, call_llm
from aiservice.observability.decorators import observe_llm_call
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
from optimizer.context_utils.refiner_context import BaseRefinerContext, RefinementContextData
@ -28,7 +29,7 @@ if TYPE_CHECKING:
ChatCompletionToolMessageParam,
)
from aiservice.models.aimodels import LLM
from aiservice.llm import LLM
refinement_api = NinjaAPI(urls_namespace="refinement")
@ -187,8 +188,20 @@ Here is the function_references
"""
@observe_llm_call("refinement")
async def call_refinement_llm(
trace_id: str, model: LLM, messages: list[dict[str, str]], user_id: str | None = None, context: dict | None = None
) -> LLMResponse:
return await call_llm(model_name=model.name, model_type=model.model_type, messages=messages)
async def refinement( # noqa: D417
user_id: str, optimization_id: str, ctx: BaseRefinerContext, optimize_model: LLM = REFINEMENT_MODEL
user_id: str,
optimization_id: str,
ctx: BaseRefinerContext,
trace_id: str = "",
optimize_model: LLM = REFINEMENT_MODEL,
call_sequence: int | None = None,
) -> RefinementIntermediateResponseItemschema | OptimizeErrorResponseSchema:
"""Optimize the given python code for performance using Anthropic's Claude 4 model.
@ -230,28 +243,38 @@ async def refinement( # noqa: D417
| ChatCompletionFunctionMessageParam
] = [system_message, user_message]
debug_log_sensitive_data(f"This was the user prompt\n {user_prompt}\n")
llm_client = llm_clients[optimize_model.model_type]
obs_context: dict = {"optimization_id": optimization_id, "speedup": ctx.data.speedup}
if call_sequence:
obs_context["call_sequence"] = call_sequence
try:
output = await llm_client.with_options(max_retries=2).chat.completions.create(
model=optimize_model.name, messages=messages, n=1
output = await call_refinement_llm(
trace_id=trace_id, model=optimize_model, messages=messages, user_id=user_id, context=obs_context
)
llm_cost = calculate_llm_cost(output, optimize_model)
llm_cost = calculate_llm_cost(output.raw_response, optimize_model)
except Exception as e:
logging.exception("Claude Code Generation error in refinement")
sentry_sdk.capture_exception(e)
debug_log_sensitive_data(f"Failed to generate code for source:\n{ctx.data.original_source_code}")
return OptimizeErrorResponseSchema(error=str(e))
debug_log_sensitive_data(f"ClaudeClient optimization response:\n{output.model_dump_json(indent=2)}")
debug_log_sensitive_data(f"ClaudeClient optimization response:\n{output.content}")
if output.usage is not None:
ph(user_id, "refinement-usage", properties={"model": optimize_model.name, "usage": output.usage.json()})
results = [content for op in output.choices if (content := op.message.content)] # will be of size 1
ph(
user_id,
"refinement-usage",
properties={
"model": optimize_model.name,
"usage": {"input_tokens": output.usage.input_tokens, "output_tokens": output.usage.output_tokens},
},
)
# Regex cannot yet isolate anything beyond the search/replace block, so the full response serves as the explanation
refined_explanation = results[0]
refined_explanation = output.content
refined_optimization = ""
try:
refined_optimization = ctx.apply_patches_to_optimized_code(results[0])
refined_optimization = ctx.apply_patches_to_optimized_code(output.content)
except (ValueError, ValidationError) as exc:
sentry_sdk.capture_exception(exc)
debug_log_sensitive_data(f"{type(exc).__name__} for source:\n{ctx.data.optimized_source_code}")
@ -285,6 +308,7 @@ class RefinementRequestSchema(Schema):
speedup: str = ""
python_version: str | None = None
function_references: str | None = None
call_sequence: int | None = None
class OptimizeErrorResponseSchema(Schema):
@ -346,7 +370,13 @@ async def refine(
if i != 0:
ctx.data = ctx_data_list[i]
optimized_source_code_and_explanations_futures.append(
refinement(user_id=request.user, optimization_id=data[i].optimization_id, ctx=ctx)
refinement(
user_id=request.user,
optimization_id=data[i].optimization_id,
ctx=ctx,
trace_id=trace_id,
call_sequence=data[i].call_sequence,
)
)
refinement_data = await asyncio.gather(*optimized_source_code_and_explanations_futures)
# Simple filtering: drop empty strings, deduplicate after stripping leading/trailing whitespace, and validate with libcst
@ -395,7 +425,7 @@ async def refine(
for cei in filtered_refined_optimizations
},
)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost, user_id=request.user)
return 200, Refinementschema(
refinements=[
RefinementResponseItemschema(

View file

@ -24,6 +24,7 @@ dependencies = [
"sentry-sdk[django]>=2.35.0",
"stamina>=25.1.0",
"jedi>=0.19.2",
"anthropic>=0.75.0",
]
[project.urls]

View file

@ -4,17 +4,18 @@ import re
from typing import TYPE_CHECKING
import sentry_sdk
from aiservice.analytics.posthog import ph
from aiservice.common_utils import validate_trace_id
from aiservice.env_specific import debug_log_sensitive_data, llm_clients
from aiservice.models.aimodels import LLM, RANKING_MODEL, calculate_llm_cost
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
from ninja import NinjaAPI, Schema
from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam
from aiservice.analytics.posthog import ph
from aiservice.common_utils import validate_trace_id
from aiservice.env_specific import debug_log_sensitive_data
from aiservice.llm import LLM, RANKING_MODEL, LLMResponse, calculate_llm_cost, call_llm
from aiservice.observability.decorators import observe_llm_call
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
if TYPE_CHECKING:
from aiservice.models.aimodels import LLM
from openai.types.chat import (
ChatCompletionAssistantMessageParam,
ChatCompletionFunctionMessageParam,
@ -72,6 +73,23 @@ Here are the function references
"""
@observe_llm_call("ranking")
async def call_ranker_llm(
trace_id: str, model: LLM, messages: list[dict[str, str]], user_id: str | None = None, context: dict | None = None
) -> LLMResponse:
"""Call LLM for ranking with automatic observability.
This function is decorated with @observe_llm_call which automatically:
- Records call start (non-blocking)
- Captures timing and token usage
- Records completion (non-blocking)
- Handles errors automatically
All observability runs in the background without blocking the LLM call.
"""
return await call_llm(model_name=model.name, model_type=model.model_type, messages=messages)
async def rank_optimizations( # noqa: D417
user_id: str, data: RankInputSchema, rank_model: LLM = RANKING_MODEL
) -> RankResponseSchema | RankErrorResponseSchema:
@ -109,26 +127,37 @@ async def rank_optimizations( # noqa: D417
| ChatCompletionToolMessageParam
| ChatCompletionFunctionMessageParam
] = [system_message, user_message]
llm_client = llm_clients[rank_model.model_type]
# Call LLM with automatic observability (decorator handles everything)
try:
output = await llm_client.with_options(max_retries=2).chat.completions.create(
model=rank_model.name, messages=messages, n=1
output = await call_ranker_llm(
trace_id=data.trace_id,
model=rank_model,
messages=messages,
user_id=user_id,
context={
"num_candidates": len(data.diffs),
"speedups": data.speedups,
"python_version": data.python_version,
},
)
await update_optimization_cost(
trace_id=data.trace_id, cost=calculate_llm_cost(output.raw_response, rank_model), user_id=user_id
)
await update_optimization_cost(trace_id=data.trace_id, cost=calculate_llm_cost(output, rank_model))
except Exception as e: # noqa: BLE001
debug_log_sensitive_data(f"Failed to generate new explanation, Error message: {e}")
sentry_sdk.capture_exception(e)
return RankErrorResponseSchema(error=str(e))
debug_log_sensitive_data(f"AIClient optimization response:\n{output}")
if output.usage is not None:
if output.raw_response.usage is not None:
ph(
user_id,
"aiservice-optimize-openai-usage",
properties={"model": rank_model.name, "n": 1, "usage": output.usage.model_dump_json()},
properties={"model": rank_model.name, "n": 1, "usage": output.raw_response.usage.model_dump_json()},
)
# parse xml tag for explanation, ranking
try:
explanation_match = re.search(explain_regex_pattern, output.choices[0].message.content)
explanation_match = re.search(explain_regex_pattern, output.content)
explanation = explanation_match.group(1)
except: # noqa: E722
# TODO add logging instead of print("No explanation found")
@ -139,7 +168,7 @@ async def rank_optimizations( # noqa: D417
pass
# still proceeding instead of returning because ranking is important
try:
ranking_match = re.search(rank_regex_pattern, output.choices[0].message.content)
ranking_match = re.search(rank_regex_pattern, output.content)
# TODO better parsing, could be only comma separated, need to handle all edge cases
ranking = list(map(int, ranking_match.group(1).strip().split(",")))
except: # noqa: E722
@ -149,6 +178,7 @@ async def rank_optimizations( # noqa: D417
# TODO need to handle all edge cases
# TODO add logging instead of print("Invalid ranking")
return RankErrorResponseSchema(error="No ranking found")
return RankResponseSchema(ranking=ranking, explanation=explanation)
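# The parsing above implies the model answers with XML-style tags, roughly like
# the hypothetical reply below (the exact tag names live in explain_regex_pattern
# and rank_regex_pattern, which this diff does not show):
#
#     <explanation>Candidate 2 vectorizes the hot loop ...</explanation>
#     <ranking>2,1,3</ranking>
#
# The ranking group is split on commas and cast to int, so any other separator
# falls into the "No ranking found" error path.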

View file

@ -11,7 +11,7 @@ from codeflash.code_utils.code_utils import ellipsis_in_ast, get_imports_from_fi
from codeflash.models.models import TestsInFile
from codeflash.verification.gen_regression_tests import print_message_delta, print_messages
from aiservice.models.aimodels import EXECUTE_MODEL, EXPLAIN_MODEL, LLM, PLAN_MODEL
from aiservice.llm import EXECUTE_MODEL, EXPLAIN_MODEL, LLM, PLAN_MODEL
def regression_tests_from_function_with_inspiration(

View file

@ -1,9 +1,9 @@
from __future__ import annotations
from functools import lru_cache
from typing import TYPE_CHECKING, cast
from libcst import CSTTransformer, ImportAlias, ImportFrom, MetadataWrapper, Name, parse_expression, parse_module
from functools import lru_cache
if TYPE_CHECKING:
from libcst import (

View file

@ -26,6 +26,7 @@ class TestGenSchema(Schema):
codeflash_version: str | None = None
test_index: int | None = None
is_async: bool | None = False
call_sequence: int | None = None
@model_validator(mode="after")
def helper_function_names_validator(self) -> Self:

View file

@ -8,8 +8,8 @@ from pathlib import Path
from typing import SupportsIndex
from aiservice.common_utils import parse_python_version, safe_isort
from aiservice.env_specific import debug_log_sensitive_data, llm_clients
from aiservice.models.aimodels import EXECUTE_MODEL, EXPLAIN_MODEL, LLM, PLAN_MODEL, calculate_llm_cost
from aiservice.env_specific import debug_log_sensitive_data
from aiservice.llm import EXECUTE_MODEL, EXPLAIN_MODEL, LLM, PLAN_MODEL, calculate_llm_cost, call_llm
from aiservice.models.functions_to_optimize import FunctionToOptimize
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
@ -141,25 +141,25 @@ async def generate_regression_tests_from_function(
if print_text:
print_messages(explain_messages)
try:
llm_client = llm_clients[explain_model.model_type]
if llm_client is None:
raise TestGenerationFailedException(f"LLM client for model type '{explain_model.model_type}' is not available")
explanation_response = await llm_client.with_options(max_retries=2).chat.completions.create(
model=explain_model.name, messages=explain_messages, temperature=temperature
explanation_response = await call_llm(
model_name=explain_model.name,
model_type=explain_model.model_type,
messages=explain_messages,
temperature=temperature,
)
total_llm_cost += calculate_llm_cost(explanation_response, explain_model) or 0.0
total_llm_cost += calculate_llm_cost(explanation_response.raw_response, explain_model)
except Exception as e:
logging.exception("OpenAI client error in explain step")
sentry_sdk.capture_exception(e)
raise TestGenerationFailedException(e) from e
debug_log_sensitive_data(f"OpenAIClient explanation response:\n{explanation_response.model_dump_json(indent=2)}")
if explanation_response.usage is not None:
debug_log_sensitive_data(f"OpenAIClient explanation response:\n{explanation_response.raw_response.model_dump_json(indent=2)}")
if explanation_response.raw_response.usage is not None:
ph(
user_id,
"aiservice-testgen-explain-openai-usage",
properties={"model": explain_model.name, "usage": explanation_response.usage.json()},
properties={"model": explain_model.name, "usage": explanation_response.raw_response.usage.json()},
)
explanation = explanation_response.choices[0].message.content
explanation = explanation_response.content
explain_assistant_message = {"role": "assistant", "content": explanation}
# Step 1b: Fetch relevant data from the database to use as inputs based on function explanation
@ -170,19 +170,19 @@ async def generate_regression_tests_from_function(
if print_text:
print_messages(explain_messages)
try:
llm_client = llm_clients[execute_model.model_type]
if llm_client is None:
raise TestGenerationFailedException(f"LLM client for model type '{execute_model.model_type}' is not available")
fetch_data_response = await llm_client.with_options(max_retries=2).chat.completions.create(
model=execute_model.name, messages=fetch_data_messages, temperature=temperature
fetch_data_response = await call_llm(
model_name=execute_model.name,
model_type=execute_model.model_type,
messages=fetch_data_messages,
temperature=temperature,
)
total_llm_cost += calculate_llm_cost(fetch_data_response, execute_model) or 0.0
total_llm_cost += calculate_llm_cost(fetch_data_response.raw_response, execute_model)
except Exception as e:
logging.exception("OpenAI client error in explain step")
sentry_sdk.capture_exception(e)
raise TestGenerationFailedException(e) from e
fetch_data_function = fetch_data_response.choices[0].message.content
fetch_data_function = fetch_data_response.content
fetch_data_function = fetch_data_function.split("```python")[1].split("```")[0].strip()
# Step 1c: Run the function to get the data
@ -219,26 +219,26 @@ To help unit test the function above, list diverse scenarios that the function s
if print_text:
print_messages([plan_user_message])
try:
llm_client = llm_clients[plan_model.model_type]
if llm_client is None:
raise TestGenerationFailedException(f"LLM client for model type '{plan_model.model_type}' is not available")
plan_response = await llm_client.with_options(max_retries=2).chat.completions.create(
model=plan_model.name, messages=plan_messages, temperature=temperature
plan_response = await call_llm(
model_name=plan_model.name,
model_type=plan_model.model_type,
messages=plan_messages,
temperature=temperature,
)
total_llm_cost += calculate_llm_cost(plan_response, plan_model) or 0.0
total_llm_cost += calculate_llm_cost(plan_response.raw_response, plan_model)
except Exception as e:
logging.exception("OpenAI client error in plan step")
sentry_sdk.capture_exception(e)
raise TestGenerationFailedException(e) from e
debug_log_sensitive_data(f"OpenAIClient plan response:\n{plan_response.model_dump_json(indent=2)}")
if plan_response.usage is not None:
debug_log_sensitive_data(f"OpenAIClient plan response:\n{plan_response.raw_response.model_dump_json(indent=2)}")
if plan_response.raw_response.usage is not None:
ph(
user_id,
"aiservice-testgen-plan-openai-usage",
properties={"model": plan_model.name, "usage": plan_response.usage.json()},
properties={"model": plan_model.name, "usage": plan_response.raw_response.usage.json()},
)
plan = plan_response.choices[0].message.content
plan = plan_response.content
plan_assistant_message = {"role": "assistant", "content": plan}
# Step 2b: If the plan is short, ask GPT to elaborate further
@ -261,23 +261,23 @@ To help unit test the function above, list diverse scenarios that the function s
if print_text:
print_messages([elaboration_user_message])
try:
llm_client = llm_clients[plan_model.model_type]
if llm_client is None:
raise TestGenerationFailedException(f"LLM client for model type '{plan_model.model_type}' is not available")
elaboration_response = await llm_client.with_options(max_retries=2).chat.completions.create(
model=plan_model.name, messages=elaboration_messages, temperature=temperature
elaboration_response = await call_llm(
model_name=plan_model.name,
model_type=plan_model.model_type,
messages=elaboration_messages,
temperature=temperature,
)
total_llm_cost += calculate_llm_cost(elaboration_response, plan_model) or 0.0
total_llm_cost += calculate_llm_cost(elaboration_response.raw_response, plan_model)
except Exception as e:
logging.exception("OpenAI client error in elaboration step")
sentry_sdk.capture_exception(e)
raise TestGenerationFailedException(e) from e
debug_log_sensitive_data(
f"OpenAIClient elaboration response:\n{elaboration_response.model_dump_json(indent=2)}"
f"OpenAIClient elaboration response:\n{elaboration_response.raw_response.model_dump_json(indent=2)}"
)
elaboration = elaboration_response.choices[0].message.content
elaboration = elaboration_response.content
elaboration_assistant_message = {"role": "assistant", "content": elaboration}
# Step 3: Generate the unit test
@ -314,25 +314,25 @@ To help unit test the function above, list diverse scenarios that the function s
tries = 2
while tries > 0:
try:
llm_client = llm_clients[execute_model.model_type]
if llm_client is None:
raise TestGenerationFailedException(f"LLM client for model type '{execute_model.model_type}' is not available")
execute_response = await llm_client.with_options(max_retries=2).chat.completions.create(
model=execute_model.name, messages=execute_messages, temperature=temperature
execute_response = await call_llm(
model_name=execute_model.name,
model_type=execute_model.model_type,
messages=execute_messages,
temperature=temperature,
)
total_llm_cost += calculate_llm_cost(execute_response, execute_model) or 0.0
total_llm_cost += calculate_llm_cost(execute_response.raw_response, execute_model)
except Exception as e:
logging.exception("OpenAI client error in execute step")
sentry_sdk.capture_exception(e)
raise TestGenerationFailedException(e) from e
debug_log_sensitive_data(f"OpenAIClient execute response:\n{execute_response.model_dump_json(indent=2)}")
if execute_response.usage is not None:
debug_log_sensitive_data(f"OpenAIClient execute response:\n{execute_response.raw_response.model_dump_json(indent=2)}")
if execute_response.raw_response.usage is not None:
ph(
user_id,
"aiservice-testgen-execute-openai-usage",
properties={"model": execute_model.name, "usage": execute_response.usage.json()},
properties={"model": execute_model.name, "usage": execute_response.raw_response.usage.json()},
)
execution_output = execute_response.choices[0].message.content
execution_output = execute_response.content
# check the output for errors
code = execution_output.split("```python")[1].split("```")[0].strip()
@ -353,7 +353,7 @@ To help unit test the function above, list diverse scenarios that the function s
if tries == 0:
raise TestGenerationFailedException("Failed to generate test code after 2 tries.")
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost, user_id=user_id)
# return the unit test as a string
return code

View file

@ -10,16 +10,17 @@ from typing import TYPE_CHECKING, SupportsIndex
import sentry_sdk
import stamina
from aiservice.analytics.posthog import ph
from aiservice.common_utils import parse_python_version, safe_isort, should_hack_for_demo, validate_trace_id
from aiservice.env_specific import IS_PRODUCTION, debug_log_sensitive_data, llm_clients
from aiservice.models.aimodels import EXECUTE_MODEL, calculate_llm_cost
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
from ninja import NinjaAPI
from ninja.errors import HttpError
from openai import OpenAIError
from aiservice.analytics.posthog import ph
from aiservice.common_utils import parse_python_version, safe_isort, should_hack_for_demo, validate_trace_id
from aiservice.env_specific import IS_PRODUCTION, debug_log_sensitive_data
from aiservice.llm import EXECUTE_MODEL, LLMResponse, calculate_llm_cost, call_llm
from aiservice.observability.decorators import observe_llm_call
from log_features.log_event import update_optimization_cost
from log_features.log_features import log_features
from testgen.instrumentation.edit_generated_test import parse_module_to_cst, replace_definition_with_import
from testgen.instrumentation.instrument_new_tests import instrument_test_source
from testgen.models import (
@ -34,7 +35,7 @@ from testgen.postprocessing.postprocess_pipeline import postprocessing_testgen_p
from testgen.testgen_context import BaseTestGenContext, TestGenContextData
if TYPE_CHECKING:
from aiservice.models.aimodels import LLM
from aiservice.llm import LLM
from authapp.auth import AuthBearer
testgen_api = NinjaAPI(urls_namespace="testgen")
@@ -189,6 +190,31 @@ def parse_and_validate_llm_output(
raise
@observe_llm_call("test_generation")
async def call_testgen_llm(
trace_id: str,
model: LLM,
messages: list[dict[str, str]],
temperature: float,
user_id: str | None = None,
python_version: str | None = None,
context: dict | None = None,
) -> LLMResponse:
"""Call LLM for test generation with automatic observability.
This function is decorated with @observe_llm_call which automatically:
- Records call start (non-blocking)
- Captures timing and token usage
- Records completion (non-blocking)
- Handles errors automatically
All observability runs in the background without blocking the LLM call.
"""
return await call_llm(
model_name=model.name, model_type=model.model_type, messages=messages, temperature=temperature
)
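
Aside: the docstring above describes @observe_llm_call, but the decorator itself lives in aiservice/observability/decorators.py, outside this diff. A rough sketch of the pattern it describes, using background tasks so recording never blocks the awaited call; the _record sink is a placeholder, not the real API:

import asyncio
import functools
import time

async def _record(event: str, **fields) -> None:  # placeholder observability sink
    ...

def observe_llm_call(call_type: str):
    def decorator(fn):
        @functools.wraps(fn)
        async def wrapper(*args, **kwargs):
            loop = asyncio.get_running_loop()
            loop.create_task(_record("start", call_type=call_type))  # non-blocking
            started = time.monotonic()
            try:
                response = await fn(*args, **kwargs)
            except Exception as exc:
                loop.create_task(_record("error", call_type=call_type, error=repr(exc)))
                raise
            loop.create_task(_record("done", call_type=call_type, seconds=time.monotonic() - started))
            return response
        return wrapper
    return decorator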
@stamina.retry(on=(SyntaxError, ValueError, OpenAIError), attempts=2)
async def generate_and_validate_test_code(
messages: list[dict[str, str]],
@@ -201,30 +227,41 @@ async def generate_and_validate_test_code(
cost_tracker: list[float],
user_id: str,
posthog_event_suffix: str,
trace_id: str = "",
call_sequence: int | None = None,
) -> str:
llm_client = llm_clients[execute_model.model_type]
response = await llm_client.with_options(max_retries=2).chat.completions.create(
model=model.name, messages=messages, temperature=temperature
obs_context: dict | None = {"call_sequence": call_sequence} if call_sequence else None
response = await call_testgen_llm(
trace_id=trace_id,
model=model,
messages=messages,
temperature=temperature,
user_id=user_id,
python_version=".".join(str(v) for v in python_version),
context=obs_context,
)
cost = calculate_llm_cost(response, execute_model) or 0.0
cost = calculate_llm_cost(response.raw_response, execute_model)
cost_tracker.append(cost)
debug_log_sensitive_data(f"OpenAIClient {error_context}execute response:\n{response.model_dump_json(indent=2)}")
debug_log_sensitive_data(
f"OpenAIClient {error_context}execute response:\n{response.raw_response.model_dump_json(indent=2)}"
)
if response.usage:
if response.raw_response.usage:
ph(
user_id,
f"aiservice-testgen-{posthog_event_suffix}execute-openai-usage",
properties={"model": execute_model.name, "usage": response.usage.model_dump_json()},
properties={"model": execute_model.name, "usage": response.raw_response.usage.model_dump_json()},
)
return parse_and_validate_llm_output(
response_content=response.choices[0].message.content,
ctx=ctx,
python_version=python_version,
error_context=error_context,
# Parse and validate
validated_code = parse_and_validate_llm_output(
response_content=response.content, ctx=ctx, python_version=python_version, error_context=error_context
)
return validated_code
@stamina.retry(on=TestGenerationFailedError, attempts=2)
async def generate_regression_tests_from_function(
@@ -238,6 +275,7 @@ async def generate_regression_tests_from_function(
temperature: float = 0.4,
is_async: bool = False, # noqa: FBT001, FBT002
trace_id: str = "",
call_sequence: int | None = None,
) -> tuple[str, str | None, str | None]:
execute_messages, posthog_event_suffix, error_context = build_prompt(
ctx=ctx, function_name=function_name, unit_test_package=unit_test_package, is_async=is_async
@@ -256,9 +294,11 @@ async def generate_regression_tests_from_function(
cost_tracker=cost_tracker,
user_id=user_id,
posthog_event_suffix=posthog_event_suffix,
trace_id=trace_id,
call_sequence=call_sequence,
)
total_llm_cost = sum(cost_tracker)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost, user_id=user_id)
processed_cst = postprocessing_testgen_pipeline(
parse_module_to_cst(validated_code), data.helper_function_names, data.function_to_optimize, data.module_path
@@ -290,7 +330,7 @@ async def generate_regression_tests_from_function(
return generated_test_source, instrumented_behavior_tests, instrumented_perf_tests # noqa: TRY300
except (SyntaxError, ValueError) as e:
total_llm_cost = sum(cost_tracker)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost)
await update_optimization_cost(trace_id=trace_id, cost=total_llm_cost, user_id=user_id)
msg = f"Failed to generate valid {error_context}test code after {len(cost_tracker)} tries."
raise TestGenerationFailedError(msg) from e
@@ -390,6 +430,7 @@ async def testgen(
unit_test_package=data.test_framework,
is_async=data.is_async,
trace_id=data.trace_id,
call_sequence=data.call_sequence,
)
ph(request.user, "aiservice-testgen-tests-generated")

View file

@@ -124,11 +124,24 @@ async def test_allows_usage_within_limit(middleware, rf, monkeypatch):
request.user = type("User", (), {"id": 1})()
fake_sub = FakeSubscription(used=50, limit=100, lifetime=10)
# After atomic update, the new used value would be 50 + 10 (optimize cost) = 60
updated_sub = FakeSubscription(used=60, limit=100, lifetime=20)
# Track whether aupdate has been called to determine which subscription to return
update_called = {"value": False}
class FakeFilter:
async def afirst(self):
# Return original sub before update, updated sub after update
if update_called["value"]:
return updated_sub
return fake_sub
async def aupdate(self, **kwargs):
# Simulate atomic update - mark that update was called
update_called["value"] = True
return 1
monkeypatch.setattr(
"aiservice.middleware.track_usage_middleware.Subscriptions.objects.filter", lambda **kwargs: FakeFilter()
)
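
Aside: the fakes above simulate a single atomic filter-then-aupdate. A minimal sketch of the pattern being simulated, with field names assumed from FakeSubscription; the middleware's real query is not shown in this hunk:

from django.db.models import F
from aiservice.middleware.track_usage_middleware import Subscriptions  # path taken from the monkeypatch above

async def try_consume_quota(user_id: int, cost: int) -> bool:
    # One UPDATE ... WHERE used + cost <= limit: no read-modify-write race.
    updated = await Subscriptions.objects.filter(
        user_id=user_id, used__lte=F("limit") - cost
    ).aupdate(used=F("used") + cost)
    return updated == 1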

View file

@@ -1,10 +1,8 @@
from code_repair.code_repair_context import CodeRepairContext, CodeRepairContextData
from optimizer.diff_patches_utils.seach_and_replace import apply_patches
def test_code_repair_single_file() -> None:
original_code = """```python:demo.py
import math
from typing import List, Tuple, Optional
@@ -92,7 +90,6 @@ def calculate_portfolio_metrics(
```
"""
llm_response = """Looking at the test failure and comparing the original and modified code, I can see several issues in the modified implementation:
1. **Hardcoded volatility**: The volatility is hardcoded to `2` instead of being calculated
@@ -210,7 +207,7 @@ The key changes I made:
6. **Fixed dictionary formatting**: Changed from double quotes to single quotes to match original formatting
These changes align the modified code with the original implementation's behavior, ensuring that the test for zero volatility passes (when cash investment has 0% return, the volatility should indeed be 0.0)."""
ctx = CodeRepairContext(CodeRepairContextData(original_code, optimized_code, ""), "" , "")
ctx = CodeRepairContext(CodeRepairContextData(original_code, optimized_code, ""), "", "")
refined_optimization = ctx.apply_patches_to_optimized_code(llm_response)
print(refined_optimization)
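
Aside: neither apply_patches nor the patch format embedded in llm_response appears in this hunk. A generic sketch of the search-and-replace idea it exercises, with assumed SEARCH/REPLACE markers rather than the repository's actual format:

import re

_BLOCK = re.compile(r"<<<<<<< SEARCH\n(.*?)\n=======\n(.*?)\n>>>>>>> REPLACE", re.DOTALL)

def apply_search_replace(source: str, patch_text: str) -> str:
    for search, replace in _BLOCK.findall(patch_text):
        if search not in source:
            raise ValueError("search block not found; refusing a partial patch")
        source = source.replace(search, replace, 1)  # replace first occurrence only
    return source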

View file

@@ -8,6 +8,7 @@ documentation to unchanged code.
import ast
import libcst as cst
from optimizer.postprocess import clean_extraneous_comments

View file

@@ -2,7 +2,8 @@ version = 1
revision = 3
requires-python = ">=3.12.1, <4"
resolution-markers = [
"python_full_version >= '3.13'",
"python_full_version >= '3.14'",
"python_full_version == '3.13.*'",
"python_full_version < '3.13'",
]
@@ -118,6 +119,7 @@ name = "aiservice"
version = "0.0.1"
source = { editable = "." }
dependencies = [
{ name = "anthropic" },
{ name = "dj-database-url" },
{ name = "django" },
{ name = "django-ninja" },
@@ -153,6 +155,7 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "anthropic", specifier = ">=0.75.0" },
{ name = "dj-database-url", specifier = ">=2.2.0,<3" },
{ name = "django", specifier = ">=5.0.6,<6" },
{ name = "django-ninja", specifier = ">=1.3.0,<2" },
@@ -195,36 +198,54 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
]
[[package]]
name = "anthropic"
version = "0.75.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "distro" },
{ name = "docstring-parser" },
{ name = "httpx" },
{ name = "jiter" },
{ name = "pydantic" },
{ name = "sniffio" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/04/1f/08e95f4b7e2d35205ae5dcbb4ae97e7d477fc521c275c02609e2931ece2d/anthropic-0.75.0.tar.gz", hash = "sha256:e8607422f4ab616db2ea5baacc215dd5f028da99ce2f022e33c7c535b29f3dfb", size = 439565, upload-time = "2025-11-24T20:41:45.28Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/60/1c/1cd02b7ae64302a6e06724bf80a96401d5313708651d277b1458504a1730/anthropic-0.75.0-py3-none-any.whl", hash = "sha256:ea8317271b6c15d80225a9f3c670152746e88805a7a61e14d4a374577164965b", size = 388164, upload-time = "2025-11-24T20:41:43.587Z" },
]
[[package]]
name = "anyio"
version = "4.11.0"
version = "4.12.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" }
sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" },
{ url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" },
]
[[package]]
name = "asgiref"
version = "3.10.0"
version = "3.11.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/46/08/4dfec9b90758a59acc6be32ac82e98d1fbfc321cb5cfa410436dbacf821c/asgiref-3.10.0.tar.gz", hash = "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e", size = 37483, upload-time = "2025-10-05T09:15:06.557Z" }
sdist = { url = "https://files.pythonhosted.org/packages/76/b9/4db2509eabd14b4a8c71d1b24c8d5734c52b8560a7b1e1a8b56c8d25568b/asgiref-3.11.0.tar.gz", hash = "sha256:13acff32519542a1736223fb79a715acdebe24286d98e8b164a73085f40da2c4", size = 37969, upload-time = "2025-11-19T15:32:20.106Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/17/9c/fc2331f538fbf7eedba64b2052e99ccf9ba9d6888e2f41441ee28847004b/asgiref-3.10.0-py3-none-any.whl", hash = "sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734", size = 24050, upload-time = "2025-10-05T09:15:05.11Z" },
{ url = "https://files.pythonhosted.org/packages/91/be/317c2c55b8bbec407257d45f5c8d1b6867abc76d12043f2d3d58c538a4ea/asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d", size = 24096, upload-time = "2025-11-19T15:32:19.004Z" },
]
[[package]]
name = "asttokens"
version = "3.0.0"
version = "3.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978, upload-time = "2024-11-30T04:30:14.439Z" }
sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, upload-time = "2025-11-15T16:43:48.578Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918, upload-time = "2024-11-30T04:30:10.946Z" },
{ url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" },
]
[[package]]
@@ -247,11 +268,11 @@ wheels = [
[[package]]
name = "certifi"
version = "2025.10.5"
version = "2025.11.12"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" },
{ url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
]
[[package]]
@@ -313,14 +334,14 @@ wheels = [
[[package]]
name = "click"
version = "8.3.0"
version = "8.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" }
sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" },
{ url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
]
[[package]]
@@ -365,29 +386,38 @@ wheels = [
[[package]]
name = "django"
version = "5.2.7"
version = "5.2.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asgiref" },
{ name = "sqlparse" },
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/96/bd84e2bb997994de8bcda47ae4560991084e86536541d7214393880f01a8/django-5.2.7.tar.gz", hash = "sha256:e0f6f12e2551b1716a95a63a1366ca91bbcd7be059862c1b18f989b1da356cdd", size = 10865812, upload-time = "2025-10-01T14:22:12.081Z" }
sdist = { url = "https://files.pythonhosted.org/packages/eb/1c/188ce85ee380f714b704283013434976df8d3a2df8e735221a02605b6794/django-5.2.9.tar.gz", hash = "sha256:16b5ccfc5e8c27e6c0561af551d2ea32852d7352c67d452ae3e76b4f6b2ca495", size = 10848762, upload-time = "2025-12-02T14:01:08.418Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8f/ef/81f3372b5dd35d8d354321155d1a38894b2b766f576d0abffac4d8ae78d9/django-5.2.7-py3-none-any.whl", hash = "sha256:59a13a6515f787dec9d97a0438cd2efac78c8aca1c80025244b0fe507fe0754b", size = 8307145, upload-time = "2025-10-01T14:22:49.476Z" },
{ url = "https://files.pythonhosted.org/packages/17/b0/7f42bfc38b8f19b78546d47147e083ed06e12fc29c42da95655e0962c6c2/django-5.2.9-py3-none-any.whl", hash = "sha256:3a4ea88a70370557ab1930b332fd2887a9f48654261cdffda663fef5976bb00a", size = 8290652, upload-time = "2025-12-02T14:01:03.485Z" },
]
[[package]]
name = "django-ninja"
version = "1.4.5"
version = "1.5.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
{ name = "pydantic" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fa/9a/558489e0e25173772fbd826306b7d1777e80285b02d69e0e1aaec41e3eec/django_ninja-1.4.5.tar.gz", hash = "sha256:aa1a2ee2b22c5f1c2f4bfbc004386be7074cbfaf133680c2b359a31221965503", size = 3710511, upload-time = "2025-10-19T18:28:02.362Z" }
sdist = { url = "https://files.pythonhosted.org/packages/42/8e/10b4299aa37017bb026ae091b2192d05b3d4b78a9dc57d59c4f362551907/django_ninja-1.5.1.tar.gz", hash = "sha256:6acda68a64d60934c6fdccb4d97c3ac7f02cfefd78a5d87ae053effe081b17c7", size = 3719868, upload-time = "2025-12-04T11:35:46.615Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/49/e3/168274a8def4b9a2fb2540319a68914e8e4e529cd7f7b5f1ba8939d011bc/django_ninja-1.4.5-py3-none-any.whl", hash = "sha256:d779702ddc6e17b10739049ddb075a6a1e6c6270bdc04e0b0429f6adbf670373", size = 2426449, upload-time = "2025-10-19T18:28:00.518Z" },
{ url = "https://files.pythonhosted.org/packages/0a/fc/124c3f999fbc77680777ff331ac9552227d7a6753639ef66dbba780fc6b4/django_ninja-1.5.1-py3-none-any.whl", hash = "sha256:135aaa1117dce8dfd7a1e80b4487a8cccee3a4182c3c8b562d08ea94e4d2cbdf", size = 2426577, upload-time = "2025-12-04T11:35:44.656Z" },
]
[[package]]
name = "docstring-parser"
version = "0.17.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" },
]
[[package]]
@@ -563,15 +593,15 @@ wheels = [
[[package]]
name = "httpx-aiohttp"
version = "0.1.9"
version = "0.1.12"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
{ name = "httpx" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d8/f2/9a86ce9bc48cf57dabb3a3160dfed26d8bbe5a2478a51f9d1dbf89f2f1fc/httpx_aiohttp-0.1.9.tar.gz", hash = "sha256:4ee8b22e6f2e7c80cd03be29eff98bfe7d89bd77f021ce0b578ee76b73b4bfe6", size = 206023, upload-time = "2025-10-15T08:52:57.475Z" }
sdist = { url = "https://files.pythonhosted.org/packages/63/2c/b894861cecf030fb45675ea24aa55b5722e97c602a163d872fca66c5a6d8/httpx_aiohttp-0.1.12.tar.gz", hash = "sha256:81feec51fd82c0ecfa0e9aaf1b1a6c2591260d5e2bcbeb7eb0277a78e610df2c", size = 275945, upload-time = "2025-12-12T10:12:15.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a1/db/5cfa8254a86c34a1ab7fe0dbec9f81bb5ebd831cbdd65aa4be4f37027804/httpx_aiohttp-0.1.9-py3-none-any.whl", hash = "sha256:3dc2845568b07742588710fcf3d72db2cbcdf2acc93376edf85f789c4d8e5fda", size = 6180, upload-time = "2025-10-15T08:52:56.521Z" },
{ url = "https://files.pythonhosted.org/packages/16/8d/85c9701e9af72ca132a1783e2a54364a90c6da832304416a30fc11196ab2/httpx_aiohttp-0.1.12-py3-none-any.whl", hash = "sha256:5b0eac39a7f360fa7867a60bcb46bb1024eada9c01cbfecdb54dc1edb3fb7141", size = 6367, upload-time = "2025-12-12T10:12:14.018Z" },
]
[[package]]
@@ -635,122 +665,174 @@ wheels = [
[[package]]
name = "jiter"
version = "0.11.1"
version = "0.12.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a3/68/0357982493a7b20925aece061f7fb7a2678e3b232f8d73a6edb7e5304443/jiter-0.11.1.tar.gz", hash = "sha256:849dcfc76481c0ea0099391235b7ca97d7279e0fa4c86005457ac7c88e8b76dc", size = 168385, upload-time = "2025-10-17T11:31:15.186Z" }
sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/15/8b/318e8af2c904a9d29af91f78c1e18f0592e189bbdb8a462902d31fe20682/jiter-0.11.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c92148eec91052538ce6823dfca9525f5cfc8b622d7f07e9891a280f61b8c96c", size = 305655, upload-time = "2025-10-17T11:29:18.859Z" },
{ url = "https://files.pythonhosted.org/packages/f7/29/6c7de6b5d6e511d9e736312c0c9bfcee8f9b6bef68182a08b1d78767e627/jiter-0.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ecd4da91b5415f183a6be8f7158d127bdd9e6a3174138293c0d48d6ea2f2009d", size = 315645, upload-time = "2025-10-17T11:29:20.889Z" },
{ url = "https://files.pythonhosted.org/packages/ac/5f/ef9e5675511ee0eb7f98dd8c90509e1f7743dbb7c350071acae87b0145f3/jiter-0.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7e3ac25c00b9275684d47aa42febaa90a9958e19fd1726c4ecf755fbe5e553b", size = 348003, upload-time = "2025-10-17T11:29:22.712Z" },
{ url = "https://files.pythonhosted.org/packages/56/1b/abe8c4021010b0a320d3c62682769b700fb66f92c6db02d1a1381b3db025/jiter-0.11.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d7305c0a841858f866cd459cd9303f73883fb5e097257f3d4a3920722c69d4", size = 365122, upload-time = "2025-10-17T11:29:24.408Z" },
{ url = "https://files.pythonhosted.org/packages/2a/2d/4a18013939a4f24432f805fbd5a19893e64650b933edb057cd405275a538/jiter-0.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e86fa10e117dce22c547f31dd6d2a9a222707d54853d8de4e9a2279d2c97f239", size = 488360, upload-time = "2025-10-17T11:29:25.724Z" },
{ url = "https://files.pythonhosted.org/packages/f0/77/38124f5d02ac4131f0dfbcfd1a19a0fac305fa2c005bc4f9f0736914a1a4/jiter-0.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae5ef1d48aec7e01ee8420155d901bb1d192998fa811a65ebb82c043ee186711", size = 376884, upload-time = "2025-10-17T11:29:27.056Z" },
{ url = "https://files.pythonhosted.org/packages/7b/43/59fdc2f6267959b71dd23ce0bd8d4aeaf55566aa435a5d00f53d53c7eb24/jiter-0.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb68e7bf65c990531ad8715e57d50195daf7c8e6f1509e617b4e692af1108939", size = 358827, upload-time = "2025-10-17T11:29:28.698Z" },
{ url = "https://files.pythonhosted.org/packages/7d/d0/b3cc20ff5340775ea3bbaa0d665518eddecd4266ba7244c9cb480c0c82ec/jiter-0.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43b30c8154ded5845fa454ef954ee67bfccce629b2dea7d01f795b42bc2bda54", size = 385171, upload-time = "2025-10-17T11:29:30.078Z" },
{ url = "https://files.pythonhosted.org/packages/d2/bc/94dd1f3a61f4dc236f787a097360ec061ceeebebf4ea120b924d91391b10/jiter-0.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:586cafbd9dd1f3ce6a22b4a085eaa6be578e47ba9b18e198d4333e598a91db2d", size = 518359, upload-time = "2025-10-17T11:29:31.464Z" },
{ url = "https://files.pythonhosted.org/packages/7e/8c/12ee132bd67e25c75f542c227f5762491b9a316b0dad8e929c95076f773c/jiter-0.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:677cc2517d437a83bb30019fd4cf7cad74b465914c56ecac3440d597ac135250", size = 509205, upload-time = "2025-10-17T11:29:32.895Z" },
{ url = "https://files.pythonhosted.org/packages/39/d5/9de848928ce341d463c7e7273fce90ea6d0ea4343cd761f451860fa16b59/jiter-0.11.1-cp312-cp312-win32.whl", hash = "sha256:fa992af648fcee2b850a3286a35f62bbbaeddbb6dbda19a00d8fbc846a947b6e", size = 205448, upload-time = "2025-10-17T11:29:34.217Z" },
{ url = "https://files.pythonhosted.org/packages/ee/b0/8002d78637e05009f5e3fb5288f9d57d65715c33b5d6aa20fd57670feef5/jiter-0.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:88b5cae9fa51efeb3d4bd4e52bfd4c85ccc9cac44282e2a9640893a042ba4d87", size = 204285, upload-time = "2025-10-17T11:29:35.446Z" },
{ url = "https://files.pythonhosted.org/packages/9f/a2/bb24d5587e4dff17ff796716542f663deee337358006a80c8af43ddc11e5/jiter-0.11.1-cp312-cp312-win_arm64.whl", hash = "sha256:9a6cae1ab335551917f882f2c3c1efe7617b71b4c02381e4382a8fc80a02588c", size = 188712, upload-time = "2025-10-17T11:29:37.027Z" },
{ url = "https://files.pythonhosted.org/packages/7c/4b/e4dd3c76424fad02a601d570f4f2a8438daea47ba081201a721a903d3f4c/jiter-0.11.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:71b6a920a5550f057d49d0e8bcc60945a8da998019e83f01adf110e226267663", size = 305272, upload-time = "2025-10-17T11:29:39.249Z" },
{ url = "https://files.pythonhosted.org/packages/67/83/2cd3ad5364191130f4de80eacc907f693723beaab11a46c7d155b07a092c/jiter-0.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b3de72e925388453a5171be83379549300db01284f04d2a6f244d1d8de36f94", size = 314038, upload-time = "2025-10-17T11:29:40.563Z" },
{ url = "https://files.pythonhosted.org/packages/d3/3c/8e67d9ba524e97d2f04c8f406f8769a23205026b13b0938d16646d6e2d3e/jiter-0.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc19dd65a2bd3d9c044c5b4ebf657ca1e6003a97c0fc10f555aa4f7fb9821c00", size = 345977, upload-time = "2025-10-17T11:29:42.009Z" },
{ url = "https://files.pythonhosted.org/packages/8d/a5/489ce64d992c29bccbffabb13961bbb0435e890d7f2d266d1f3df5e917d2/jiter-0.11.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d58faaa936743cd1464540562f60b7ce4fd927e695e8bc31b3da5b914baa9abd", size = 364503, upload-time = "2025-10-17T11:29:43.459Z" },
{ url = "https://files.pythonhosted.org/packages/d4/c0/e321dd83ee231d05c8fe4b1a12caf1f0e8c7a949bf4724d58397104f10f2/jiter-0.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:902640c3103625317291cb73773413b4d71847cdf9383ba65528745ff89f1d14", size = 487092, upload-time = "2025-10-17T11:29:44.835Z" },
{ url = "https://files.pythonhosted.org/packages/f9/5e/8f24ec49c8d37bd37f34ec0112e0b1a3b4b5a7b456c8efff1df5e189ad43/jiter-0.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30405f726e4c2ed487b176c09f8b877a957f535d60c1bf194abb8dadedb5836f", size = 376328, upload-time = "2025-10-17T11:29:46.175Z" },
{ url = "https://files.pythonhosted.org/packages/7f/70/ded107620e809327cf7050727e17ccfa79d6385a771b7fe38fb31318ef00/jiter-0.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3217f61728b0baadd2551844870f65219ac4a1285d5e1a4abddff3d51fdabe96", size = 356632, upload-time = "2025-10-17T11:29:47.454Z" },
{ url = "https://files.pythonhosted.org/packages/19/53/c26f7251613f6a9079275ee43c89b8a973a95ff27532c421abc2a87afb04/jiter-0.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1364cc90c03a8196f35f396f84029f12abe925415049204446db86598c8b72c", size = 384358, upload-time = "2025-10-17T11:29:49.377Z" },
{ url = "https://files.pythonhosted.org/packages/84/16/e0f2cc61e9c4d0b62f6c1bd9b9781d878a427656f88293e2a5335fa8ff07/jiter-0.11.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:53a54bf8e873820ab186b2dca9f6c3303f00d65ae5e7b7d6bda1b95aa472d646", size = 517279, upload-time = "2025-10-17T11:29:50.968Z" },
{ url = "https://files.pythonhosted.org/packages/60/5c/4cd095eaee68961bca3081acbe7c89e12ae24a5dae5fd5d2a13e01ed2542/jiter-0.11.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7e29aca023627b0e0c2392d4248f6414d566ff3974fa08ff2ac8dbb96dfee92a", size = 508276, upload-time = "2025-10-17T11:29:52.619Z" },
{ url = "https://files.pythonhosted.org/packages/4f/25/f459240e69b0e09a7706d96ce203ad615ca36b0fe832308d2b7123abf2d0/jiter-0.11.1-cp313-cp313-win32.whl", hash = "sha256:f153e31d8bca11363751e875c0a70b3d25160ecbaee7b51e457f14498fb39d8b", size = 205593, upload-time = "2025-10-17T11:29:53.938Z" },
{ url = "https://files.pythonhosted.org/packages/7c/16/461bafe22bae79bab74e217a09c907481a46d520c36b7b9fe71ee8c9e983/jiter-0.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:f773f84080b667c69c4ea0403fc67bb08b07e2b7ce1ef335dea5868451e60fed", size = 203518, upload-time = "2025-10-17T11:29:55.216Z" },
{ url = "https://files.pythonhosted.org/packages/7b/72/c45de6e320edb4fa165b7b1a414193b3cae302dd82da2169d315dcc78b44/jiter-0.11.1-cp313-cp313-win_arm64.whl", hash = "sha256:635ecd45c04e4c340d2187bcb1cea204c7cc9d32c1364d251564bf42e0e39c2d", size = 188062, upload-time = "2025-10-17T11:29:56.631Z" },
{ url = "https://files.pythonhosted.org/packages/65/9b/4a57922437ca8753ef823f434c2dec5028b237d84fa320f06a3ba1aec6e8/jiter-0.11.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d892b184da4d94d94ddb4031296931c74ec8b325513a541ebfd6dfb9ae89904b", size = 313814, upload-time = "2025-10-17T11:29:58.509Z" },
{ url = "https://files.pythonhosted.org/packages/76/50/62a0683dadca25490a4bedc6a88d59de9af2a3406dd5a576009a73a1d392/jiter-0.11.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa22c223a3041dacb2fcd37c70dfd648b44662b4a48e242592f95bda5ab09d58", size = 344987, upload-time = "2025-10-17T11:30:00.208Z" },
{ url = "https://files.pythonhosted.org/packages/da/00/2355dbfcbf6cdeaddfdca18287f0f38ae49446bb6378e4a5971e9356fc8a/jiter-0.11.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:330e8e6a11ad4980cd66a0f4a3e0e2e0f646c911ce047014f984841924729789", size = 356399, upload-time = "2025-10-17T11:30:02.084Z" },
{ url = "https://files.pythonhosted.org/packages/c9/07/c2bd748d578fa933d894a55bff33f983bc27f75fc4e491b354bef7b78012/jiter-0.11.1-cp313-cp313t-win_amd64.whl", hash = "sha256:09e2e386ebf298547ca3a3704b729471f7ec666c2906c5c26c1a915ea24741ec", size = 203289, upload-time = "2025-10-17T11:30:03.656Z" },
{ url = "https://files.pythonhosted.org/packages/e6/ee/ace64a853a1acbd318eb0ca167bad1cf5ee037207504b83a868a5849747b/jiter-0.11.1-cp313-cp313t-win_arm64.whl", hash = "sha256:fe4a431c291157e11cee7c34627990ea75e8d153894365a3bc84b7a959d23ca8", size = 188284, upload-time = "2025-10-17T11:30:05.046Z" },
{ url = "https://files.pythonhosted.org/packages/8d/00/d6006d069e7b076e4c66af90656b63da9481954f290d5eca8c715f4bf125/jiter-0.11.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0fa1f70da7a8a9713ff8e5f75ec3f90c0c870be6d526aa95e7c906f6a1c8c676", size = 304624, upload-time = "2025-10-17T11:30:06.678Z" },
{ url = "https://files.pythonhosted.org/packages/fc/45/4a0e31eb996b9ccfddbae4d3017b46f358a599ccf2e19fbffa5e531bd304/jiter-0.11.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:569ee559e5046a42feb6828c55307cf20fe43308e3ae0d8e9e4f8d8634d99944", size = 315042, upload-time = "2025-10-17T11:30:08.87Z" },
{ url = "https://files.pythonhosted.org/packages/e7/91/22f5746f5159a28c76acdc0778801f3c1181799aab196dbea2d29e064968/jiter-0.11.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f69955fa1d92e81987f092b233f0be49d4c937da107b7f7dcf56306f1d3fcce9", size = 346357, upload-time = "2025-10-17T11:30:10.222Z" },
{ url = "https://files.pythonhosted.org/packages/f5/4f/57620857d4e1dc75c8ff4856c90cb6c135e61bff9b4ebfb5dc86814e82d7/jiter-0.11.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:090f4c9d4a825e0fcbd0a2647c9a88a0f366b75654d982d95a9590745ff0c48d", size = 365057, upload-time = "2025-10-17T11:30:11.585Z" },
{ url = "https://files.pythonhosted.org/packages/ce/34/caf7f9cc8ae0a5bb25a5440cc76c7452d264d1b36701b90fdadd28fe08ec/jiter-0.11.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf3d8cedf9e9d825233e0dcac28ff15c47b7c5512fdfe2e25fd5bbb6e6b0cee", size = 487086, upload-time = "2025-10-17T11:30:13.052Z" },
{ url = "https://files.pythonhosted.org/packages/50/17/85b5857c329d533d433fedf98804ebec696004a1f88cabad202b2ddc55cf/jiter-0.11.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2aa9b1958f9c30d3d1a558b75f0626733c60eb9b7774a86b34d88060be1e67fe", size = 376083, upload-time = "2025-10-17T11:30:14.416Z" },
{ url = "https://files.pythonhosted.org/packages/85/d3/2d9f973f828226e6faebdef034097a2918077ea776fb4d88489949024787/jiter-0.11.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42d1ca16590b768c5e7d723055acd2633908baacb3628dd430842e2e035aa90", size = 357825, upload-time = "2025-10-17T11:30:15.765Z" },
{ url = "https://files.pythonhosted.org/packages/f4/55/848d4dabf2c2c236a05468c315c2cb9dc736c5915e65449ccecdba22fb6f/jiter-0.11.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5db4c2486a023820b701a17aec9c5a6173c5ba4393f26662f032f2de9c848b0f", size = 383933, upload-time = "2025-10-17T11:30:17.34Z" },
{ url = "https://files.pythonhosted.org/packages/0b/6c/204c95a4fbb0e26dfa7776c8ef4a878d0c0b215868011cc904bf44f707e2/jiter-0.11.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:4573b78777ccfac954859a6eff45cbd9d281d80c8af049d0f1a3d9fc323d5c3a", size = 517118, upload-time = "2025-10-17T11:30:18.684Z" },
{ url = "https://files.pythonhosted.org/packages/88/25/09956644ea5a2b1e7a2a0f665cb69a973b28f4621fa61fc0c0f06ff40a31/jiter-0.11.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:7593ac6f40831d7961cb67633c39b9fef6689a211d7919e958f45710504f52d3", size = 508194, upload-time = "2025-10-17T11:30:20.719Z" },
{ url = "https://files.pythonhosted.org/packages/09/49/4d1657355d7f5c9e783083a03a3f07d5858efa6916a7d9634d07db1c23bd/jiter-0.11.1-cp314-cp314-win32.whl", hash = "sha256:87202ec6ff9626ff5f9351507def98fcf0df60e9a146308e8ab221432228f4ea", size = 203961, upload-time = "2025-10-17T11:30:22.073Z" },
{ url = "https://files.pythonhosted.org/packages/76/bd/f063bd5cc2712e7ca3cf6beda50894418fc0cfeb3f6ff45a12d87af25996/jiter-0.11.1-cp314-cp314-win_amd64.whl", hash = "sha256:a5dd268f6531a182c89d0dd9a3f8848e86e92dfff4201b77a18e6b98aa59798c", size = 202804, upload-time = "2025-10-17T11:30:23.452Z" },
{ url = "https://files.pythonhosted.org/packages/52/ca/4d84193dfafef1020bf0bedd5e1a8d0e89cb67c54b8519040effc694964b/jiter-0.11.1-cp314-cp314-win_arm64.whl", hash = "sha256:5d761f863f912a44748a21b5c4979c04252588ded8d1d2760976d2e42cd8d991", size = 188001, upload-time = "2025-10-17T11:30:24.915Z" },
{ url = "https://files.pythonhosted.org/packages/d5/fa/3b05e5c9d32efc770a8510eeb0b071c42ae93a5b576fd91cee9af91689a1/jiter-0.11.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2cc5a3965285ddc33e0cab933e96b640bc9ba5940cea27ebbbf6695e72d6511c", size = 312561, upload-time = "2025-10-17T11:30:26.742Z" },
{ url = "https://files.pythonhosted.org/packages/50/d3/335822eb216154ddb79a130cbdce88fdf5c3e2b43dc5dba1fd95c485aaf5/jiter-0.11.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b572b3636a784c2768b2342f36a23078c8d3aa6d8a30745398b1bab58a6f1a8", size = 344551, upload-time = "2025-10-17T11:30:28.252Z" },
{ url = "https://files.pythonhosted.org/packages/31/6d/a0bed13676b1398f9b3ba61f32569f20a3ff270291161100956a577b2dd3/jiter-0.11.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad93e3d67a981f96596d65d2298fe8d1aa649deb5374a2fb6a434410ee11915e", size = 363051, upload-time = "2025-10-17T11:30:30.009Z" },
{ url = "https://files.pythonhosted.org/packages/a4/03/313eda04aa08545a5a04ed5876e52f49ab76a4d98e54578896ca3e16313e/jiter-0.11.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a83097ce379e202dcc3fe3fc71a16d523d1ee9192c8e4e854158f96b3efe3f2f", size = 485897, upload-time = "2025-10-17T11:30:31.429Z" },
{ url = "https://files.pythonhosted.org/packages/5f/13/a1011b9d325e40b53b1b96a17c010b8646013417f3902f97a86325b19299/jiter-0.11.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7042c51e7fbeca65631eb0c332f90c0c082eab04334e7ccc28a8588e8e2804d9", size = 375224, upload-time = "2025-10-17T11:30:33.18Z" },
{ url = "https://files.pythonhosted.org/packages/92/da/1b45026b19dd39b419e917165ff0ea629dbb95f374a3a13d2df95e40a6ac/jiter-0.11.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a68d679c0e47649a61df591660507608adc2652442de7ec8276538ac46abe08", size = 356606, upload-time = "2025-10-17T11:30:34.572Z" },
{ url = "https://files.pythonhosted.org/packages/7a/0c/9acb0e54d6a8ba59ce923a180ebe824b4e00e80e56cefde86cc8e0a948be/jiter-0.11.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1b0da75dbf4b6ec0b3c9e604d1ee8beaf15bc046fff7180f7d89e3cdbd3bb51", size = 384003, upload-time = "2025-10-17T11:30:35.987Z" },
{ url = "https://files.pythonhosted.org/packages/3f/2b/e5a5fe09d6da2145e4eed651e2ce37f3c0cf8016e48b1d302e21fb1628b7/jiter-0.11.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:69dd514bf0fa31c62147d6002e5ca2b3e7ef5894f5ac6f0a19752385f4e89437", size = 516946, upload-time = "2025-10-17T11:30:37.425Z" },
{ url = "https://files.pythonhosted.org/packages/5f/fe/db936e16e0228d48eb81f9934e8327e9fde5185e84f02174fcd22a01be87/jiter-0.11.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:bb31ac0b339efa24c0ca606febd8b77ef11c58d09af1b5f2be4c99e907b11111", size = 507614, upload-time = "2025-10-17T11:30:38.977Z" },
{ url = "https://files.pythonhosted.org/packages/86/db/c4438e8febfb303486d13c6b72f5eb71cf851e300a0c1f0b4140018dd31f/jiter-0.11.1-cp314-cp314t-win32.whl", hash = "sha256:b2ce0d6156a1d3ad41da3eec63b17e03e296b78b0e0da660876fccfada86d2f7", size = 204043, upload-time = "2025-10-17T11:30:40.308Z" },
{ url = "https://files.pythonhosted.org/packages/36/59/81badb169212f30f47f817dfaabf965bc9b8204fed906fab58104ee541f9/jiter-0.11.1-cp314-cp314t-win_amd64.whl", hash = "sha256:f4db07d127b54c4a2d43b4cf05ff0193e4f73e0dd90c74037e16df0b29f666e1", size = 204046, upload-time = "2025-10-17T11:30:41.692Z" },
{ url = "https://files.pythonhosted.org/packages/dd/01/43f7b4eb61db3e565574c4c5714685d042fb652f9eef7e5a3de6aafa943a/jiter-0.11.1-cp314-cp314t-win_arm64.whl", hash = "sha256:28e4fdf2d7ebfc935523e50d1efa3970043cfaa161674fe66f9642409d001dfe", size = 188069, upload-time = "2025-10-17T11:30:43.23Z" },
{ url = "https://files.pythonhosted.org/packages/a6/bc/950dd7f170c6394b6fdd73f989d9e729bd98907bcc4430ef080a72d06b77/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:0d4d6993edc83cf75e8c6828a8d6ce40a09ee87e38c7bfba6924f39e1337e21d", size = 302626, upload-time = "2025-10-17T11:31:09.645Z" },
{ url = "https://files.pythonhosted.org/packages/3a/65/43d7971ca82ee100b7b9b520573eeef7eabc0a45d490168ebb9a9b5bb8b2/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f78d151c83a87a6cf5461d5ee55bc730dd9ae227377ac6f115b922989b95f838", size = 297034, upload-time = "2025-10-17T11:31:10.975Z" },
{ url = "https://files.pythonhosted.org/packages/19/4c/000e1e0c0c67e96557a279f8969487ea2732d6c7311698819f977abae837/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9022974781155cd5521d5cb10997a03ee5e31e8454c9d999dcdccd253f2353f", size = 337328, upload-time = "2025-10-17T11:31:12.399Z" },
{ url = "https://files.pythonhosted.org/packages/d9/71/71408b02c6133153336d29fa3ba53000f1e1a3f78bb2fc2d1a1865d2e743/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18c77aaa9117510d5bdc6a946baf21b1f0cfa58ef04d31c8d016f206f2118960", size = 343697, upload-time = "2025-10-17T11:31:13.773Z" },
{ url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" },
{ url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" },
{ url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" },
{ url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" },
{ url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" },
{ url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" },
{ url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" },
{ url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" },
{ url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" },
{ url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" },
{ url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" },
{ url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" },
{ url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" },
{ url = "https://files.pythonhosted.org/packages/3d/a6/97209693b177716e22576ee1161674d1d58029eb178e01866a0422b69224/jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e", size = 313658, upload-time = "2025-11-09T20:47:44.424Z" },
{ url = "https://files.pythonhosted.org/packages/06/4d/125c5c1537c7d8ee73ad3d530a442d6c619714b95027143f1b61c0b4dfe0/jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1", size = 318605, upload-time = "2025-11-09T20:47:45.973Z" },
{ url = "https://files.pythonhosted.org/packages/99/bf/a840b89847885064c41a5f52de6e312e91fa84a520848ee56c97e4fa0205/jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf", size = 349803, upload-time = "2025-11-09T20:47:47.535Z" },
{ url = "https://files.pythonhosted.org/packages/8a/88/e63441c28e0db50e305ae23e19c1d8fae012d78ed55365da392c1f34b09c/jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44", size = 365120, upload-time = "2025-11-09T20:47:49.284Z" },
{ url = "https://files.pythonhosted.org/packages/0a/7c/49b02714af4343970eb8aca63396bc1c82fa01197dbb1e9b0d274b550d4e/jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45", size = 479918, upload-time = "2025-11-09T20:47:50.807Z" },
{ url = "https://files.pythonhosted.org/packages/69/ba/0a809817fdd5a1db80490b9150645f3aae16afad166960bcd562be194f3b/jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87", size = 379008, upload-time = "2025-11-09T20:47:52.211Z" },
{ url = "https://files.pythonhosted.org/packages/5f/c3/c9fc0232e736c8877d9e6d83d6eeb0ba4e90c6c073835cc2e8f73fdeef51/jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed", size = 361785, upload-time = "2025-11-09T20:47:53.512Z" },
{ url = "https://files.pythonhosted.org/packages/96/61/61f69b7e442e97ca6cd53086ddc1cf59fb830549bc72c0a293713a60c525/jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9", size = 386108, upload-time = "2025-11-09T20:47:54.893Z" },
{ url = "https://files.pythonhosted.org/packages/e9/2e/76bb3332f28550c8f1eba3bf6e5efe211efda0ddbbaf24976bc7078d42a5/jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626", size = 519937, upload-time = "2025-11-09T20:47:56.253Z" },
{ url = "https://files.pythonhosted.org/packages/84/d6/fa96efa87dc8bff2094fb947f51f66368fa56d8d4fc9e77b25d7fbb23375/jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c", size = 510853, upload-time = "2025-11-09T20:47:58.32Z" },
{ url = "https://files.pythonhosted.org/packages/8a/28/93f67fdb4d5904a708119a6ab58a8f1ec226ff10a94a282e0215402a8462/jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de", size = 204699, upload-time = "2025-11-09T20:47:59.686Z" },
{ url = "https://files.pythonhosted.org/packages/c4/1f/30b0eb087045a0abe2a5c9c0c0c8da110875a1d3be83afd4a9a4e548be3c/jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a", size = 204258, upload-time = "2025-11-09T20:48:01.01Z" },
{ url = "https://files.pythonhosted.org/packages/2c/f4/2b4daf99b96bce6fc47971890b14b2a36aef88d7beb9f057fafa032c6141/jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60", size = 185503, upload-time = "2025-11-09T20:48:02.35Z" },
{ url = "https://files.pythonhosted.org/packages/39/ca/67bb15a7061d6fe20b9b2a2fd783e296a1e0f93468252c093481a2f00efa/jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6", size = 317965, upload-time = "2025-11-09T20:48:03.783Z" },
{ url = "https://files.pythonhosted.org/packages/18/af/1788031cd22e29c3b14bc6ca80b16a39a0b10e611367ffd480c06a259831/jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4", size = 345831, upload-time = "2025-11-09T20:48:05.55Z" },
{ url = "https://files.pythonhosted.org/packages/05/17/710bf8472d1dff0d3caf4ced6031060091c1320f84ee7d5dcbed1f352417/jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb", size = 361272, upload-time = "2025-11-09T20:48:06.951Z" },
{ url = "https://files.pythonhosted.org/packages/fb/f1/1dcc4618b59761fef92d10bcbb0b038b5160be653b003651566a185f1a5c/jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7", size = 204604, upload-time = "2025-11-09T20:48:08.328Z" },
{ url = "https://files.pythonhosted.org/packages/d9/32/63cb1d9f1c5c6632a783c0052cde9ef7ba82688f7065e2f0d5f10a7e3edb/jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3", size = 185628, upload-time = "2025-11-09T20:48:09.572Z" },
{ url = "https://files.pythonhosted.org/packages/a8/99/45c9f0dbe4a1416b2b9a8a6d1236459540f43d7fb8883cff769a8db0612d/jiter-0.12.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:c46d927acd09c67a9fb1416df45c5a04c27e83aae969267e98fba35b74e99525", size = 312478, upload-time = "2025-11-09T20:48:10.898Z" },
{ url = "https://files.pythonhosted.org/packages/4c/a7/54ae75613ba9e0f55fcb0bc5d1f807823b5167cc944e9333ff322e9f07dd/jiter-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:774ff60b27a84a85b27b88cd5583899c59940bcc126caca97eb2a9df6aa00c49", size = 318706, upload-time = "2025-11-09T20:48:12.266Z" },
{ url = "https://files.pythonhosted.org/packages/59/31/2aa241ad2c10774baf6c37f8b8e1f39c07db358f1329f4eb40eba179c2a2/jiter-0.12.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5433fab222fb072237df3f637d01b81f040a07dcac1cb4a5c75c7aa9ed0bef1", size = 351894, upload-time = "2025-11-09T20:48:13.673Z" },
{ url = "https://files.pythonhosted.org/packages/54/4f/0f2759522719133a9042781b18cc94e335b6d290f5e2d3e6899d6af933e3/jiter-0.12.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8c593c6e71c07866ec6bfb790e202a833eeec885022296aff6b9e0b92d6a70e", size = 365714, upload-time = "2025-11-09T20:48:15.083Z" },
{ url = "https://files.pythonhosted.org/packages/dc/6f/806b895f476582c62a2f52c453151edd8a0fde5411b0497baaa41018e878/jiter-0.12.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90d32894d4c6877a87ae00c6b915b609406819dce8bc0d4e962e4de2784e567e", size = 478989, upload-time = "2025-11-09T20:48:16.706Z" },
{ url = "https://files.pythonhosted.org/packages/86/6c/012d894dc6e1033acd8db2b8346add33e413ec1c7c002598915278a37f79/jiter-0.12.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:798e46eed9eb10c3adbbacbd3bdb5ecd4cf7064e453d00dbef08802dae6937ff", size = 378615, upload-time = "2025-11-09T20:48:18.614Z" },
{ url = "https://files.pythonhosted.org/packages/87/30/d718d599f6700163e28e2c71c0bbaf6dace692e7df2592fd793ac9276717/jiter-0.12.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3f1368f0a6719ea80013a4eb90ba72e75d7ea67cfc7846db2ca504f3df0169a", size = 364745, upload-time = "2025-11-09T20:48:20.117Z" },
{ url = "https://files.pythonhosted.org/packages/8f/85/315b45ce4b6ddc7d7fceca24068543b02bdc8782942f4ee49d652e2cc89f/jiter-0.12.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65f04a9d0b4406f7e51279710b27484af411896246200e461d80d3ba0caa901a", size = 386502, upload-time = "2025-11-09T20:48:21.543Z" },
{ url = "https://files.pythonhosted.org/packages/74/0b/ce0434fb40c5b24b368fe81b17074d2840748b4952256bab451b72290a49/jiter-0.12.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:fd990541982a24281d12b67a335e44f117e4c6cbad3c3b75c7dea68bf4ce3a67", size = 519845, upload-time = "2025-11-09T20:48:22.964Z" },
{ url = "https://files.pythonhosted.org/packages/e8/a3/7a7a4488ba052767846b9c916d208b3ed114e3eb670ee984e4c565b9cf0d/jiter-0.12.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:b111b0e9152fa7df870ecaebb0bd30240d9f7fff1f2003bcb4ed0f519941820b", size = 510701, upload-time = "2025-11-09T20:48:24.483Z" },
{ url = "https://files.pythonhosted.org/packages/c3/16/052ffbf9d0467b70af24e30f91e0579e13ded0c17bb4a8eb2aed3cb60131/jiter-0.12.0-cp314-cp314-win32.whl", hash = "sha256:a78befb9cc0a45b5a5a0d537b06f8544c2ebb60d19d02c41ff15da28a9e22d42", size = 205029, upload-time = "2025-11-09T20:48:25.749Z" },
{ url = "https://files.pythonhosted.org/packages/e4/18/3cf1f3f0ccc789f76b9a754bdb7a6977e5d1d671ee97a9e14f7eb728d80e/jiter-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:e1fe01c082f6aafbe5c8faf0ff074f38dfb911d53f07ec333ca03f8f6226debf", size = 204960, upload-time = "2025-11-09T20:48:27.415Z" },
{ url = "https://files.pythonhosted.org/packages/02/68/736821e52ecfdeeb0f024b8ab01b5a229f6b9293bbdb444c27efade50b0f/jiter-0.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:d72f3b5a432a4c546ea4bedc84cce0c3404874f1d1676260b9c7f048a9855451", size = 185529, upload-time = "2025-11-09T20:48:29.125Z" },
{ url = "https://files.pythonhosted.org/packages/30/61/12ed8ee7a643cce29ac97c2281f9ce3956eb76b037e88d290f4ed0d41480/jiter-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e6ded41aeba3603f9728ed2b6196e4df875348ab97b28fc8afff115ed42ba7a7", size = 318974, upload-time = "2025-11-09T20:48:30.87Z" },
{ url = "https://files.pythonhosted.org/packages/2d/c6/f3041ede6d0ed5e0e79ff0de4c8f14f401bbf196f2ef3971cdbe5fd08d1d/jiter-0.12.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a947920902420a6ada6ad51892082521978e9dd44a802663b001436e4b771684", size = 345932, upload-time = "2025-11-09T20:48:32.658Z" },
{ url = "https://files.pythonhosted.org/packages/d5/5d/4d94835889edd01ad0e2dbfc05f7bdfaed46292e7b504a6ac7839aa00edb/jiter-0.12.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:add5e227e0554d3a52cf390a7635edaffdf4f8fce4fdbcef3cc2055bb396a30c", size = 367243, upload-time = "2025-11-09T20:48:34.093Z" },
{ url = "https://files.pythonhosted.org/packages/fd/76/0051b0ac2816253a99d27baf3dda198663aff882fa6ea7deeb94046da24e/jiter-0.12.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9b1cda8fcb736250d7e8711d4580ebf004a46771432be0ae4796944b5dfa5d", size = 479315, upload-time = "2025-11-09T20:48:35.507Z" },
{ url = "https://files.pythonhosted.org/packages/70/ae/83f793acd68e5cb24e483f44f482a1a15601848b9b6f199dacb970098f77/jiter-0.12.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deeb12a2223fe0135c7ff1356a143d57f95bbf1f4a66584f1fc74df21d86b993", size = 380714, upload-time = "2025-11-09T20:48:40.014Z" },
{ url = "https://files.pythonhosted.org/packages/b1/5e/4808a88338ad2c228b1126b93fcd8ba145e919e886fe910d578230dabe3b/jiter-0.12.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c596cc0f4cb574877550ce4ecd51f8037469146addd676d7c1a30ebe6391923f", size = 365168, upload-time = "2025-11-09T20:48:41.462Z" },
{ url = "https://files.pythonhosted.org/packages/0c/d4/04619a9e8095b42aef436b5aeb4c0282b4ff1b27d1db1508df9f5dc82750/jiter-0.12.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ab4c823b216a4aeab3fdbf579c5843165756bd9ad87cc6b1c65919c4715f783", size = 387893, upload-time = "2025-11-09T20:48:42.921Z" },
{ url = "https://files.pythonhosted.org/packages/17/ea/d3c7e62e4546fdc39197fa4a4315a563a89b95b6d54c0d25373842a59cbe/jiter-0.12.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e427eee51149edf962203ff8db75a7514ab89be5cb623fb9cea1f20b54f1107b", size = 520828, upload-time = "2025-11-09T20:48:44.278Z" },
{ url = "https://files.pythonhosted.org/packages/cc/0b/c6d3562a03fd767e31cb119d9041ea7958c3c80cb3d753eafb19b3b18349/jiter-0.12.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:edb868841f84c111255ba5e80339d386d937ec1fdce419518ce1bd9370fac5b6", size = 511009, upload-time = "2025-11-09T20:48:45.726Z" },
{ url = "https://files.pythonhosted.org/packages/aa/51/2cb4468b3448a8385ebcd15059d325c9ce67df4e2758d133ab9442b19834/jiter-0.12.0-cp314-cp314t-win32.whl", hash = "sha256:8bbcfe2791dfdb7c5e48baf646d37a6a3dcb5a97a032017741dea9f817dca183", size = 205110, upload-time = "2025-11-09T20:48:47.033Z" },
{ url = "https://files.pythonhosted.org/packages/b2/c5/ae5ec83dec9c2d1af805fd5fe8f74ebded9c8670c5210ec7820ce0dbeb1e/jiter-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2fa940963bf02e1d8226027ef461e36af472dea85d36054ff835aeed944dd873", size = 205223, upload-time = "2025-11-09T20:48:49.076Z" },
{ url = "https://files.pythonhosted.org/packages/97/9a/3c5391907277f0e55195550cf3fa8e293ae9ee0c00fb402fec1e38c0c82f/jiter-0.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:506c9708dd29b27288f9f8f1140c3cb0e3d8ddb045956d7757b1fa0e0f39a473", size = 185564, upload-time = "2025-11-09T20:48:50.376Z" },
{ url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" },
{ url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" },
{ url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" },
{ url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" },
]

[[package]]
name = "libcst"
version = "1.8.5"
version = "1.8.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyyaml", marker = "python_full_version < '3.13'" },
{ name = "pyyaml-ft", marker = "python_full_version >= '3.13'" },
{ name = "pyyaml", marker = "python_full_version != '3.13.*'" },
{ name = "pyyaml-ft", marker = "python_full_version == '3.13.*'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5c/55/ca4552d7fe79a91b2a7b4fa39991e8a45a17c8bfbcaf264597d95903c777/libcst-1.8.5.tar.gz", hash = "sha256:e72e1816eed63f530668e93a4c22ff1cf8b91ddce0ec53e597d3f6c53e103ec7", size = 884582, upload-time = "2025-09-26T05:29:44.101Z" }
sdist = { url = "https://files.pythonhosted.org/packages/de/cd/337df968b38d94c5aabd3e1b10630f047a2b345f6e1d4456bd9fe7417537/libcst-1.8.6.tar.gz", hash = "sha256:f729c37c9317126da9475bdd06a7208eb52fcbd180a6341648b45a56b4ba708b", size = 891354, upload-time = "2025-11-03T22:33:30.621Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/bb/c7abe0654fcf00292d6959256948ce4ae07785c4f65a45c3e25cc4637074/libcst-1.8.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c7733aba7b43239157661207b1e3a9f3711a7fc061a0eca6a33f0716fdfd21", size = 2196690, upload-time = "2025-09-26T05:28:17.839Z" },
{ url = "https://files.pythonhosted.org/packages/49/25/e7c02209e8ce66e7b75a66d132118f6f812a8b03cd31ee7d96de56c733a1/libcst-1.8.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b8c3cfbbf6049e3c587713652e4b3c88cfbf7df7878b2eeefaa8dd20a48dc607", size = 2082616, upload-time = "2025-09-26T05:28:19.794Z" },
{ url = "https://files.pythonhosted.org/packages/32/68/a4f49d99e3130256e225d639722440ba2682c12812a30ebd7ba64fd0fd31/libcst-1.8.5-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:31d86025d8997c853f85c4b5d494f04a157fb962e24f187b4af70c7755c9b27d", size = 2229037, upload-time = "2025-09-26T05:28:21.459Z" },
{ url = "https://files.pythonhosted.org/packages/b2/62/4fa21600a0bf3eb9f4d4f8bbb50ef120fb0b2990195eabba997b0b889566/libcst-1.8.5-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff9c535cfe99f0be79ac3024772b288570751fc69fc472b44fca12d1912d1561", size = 2292806, upload-time = "2025-09-26T05:28:23.033Z" },
{ url = "https://files.pythonhosted.org/packages/14/df/a01e8d54b62060698e37e3e28f77559ecb70c7b93ffee00d17e40221f419/libcst-1.8.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e8204607504563d3606bbaea2b9b04e0cef2b3bdc14c89171a702c1e09b9318a", size = 2294836, upload-time = "2025-09-26T05:28:24.937Z" },
{ url = "https://files.pythonhosted.org/packages/75/4f/c410e7f7ceda0558f688c1ca5dfb3a40ff8dfc527f8e6015fa749e11a650/libcst-1.8.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e6cd3df72d47701b205fa3349ba8899566df82cef248c2fdf5f575d640419c4", size = 2396004, upload-time = "2025-09-26T05:28:26.582Z" },
{ url = "https://files.pythonhosted.org/packages/f0/07/bb77dcb94badad0ad3e5a1e992a4318dbdf40632eac3b5cf18299858ad7d/libcst-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:197c2f86dd0ca5c6464184ddef7f6440d64c8da39b78d16fc053da6701ed1209", size = 2107301, upload-time = "2025-09-26T05:28:28.235Z" },
{ url = "https://files.pythonhosted.org/packages/79/70/e688e6d99d6920c3f97bf8bbaec33ac2c71a947730772a1d32dd899dbbf1/libcst-1.8.5-cp312-cp312-win_arm64.whl", hash = "sha256:c5ca109c9a81dff3d947dceba635a08f9c3dfeb7f61b0b824a175ef0a98ea69b", size = 1990870, upload-time = "2025-09-26T05:28:29.858Z" },
{ url = "https://files.pythonhosted.org/packages/b0/77/ca1d2499881c774121ebb7c78c22f371c179f18317961e1e529dafc1af52/libcst-1.8.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9e9563dcd754b65557ba9cdff9a5af32cfa5f007be0db982429580db45bfe", size = 2196687, upload-time = "2025-09-26T05:28:31.769Z" },
{ url = "https://files.pythonhosted.org/packages/ef/1c/fdb7c226ad82fcf3b1bb19c24d8e895588a0c1fd2bc81e30792d041e15bc/libcst-1.8.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61d56839d237e9bf3310e6479ffaf6659f298940f0e0d2460ce71ee67a5375df", size = 2082639, upload-time = "2025-09-26T05:28:33.358Z" },
{ url = "https://files.pythonhosted.org/packages/af/1a/c6e89455483355971d13f6d71ad717624686b50558f7e2c12393c2c8e2f1/libcst-1.8.5-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:b084769dcda2036265fc426eec5894c658af8d4b0e0d0255ab6bb78c8c9d6eb4", size = 2229202, upload-time = "2025-09-26T05:28:35.276Z" },
{ url = "https://files.pythonhosted.org/packages/02/9c/3e4ce737a34c0ada15a35f51d0dbd8bf0ac0cef0c4560ddc0a8364e3f712/libcst-1.8.5-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c20384b8a4a7801b4416ef96173f1fbb7fafad7529edfdf151811ef70423118a", size = 2293220, upload-time = "2025-09-26T05:28:37.201Z" },
{ url = "https://files.pythonhosted.org/packages/1a/74/a68fcb3625b0c218c01aaefef9366f505654a1aa64af99cfe7ff7c97bf41/libcst-1.8.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:271b0b363972ff7d2b8116add13977e7c3b2668c7a424095851d548d222dab18", size = 2295146, upload-time = "2025-09-26T05:28:39.122Z" },
{ url = "https://files.pythonhosted.org/packages/37/c3/f4b6edf204f919c6968eb2d111c338098aebbe3fb5d5d95aceacfcf65d9a/libcst-1.8.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0ba728c7aee73b330f49f2df0f0b56b74c95302eeb78860f8d5ff0e0fc52c887", size = 2396597, upload-time = "2025-09-26T05:28:41.162Z" },
{ url = "https://files.pythonhosted.org/packages/d0/94/b5cbe122db8f60e7e05bd56743f91d176f3da9b2101f8234e25bb3c5e493/libcst-1.8.5-cp313-cp313-win_amd64.whl", hash = "sha256:0abf0e87570cd3b06a8cafbb5378a9d1cbf12e4583dc35e0fff2255100da55a1", size = 2107479, upload-time = "2025-09-26T05:28:43.094Z" },
{ url = "https://files.pythonhosted.org/packages/05/4d/5e47752c37b33ea6fd1fac76f62e2caa37a6f78d841338bb8fd3dcf51498/libcst-1.8.5-cp313-cp313-win_arm64.whl", hash = "sha256:757390c3cf0b45d7ae1d1d4070c839b082926e762e65eab144f37a63ad33b939", size = 1990992, upload-time = "2025-09-26T05:28:44.993Z" },
{ url = "https://files.pythonhosted.org/packages/88/df/d0eaaed2c402f945fd049b990c98242cb6eace640258e9f8d484206a9666/libcst-1.8.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f8934763389cd21ce3ed229b63b994b79dac8be7e84a9da144823f46bc1ffc5c", size = 2187746, upload-time = "2025-09-26T05:28:46.946Z" },
{ url = "https://files.pythonhosted.org/packages/19/05/ca62c80dc5f2cf26c2d5d1428612950c6f04df66f765ab0ca8b7d42b4ba1/libcst-1.8.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b873caf04862b6649a2a961fce847f7515ba882be02376a924732cf82c160861", size = 2072530, upload-time = "2025-09-26T05:28:48.451Z" },
{ url = "https://files.pythonhosted.org/packages/1a/38/34a5825bd87badaf8bc0725e5816d395f43ea2f8d1f3cb6982cccc70a1a2/libcst-1.8.5-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:50e095d18c4f76da0e03f25c50b52a2999acbcbe4598a3cf41842ee3c13b54f1", size = 2219819, upload-time = "2025-09-26T05:28:50.328Z" },
{ url = "https://files.pythonhosted.org/packages/74/ea/10407cc1c06231079f5ee6c5e2c2255a2c3f876a7a7f13af734f9bb6ee0e/libcst-1.8.5-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3a3c967725cc3e8fa5c7251188d57d48eec8835f44c6b53f7523992bec595fa0", size = 2283011, upload-time = "2025-09-26T05:28:51.808Z" },
{ url = "https://files.pythonhosted.org/packages/5b/fc/c4e4c03b4804ac78b8209e83a3c15e449aa68ddd0e602d5c2cc4b7e1b9ed/libcst-1.8.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eed454ab77f4b18100c41d8973b57069e503943ea4e5e5bbb660404976a0fe7a", size = 2283315, upload-time = "2025-09-26T05:28:53.33Z" },
{ url = "https://files.pythonhosted.org/packages/bb/39/75e07c2933b55815b71b1971e5388a24d1d1475631266251249eaed8af28/libcst-1.8.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:39130e59868b8fa49f6eeedd46f008d3456fc13ded57e1c85b211636eb6425f3", size = 2387279, upload-time = "2025-09-26T05:28:54.872Z" },
{ url = "https://files.pythonhosted.org/packages/04/44/0315fb0f2ee8913d209a5caf57932db8efb3f562dbcdc5fb157de92fb098/libcst-1.8.5-cp313-cp313t-win_amd64.whl", hash = "sha256:a7b1cc3abfdba5ce36907f94f07e079528d4be52c07dfffa26f0e68eb1d25d45", size = 2098827, upload-time = "2025-09-26T05:28:56.877Z" },
{ url = "https://files.pythonhosted.org/packages/45/c2/1335fe9feb7d75526df454a8f9db77615460c69691c27af0a57621ca9e47/libcst-1.8.5-cp313-cp313t-win_arm64.whl", hash = "sha256:20354c4217e87afea936e9ea90c57fe0b2c5651f41b3ee59f5df8a53ab417746", size = 1979853, upload-time = "2025-09-26T05:28:58.408Z" },
{ url = "https://files.pythonhosted.org/packages/9e/4e/4d961f15e7cc3f9924c4865158cf23de3cb1d9727be5bc5ec1f6b2e0e991/libcst-1.8.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f350ff2867b3075ba97a022de694f2747c469c25099216cef47b58caaee96314", size = 2196843, upload-time = "2025-09-26T05:29:00.64Z" },
{ url = "https://files.pythonhosted.org/packages/47/b5/706b51025218b31346335c8aa1e316e91dbd82b9bd60483a23842a59033b/libcst-1.8.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b95db09d04d125619a63f191c9534853656c4c76c303b8b4c5f950c8e610fba", size = 2082306, upload-time = "2025-09-26T05:29:02.498Z" },
{ url = "https://files.pythonhosted.org/packages/eb/78/53816b76257d9d149f074ac0b913be1c94d54fb07b3a77f3e11333659d36/libcst-1.8.5-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:60e62e966b45b7dee6f0ec0fd7687704d29be18ae670c5bc6c9c61a12ccf589f", size = 2230603, upload-time = "2025-09-26T05:29:04.123Z" },
{ url = "https://files.pythonhosted.org/packages/a6/06/4497c456ad0ace0f60a38f0935d6e080600532bcddeaf545443d4d7c4db2/libcst-1.8.5-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:7cbb330a352dde570059c73af7b7bbfaa84ae121f54d2ce46c5530351f57419d", size = 2293110, upload-time = "2025-09-26T05:29:05.685Z" },
{ url = "https://files.pythonhosted.org/packages/14/fc/9ef8cc7c0a9cca722b6f176cc82b5925dbcdfcee6e17cd6d3056d45af38e/libcst-1.8.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:71b2b1ef2305cba051252342a1a4f8e94e6b8e95d7693a7c15a00ce8849ef722", size = 2296366, upload-time = "2025-09-26T05:29:07.451Z" },
{ url = "https://files.pythonhosted.org/packages/2d/7e/799dac0cd086cc5dab3837ead9c72dd4e29a79323795dc52b2ebb3aac9a0/libcst-1.8.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0f504d06dfba909d1ba6a4acf60bfe3f22275444d6e0d07e472a5da4a209b0be", size = 2397188, upload-time = "2025-09-26T05:29:09.084Z" },
{ url = "https://files.pythonhosted.org/packages/1b/5c/e4f32439818db04ea43b1d6de1d375dcdd5ff33b828864900c340f26436c/libcst-1.8.5-cp314-cp314-win_amd64.whl", hash = "sha256:c69d2b39e360dea5490ccb5dcf5957dcbb1067d27dc1f3f0787d4e287f7744e2", size = 2183599, upload-time = "2025-09-26T05:29:11.039Z" },
{ url = "https://files.pythonhosted.org/packages/e2/f9/a457c3da610aef4b5f5c00f1feb67192594b77fb9dddab8f654161c1ea6f/libcst-1.8.5-cp314-cp314-win_arm64.whl", hash = "sha256:63405cb548b2d7b78531535a7819231e633b13d3dee3eb672d58f0f3322892ca", size = 2071025, upload-time = "2025-09-26T05:29:12.546Z" },
{ url = "https://files.pythonhosted.org/packages/4a/b6/37abad6fc44df268cd8c2a903ddb2108bd8ac324ef000c2dfcb03d763a41/libcst-1.8.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8a5921105610f35921cc4db6fa5e68e941c6da20ce7f9f93b41b6c66b5481353", size = 2187762, upload-time = "2025-09-26T05:29:14.322Z" },
{ url = "https://files.pythonhosted.org/packages/b4/19/d1118c0b25612a3f50fb2c4b2010562fbf7e7df30ad821bab0aae9cf7e4f/libcst-1.8.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:abded10e8d92462fa982d19b064c6f24ed7ead81cf3c3b71011e9764cb12923d", size = 2072565, upload-time = "2025-09-26T05:29:16.37Z" },
{ url = "https://files.pythonhosted.org/packages/f7/c8/f72515e2774234c4f92909222d762789cc4be2247ed4189bc0639ade1f8c/libcst-1.8.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dd7bdb14545c4b77a6c0eb39c86a76441fe833da800f6ca63e917e1273621029", size = 2219884, upload-time = "2025-09-26T05:29:18.118Z" },
{ url = "https://files.pythonhosted.org/packages/f4/b8/b267b28cbb0cae19e8c7887cdeda72288ae1020d1c22b6c9955f065b296e/libcst-1.8.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6dc28d33ab8750a84c28b5625f7916846ecbecefd89bf75a5292a35644b6efbd", size = 2282790, upload-time = "2025-09-26T05:29:19.578Z" },
{ url = "https://files.pythonhosted.org/packages/9e/8a/46f2b01bb6782dbc0f4e917ed029b1236278a5dc6d263e55ee986a83a88e/libcst-1.8.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:970b7164a71c65e13c961965f9677bbbbeb21ce2e7e6655294f7f774156391c4", size = 2283591, upload-time = "2025-09-26T05:29:21.024Z" },
{ url = "https://files.pythonhosted.org/packages/e4/ca/3097729b5f6ab1d5e3a753492912d1d8b483a320421d3c0e9e26f1ecef0c/libcst-1.8.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd74c543770e6a61dcb8846c9689dfcce2ad686658896f77f3e21b6ce94bcb2e", size = 2386780, upload-time = "2025-09-26T05:29:22.922Z" },
{ url = "https://files.pythonhosted.org/packages/bb/cc/4fc91968779b70429106797ddb2265a18b0026e17ec6ba805c34427d2fb9/libcst-1.8.5-cp314-cp314t-win_amd64.whl", hash = "sha256:3d8e80cd1ed6577166f0bab77357f819f12564c2ed82307612e2bcc93e684d72", size = 2174807, upload-time = "2025-09-26T05:29:24.799Z" },
{ url = "https://files.pythonhosted.org/packages/79/3c/db47e1cf0c98a13cbea2cb5611e7b6913ac5e63845b0e41ee7020b03f523/libcst-1.8.5-cp314-cp314t-win_arm64.whl", hash = "sha256:a026aaa19cb2acd8a4d9e2a215598b0a7e2c194bf4482eb9dec4d781ec6e10b2", size = 2059048, upload-time = "2025-09-26T05:29:28.425Z" },
{ url = "https://files.pythonhosted.org/packages/0c/3c/93365c17da3d42b055a8edb0e1e99f1c60c776471db6c9b7f1ddf6a44b28/libcst-1.8.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0c13d5bd3d8414a129e9dccaf0e5785108a4441e9b266e1e5e9d1f82d1b943c9", size = 2206166, upload-time = "2025-11-03T22:32:16.012Z" },
{ url = "https://files.pythonhosted.org/packages/1d/cb/7530940e6ac50c6dd6022349721074e19309eb6aa296e942ede2213c1a19/libcst-1.8.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1472eeafd67cdb22544e59cf3bfc25d23dc94058a68cf41f6654ff4fcb92e09", size = 2083726, upload-time = "2025-11-03T22:32:17.312Z" },
{ url = "https://files.pythonhosted.org/packages/1b/cf/7e5eaa8c8f2c54913160671575351d129170db757bb5e4b7faffed022271/libcst-1.8.6-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:089c58e75cb142ec33738a1a4ea7760a28b40c078ab2fd26b270dac7d2633a4d", size = 2235755, upload-time = "2025-11-03T22:32:18.859Z" },
{ url = "https://files.pythonhosted.org/packages/55/54/570ec2b0e9a3de0af9922e3bb1b69a5429beefbc753a7ea770a27ad308bd/libcst-1.8.6-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c9d7aeafb1b07d25a964b148c0dda9451efb47bbbf67756e16eeae65004b0eb5", size = 2301473, upload-time = "2025-11-03T22:32:20.499Z" },
{ url = "https://files.pythonhosted.org/packages/11/4c/163457d1717cd12181c421a4cca493454bcabd143fc7e53313bc6a4ad82a/libcst-1.8.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:207481197afd328aa91d02670c15b48d0256e676ce1ad4bafb6dc2b593cc58f1", size = 2298899, upload-time = "2025-11-03T22:32:21.765Z" },
{ url = "https://files.pythonhosted.org/packages/35/1d/317ddef3669883619ef3d3395ea583305f353ef4ad87d7a5ac1c39be38e3/libcst-1.8.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:375965f34cc6f09f5f809244d3ff9bd4f6cb6699f571121cebce53622e7e0b86", size = 2408239, upload-time = "2025-11-03T22:32:23.275Z" },
{ url = "https://files.pythonhosted.org/packages/9a/a1/f47d8cccf74e212dd6044b9d6dbc223636508da99acff1d54786653196bc/libcst-1.8.6-cp312-cp312-win_amd64.whl", hash = "sha256:da95b38693b989eaa8d32e452e8261cfa77fe5babfef1d8d2ac25af8c4aa7e6d", size = 2119660, upload-time = "2025-11-03T22:32:24.822Z" },
{ url = "https://files.pythonhosted.org/packages/19/d0/dd313bf6a7942cdf951828f07ecc1a7695263f385065edc75ef3016a3cb5/libcst-1.8.6-cp312-cp312-win_arm64.whl", hash = "sha256:bff00e1c766658adbd09a175267f8b2f7616e5ee70ce45db3d7c4ce6d9f6bec7", size = 1999824, upload-time = "2025-11-03T22:32:26.131Z" },
{ url = "https://files.pythonhosted.org/packages/90/01/723cd467ec267e712480c772aacc5aa73f82370c9665162fd12c41b0065b/libcst-1.8.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7445479ebe7d1aff0ee094ab5a1c7718e1ad78d33e3241e1a1ec65dcdbc22ffb", size = 2206386, upload-time = "2025-11-03T22:32:27.422Z" },
{ url = "https://files.pythonhosted.org/packages/17/50/b944944f910f24c094f9b083f76f61e3985af5a376f5342a21e01e2d1a81/libcst-1.8.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4fc3fef8a2c983e7abf5d633e1884c5dd6fa0dcb8f6e32035abd3d3803a3a196", size = 2083945, upload-time = "2025-11-03T22:32:28.847Z" },
{ url = "https://files.pythonhosted.org/packages/36/a1/bd1b2b2b7f153d82301cdaddba787f4a9fc781816df6bdb295ca5f88b7cf/libcst-1.8.6-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:1a3a5e4ee870907aa85a4076c914ae69066715a2741b821d9bf16f9579de1105", size = 2235818, upload-time = "2025-11-03T22:32:30.504Z" },
{ url = "https://files.pythonhosted.org/packages/b9/ab/f5433988acc3b4d188c4bb154e57837df9488cc9ab551267cdeabd3bb5e7/libcst-1.8.6-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6609291c41f7ad0bac570bfca5af8fea1f4a27987d30a1fa8b67fe5e67e6c78d", size = 2301289, upload-time = "2025-11-03T22:32:31.812Z" },
{ url = "https://files.pythonhosted.org/packages/5d/57/89f4ba7a6f1ac274eec9903a9e9174890d2198266eee8c00bc27eb45ecf7/libcst-1.8.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:25eaeae6567091443b5374b4c7d33a33636a2d58f5eda02135e96fc6c8807786", size = 2299230, upload-time = "2025-11-03T22:32:33.242Z" },
{ url = "https://files.pythonhosted.org/packages/f2/36/0aa693bc24cce163a942df49d36bf47a7ed614a0cd5598eee2623bc31913/libcst-1.8.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04030ea4d39d69a65873b1d4d877def1c3951a7ada1824242539e399b8763d30", size = 2408519, upload-time = "2025-11-03T22:32:34.678Z" },
{ url = "https://files.pythonhosted.org/packages/db/18/6dd055b5f15afa640fb3304b2ee9df8b7f72e79513814dbd0a78638f4a0e/libcst-1.8.6-cp313-cp313-win_amd64.whl", hash = "sha256:8066f1b70f21a2961e96bedf48649f27dfd5ea68be5cd1bed3742b047f14acde", size = 2119853, upload-time = "2025-11-03T22:32:36.287Z" },
{ url = "https://files.pythonhosted.org/packages/c9/ed/5ddb2a22f0b0abdd6dcffa40621ada1feaf252a15e5b2733a0a85dfd0429/libcst-1.8.6-cp313-cp313-win_arm64.whl", hash = "sha256:c188d06b583900e662cd791a3f962a8c96d3dfc9b36ea315be39e0a4c4792ebf", size = 1999808, upload-time = "2025-11-03T22:32:38.1Z" },
{ url = "https://files.pythonhosted.org/packages/25/d3/72b2de2c40b97e1ef4a1a1db4e5e52163fc7e7740ffef3846d30bc0096b5/libcst-1.8.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c41c76e034a1094afed7057023b1d8967f968782433f7299cd170eaa01ec033e", size = 2190553, upload-time = "2025-11-03T22:32:39.819Z" },
{ url = "https://files.pythonhosted.org/packages/0d/20/983b7b210ccc3ad94a82db54230e92599c4a11b9cfc7ce3bc97c1d2df75c/libcst-1.8.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5432e785322aba3170352f6e72b32bea58d28abd141ac37cc9b0bf6b7c778f58", size = 2074717, upload-time = "2025-11-03T22:32:41.373Z" },
{ url = "https://files.pythonhosted.org/packages/13/f2/9e01678fedc772e09672ed99930de7355757035780d65d59266fcee212b8/libcst-1.8.6-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:85b7025795b796dea5284d290ff69de5089fc8e989b25d6f6f15b6800be7167f", size = 2225834, upload-time = "2025-11-03T22:32:42.716Z" },
{ url = "https://files.pythonhosted.org/packages/4a/0d/7bed847b5c8c365e9f1953da274edc87577042bee5a5af21fba63276e756/libcst-1.8.6-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:536567441182a62fb706e7aa954aca034827b19746832205953b2c725d254a93", size = 2287107, upload-time = "2025-11-03T22:32:44.549Z" },
{ url = "https://files.pythonhosted.org/packages/02/f0/7e51fa84ade26c518bfbe7e2e4758b56d86a114c72d60309ac0d350426c4/libcst-1.8.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2f04d3672bde1704f383a19e8f8331521abdbc1ed13abb349325a02ac56e5012", size = 2288672, upload-time = "2025-11-03T22:32:45.867Z" },
{ url = "https://files.pythonhosted.org/packages/ad/cd/15762659a3f5799d36aab1bc2b7e732672722e249d7800e3c5f943b41250/libcst-1.8.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f04febcd70e1e67917be7de513c8d4749d2e09206798558d7fe632134426ea4", size = 2392661, upload-time = "2025-11-03T22:32:47.232Z" },
{ url = "https://files.pythonhosted.org/packages/e4/6b/b7f9246c323910fcbe021241500f82e357521495dcfe419004dbb272c7cb/libcst-1.8.6-cp313-cp313t-win_amd64.whl", hash = "sha256:1dc3b897c8b0f7323412da3f4ad12b16b909150efc42238e19cbf19b561cc330", size = 2105068, upload-time = "2025-11-03T22:32:49.145Z" },
{ url = "https://files.pythonhosted.org/packages/a6/0b/4fd40607bc4807ec2b93b054594373d7fa3d31bb983789901afcb9bcebe9/libcst-1.8.6-cp313-cp313t-win_arm64.whl", hash = "sha256:44f38139fa95e488db0f8976f9c7ca39a64d6bc09f2eceef260aa1f6da6a2e42", size = 1985181, upload-time = "2025-11-03T22:32:50.597Z" },
{ url = "https://files.pythonhosted.org/packages/3a/60/4105441989e321f7ad0fd28ffccb83eb6aac0b7cfb0366dab855dcccfbe5/libcst-1.8.6-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:b188e626ce61de5ad1f95161b8557beb39253de4ec74fc9b1f25593324a0279c", size = 2204202, upload-time = "2025-11-03T22:32:52.311Z" },
{ url = "https://files.pythonhosted.org/packages/67/2f/51a6f285c3a183e50cfe5269d4a533c21625aac2c8de5cdf2d41f079320d/libcst-1.8.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:87e74f7d7dfcba9efa91127081e22331d7c42515f0a0ac6e81d4cf2c3ed14661", size = 2083581, upload-time = "2025-11-03T22:32:54.269Z" },
{ url = "https://files.pythonhosted.org/packages/2f/64/921b1c19b638860af76cdb28bc81d430056592910b9478eea49e31a7f47a/libcst-1.8.6-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:3a926a4b42015ee24ddfc8ae940c97bd99483d286b315b3ce82f3bafd9f53474", size = 2236495, upload-time = "2025-11-03T22:32:55.723Z" },
{ url = "https://files.pythonhosted.org/packages/12/a8/b00592f9bede618cbb3df6ffe802fc65f1d1c03d48a10d353b108057d09c/libcst-1.8.6-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:3f4fbb7f569e69fd9e89d9d9caa57ca42c577c28ed05062f96a8c207594e75b8", size = 2301466, upload-time = "2025-11-03T22:32:57.337Z" },
{ url = "https://files.pythonhosted.org/packages/af/df/790d9002f31580fefd0aec2f373a0f5da99070e04c5e8b1c995d0104f303/libcst-1.8.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:08bd63a8ce674be431260649e70fca1d43f1554f1591eac657f403ff8ef82c7a", size = 2300264, upload-time = "2025-11-03T22:32:58.852Z" },
{ url = "https://files.pythonhosted.org/packages/21/de/dc3f10e65bab461be5de57850d2910a02c24c3ddb0da28f0e6e4133c3487/libcst-1.8.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e00e275d4ba95d4963431ea3e409aa407566a74ee2bf309a402f84fc744abe47", size = 2408572, upload-time = "2025-11-03T22:33:00.552Z" },
{ url = "https://files.pythonhosted.org/packages/20/3b/35645157a7590891038b077db170d6dd04335cd2e82a63bdaa78c3297dfe/libcst-1.8.6-cp314-cp314-win_amd64.whl", hash = "sha256:fea5c7fa26556eedf277d4f72779c5ede45ac3018650721edd77fd37ccd4a2d4", size = 2193917, upload-time = "2025-11-03T22:33:02.354Z" },
{ url = "https://files.pythonhosted.org/packages/b3/a2/1034a9ba7d3e82f2c2afaad84ba5180f601aed676d92b76325797ad60951/libcst-1.8.6-cp314-cp314-win_arm64.whl", hash = "sha256:bb9b4077bdf8857b2483879cbbf70f1073bc255b057ec5aac8a70d901bb838e9", size = 2078748, upload-time = "2025-11-03T22:33:03.707Z" },
{ url = "https://files.pythonhosted.org/packages/95/a1/30bc61e8719f721a5562f77695e6154e9092d1bdf467aa35d0806dcd6cea/libcst-1.8.6-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:55ec021a296960c92e5a33b8d93e8ad4182b0eab657021f45262510a58223de1", size = 2188980, upload-time = "2025-11-03T22:33:05.152Z" },
{ url = "https://files.pythonhosted.org/packages/2c/14/c660204532407c5628e3b615015a902ed2d0b884b77714a6bdbe73350910/libcst-1.8.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ba9ab2b012fbd53b36cafd8f4440a6b60e7e487cd8b87428e57336b7f38409a4", size = 2074828, upload-time = "2025-11-03T22:33:06.864Z" },
{ url = "https://files.pythonhosted.org/packages/82/e2/c497c354943dff644749f177ee9737b09ed811b8fc842b05709a40fe0d1b/libcst-1.8.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c0a0cc80aebd8aa15609dd4d330611cbc05e9b4216bcaeabba7189f99ef07c28", size = 2225568, upload-time = "2025-11-03T22:33:08.354Z" },
{ url = "https://files.pythonhosted.org/packages/86/ef/45999676d07bd6d0eefa28109b4f97124db114e92f9e108de42ba46a8028/libcst-1.8.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:42a4f68121e2e9c29f49c97f6154e8527cd31021809cc4a941c7270aa64f41aa", size = 2286523, upload-time = "2025-11-03T22:33:10.206Z" },
{ url = "https://files.pythonhosted.org/packages/f4/6c/517d8bf57d9f811862f4125358caaf8cd3320a01291b3af08f7b50719db4/libcst-1.8.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8a434c521fadaf9680788b50d5c21f4048fa85ed19d7d70bd40549fbaeeecab1", size = 2288044, upload-time = "2025-11-03T22:33:11.628Z" },
{ url = "https://files.pythonhosted.org/packages/83/ce/24d7d49478ffb61207f229239879845da40a374965874f5ee60f96b02ddb/libcst-1.8.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6a65f844d813ab4ef351443badffa0ae358f98821561d19e18b3190f59e71996", size = 2392605, upload-time = "2025-11-03T22:33:12.962Z" },
{ url = "https://files.pythonhosted.org/packages/39/c3/829092ead738b71e96a4e96896c96f276976e5a8a58b4473ed813d7c962b/libcst-1.8.6-cp314-cp314t-win_amd64.whl", hash = "sha256:bdb14bc4d4d83a57062fed2c5da93ecb426ff65b0dc02ddf3481040f5f074a82", size = 2181581, upload-time = "2025-11-03T22:33:14.514Z" },
{ url = "https://files.pythonhosted.org/packages/98/6d/5d6a790a02eb0d9d36c4aed4f41b277497e6178900b2fa29c35353aa45ed/libcst-1.8.6-cp314-cp314t-win_arm64.whl", hash = "sha256:819c8081e2948635cab60c603e1bbdceccdfe19104a242530ad38a36222cb88f", size = 2065000, upload-time = "2025-11-03T22:33:16.257Z" },
]

[[package]]
name = "librt"
version = "0.7.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/93/e4/b59bdf1197fdf9888452ea4d2048cdad61aef85eb83e99dc52551d7fdc04/librt-0.7.4.tar.gz", hash = "sha256:3871af56c59864d5fd21d1ac001eb2fb3b140d52ba0454720f2e4a19812404ba", size = 145862, upload-time = "2025-12-15T16:52:43.862Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f3/e7/b805d868d21f425b7e76a0ea71a2700290f2266a4f3c8357fcf73efc36aa/librt-0.7.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7dd3b5c37e0fb6666c27cf4e2c88ae43da904f2155c4cfc1e5a2fdce3b9fcf92", size = 55688, upload-time = "2025-12-15T16:51:31.571Z" },
{ url = "https://files.pythonhosted.org/packages/59/5e/69a2b02e62a14cfd5bfd9f1e9adea294d5bcfeea219c7555730e5d068ee4/librt-0.7.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c5de1928c486201b23ed0cc4ac92e6e07be5cd7f3abc57c88a9cf4f0f32108", size = 57141, upload-time = "2025-12-15T16:51:32.714Z" },
{ url = "https://files.pythonhosted.org/packages/6e/6b/05dba608aae1272b8ea5ff8ef12c47a4a099a04d1e00e28a94687261d403/librt-0.7.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:078ae52ffb3f036396cc4aed558e5b61faedd504a3c1f62b8ae34bf95ae39d94", size = 165322, upload-time = "2025-12-15T16:51:33.986Z" },
{ url = "https://files.pythonhosted.org/packages/8f/bc/199533d3fc04a4cda8d7776ee0d79955ab0c64c79ca079366fbc2617e680/librt-0.7.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce58420e25097b2fc201aef9b9f6d65df1eb8438e51154e1a7feb8847e4a55ab", size = 174216, upload-time = "2025-12-15T16:51:35.384Z" },
{ url = "https://files.pythonhosted.org/packages/62/ec/09239b912a45a8ed117cb4a6616d9ff508f5d3131bd84329bf2f8d6564f1/librt-0.7.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b719c8730c02a606dc0e8413287e8e94ac2d32a51153b300baf1f62347858fba", size = 189005, upload-time = "2025-12-15T16:51:36.687Z" },
{ url = "https://files.pythonhosted.org/packages/46/2e/e188313d54c02f5b0580dd31476bb4b0177514ff8d2be9f58d4a6dc3a7ba/librt-0.7.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3749ef74c170809e6dee68addec9d2458700a8de703de081c888e92a8b015cf9", size = 183960, upload-time = "2025-12-15T16:51:37.977Z" },
{ url = "https://files.pythonhosted.org/packages/eb/84/f1d568d254518463d879161d3737b784137d236075215e56c7c9be191cee/librt-0.7.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b35c63f557653c05b5b1b6559a074dbabe0afee28ee2a05b6c9ba21ad0d16a74", size = 177609, upload-time = "2025-12-15T16:51:40.584Z" },
{ url = "https://files.pythonhosted.org/packages/5d/43/060bbc1c002f0d757c33a1afe6bf6a565f947a04841139508fc7cef6c08b/librt-0.7.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1ef704e01cb6ad39ad7af668d51677557ca7e5d377663286f0ee1b6b27c28e5f", size = 199269, upload-time = "2025-12-15T16:51:41.879Z" },
{ url = "https://files.pythonhosted.org/packages/ff/7f/708f8f02d8012ee9f366c07ea6a92882f48bd06cc1ff16a35e13d0fbfb08/librt-0.7.4-cp312-cp312-win32.whl", hash = "sha256:c66c2b245926ec15188aead25d395091cb5c9df008d3b3207268cd65557d6286", size = 43186, upload-time = "2025-12-15T16:51:43.149Z" },
{ url = "https://files.pythonhosted.org/packages/f1/a5/4e051b061c8b2509be31b2c7ad4682090502c0a8b6406edcf8c6b4fe1ef7/librt-0.7.4-cp312-cp312-win_amd64.whl", hash = "sha256:71a56f4671f7ff723451f26a6131754d7c1809e04e22ebfbac1db8c9e6767a20", size = 49455, upload-time = "2025-12-15T16:51:44.336Z" },
{ url = "https://files.pythonhosted.org/packages/d0/d2/90d84e9f919224a3c1f393af1636d8638f54925fdc6cd5ee47f1548461e5/librt-0.7.4-cp312-cp312-win_arm64.whl", hash = "sha256:419eea245e7ec0fe664eb7e85e7ff97dcdb2513ca4f6b45a8ec4a3346904f95a", size = 42828, upload-time = "2025-12-15T16:51:45.498Z" },
{ url = "https://files.pythonhosted.org/packages/fe/4d/46a53ccfbb39fd0b493fd4496eb76f3ebc15bb3e45d8c2e695a27587edf5/librt-0.7.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d44a1b1ba44cbd2fc3cb77992bef6d6fdb1028849824e1dd5e4d746e1f7f7f0b", size = 55745, upload-time = "2025-12-15T16:51:46.636Z" },
{ url = "https://files.pythonhosted.org/packages/7f/2b/3ac7f5212b1828bf4f979cf87f547db948d3e28421d7a430d4db23346ce4/librt-0.7.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c9cab4b3de1f55e6c30a84c8cee20e4d3b2476f4d547256694a1b0163da4fe32", size = 57166, upload-time = "2025-12-15T16:51:48.219Z" },
{ url = "https://files.pythonhosted.org/packages/e8/99/6523509097cbe25f363795f0c0d1c6a3746e30c2994e25b5aefdab119b21/librt-0.7.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2857c875f1edd1feef3c371fbf830a61b632fb4d1e57160bb1e6a3206e6abe67", size = 165833, upload-time = "2025-12-15T16:51:49.443Z" },
{ url = "https://files.pythonhosted.org/packages/fe/35/323611e59f8fe032649b4fb7e77f746f96eb7588fcbb31af26bae9630571/librt-0.7.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b370a77be0a16e1ad0270822c12c21462dc40496e891d3b0caf1617c8cc57e20", size = 174818, upload-time = "2025-12-15T16:51:51.015Z" },
{ url = "https://files.pythonhosted.org/packages/41/e6/40fb2bb21616c6e06b6a64022802228066e9a31618f493e03f6b9661548a/librt-0.7.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d05acd46b9a52087bfc50c59dfdf96a2c480a601e8898a44821c7fd676598f74", size = 189607, upload-time = "2025-12-15T16:51:52.671Z" },
{ url = "https://files.pythonhosted.org/packages/32/48/1b47c7d5d28b775941e739ed2bfe564b091c49201b9503514d69e4ed96d7/librt-0.7.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:70969229cb23d9c1a80e14225838d56e464dc71fa34c8342c954fc50e7516dee", size = 184585, upload-time = "2025-12-15T16:51:54.027Z" },
{ url = "https://files.pythonhosted.org/packages/75/a6/ee135dfb5d3b54d5d9001dbe483806229c6beac3ee2ba1092582b7efeb1b/librt-0.7.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4450c354b89dbb266730893862dbff06006c9ed5b06b6016d529b2bf644fc681", size = 178249, upload-time = "2025-12-15T16:51:55.248Z" },
{ url = "https://files.pythonhosted.org/packages/04/87/d5b84ec997338be26af982bcd6679be0c1db9a32faadab1cf4bb24f9e992/librt-0.7.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:adefe0d48ad35b90b6f361f6ff5a1bd95af80c17d18619c093c60a20e7a5b60c", size = 199851, upload-time = "2025-12-15T16:51:56.933Z" },
{ url = "https://files.pythonhosted.org/packages/86/63/ba1333bf48306fe398e3392a7427ce527f81b0b79d0d91618c4610ce9d15/librt-0.7.4-cp313-cp313-win32.whl", hash = "sha256:21ea710e96c1e050635700695095962a22ea420d4b3755a25e4909f2172b4ff2", size = 43249, upload-time = "2025-12-15T16:51:58.498Z" },
{ url = "https://files.pythonhosted.org/packages/f9/8a/de2c6df06cdfa9308c080e6b060fe192790b6a48a47320b215e860f0e98c/librt-0.7.4-cp313-cp313-win_amd64.whl", hash = "sha256:772e18696cf5a64afee908662fbcb1f907460ddc851336ee3a848ef7684c8e1e", size = 49417, upload-time = "2025-12-15T16:51:59.618Z" },
{ url = "https://files.pythonhosted.org/packages/31/66/8ee0949efc389691381ed686185e43536c20e7ad880c122dd1f31e65c658/librt-0.7.4-cp313-cp313-win_arm64.whl", hash = "sha256:52e34c6af84e12921748c8354aa6acf1912ca98ba60cdaa6920e34793f1a0788", size = 42824, upload-time = "2025-12-15T16:52:00.784Z" },
{ url = "https://files.pythonhosted.org/packages/74/81/6921e65c8708eb6636bbf383aa77e6c7dad33a598ed3b50c313306a2da9d/librt-0.7.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4f1ee004942eaaed6e06c087d93ebc1c67e9a293e5f6b9b5da558df6bf23dc5d", size = 55191, upload-time = "2025-12-15T16:52:01.97Z" },
{ url = "https://files.pythonhosted.org/packages/0d/d6/3eb864af8a8de8b39cc8dd2e9ded1823979a27795d72c4eea0afa8c26c9f/librt-0.7.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d854c6dc0f689bad7ed452d2a3ecff58029d80612d336a45b62c35e917f42d23", size = 56898, upload-time = "2025-12-15T16:52:03.356Z" },
{ url = "https://files.pythonhosted.org/packages/49/bc/b1d4c0711fdf79646225d576faee8747b8528a6ec1ceb6accfd89ade7102/librt-0.7.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a4f7339d9e445280f23d63dea842c0c77379c4a47471c538fc8feedab9d8d063", size = 163725, upload-time = "2025-12-15T16:52:04.572Z" },
{ url = "https://files.pythonhosted.org/packages/2c/08/61c41cd8f0a6a41fc99ea78a2205b88187e45ba9800792410ed62f033584/librt-0.7.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39003fc73f925e684f8521b2dbf34f61a5deb8a20a15dcf53e0d823190ce8848", size = 172469, upload-time = "2025-12-15T16:52:05.863Z" },
{ url = "https://files.pythonhosted.org/packages/8b/c7/4ee18b4d57f01444230bc18cf59103aeab8f8c0f45e84e0e540094df1df1/librt-0.7.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb15ee29d95875ad697d449fe6071b67f730f15a6961913a2b0205015ca0843", size = 186804, upload-time = "2025-12-15T16:52:07.192Z" },
{ url = "https://files.pythonhosted.org/packages/a1/af/009e8ba3fbf830c936842da048eda1b34b99329f402e49d88fafff6525d1/librt-0.7.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:02a69369862099e37d00765583052a99d6a68af7e19b887e1b78fee0146b755a", size = 181807, upload-time = "2025-12-15T16:52:08.554Z" },
{ url = "https://files.pythonhosted.org/packages/85/26/51ae25f813656a8b117c27a974f25e8c1e90abcd5a791ac685bf5b489a1b/librt-0.7.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ec72342cc4d62f38b25a94e28b9efefce41839aecdecf5e9627473ed04b7be16", size = 175595, upload-time = "2025-12-15T16:52:10.186Z" },
{ url = "https://files.pythonhosted.org/packages/48/93/36d6c71f830305f88996b15c8e017aa8d1e03e2e947b40b55bbf1a34cf24/librt-0.7.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:776dbb9bfa0fc5ce64234b446995d8d9f04badf64f544ca036bd6cff6f0732ce", size = 196504, upload-time = "2025-12-15T16:52:11.472Z" },
{ url = "https://files.pythonhosted.org/packages/08/11/8299e70862bb9d704735bf132c6be09c17b00fbc7cda0429a9df222fdc1b/librt-0.7.4-cp314-cp314-win32.whl", hash = "sha256:0f8cac84196d0ffcadf8469d9ded4d4e3a8b1c666095c2a291e22bf58e1e8a9f", size = 39738, upload-time = "2025-12-15T16:52:12.962Z" },
{ url = "https://files.pythonhosted.org/packages/54/d5/656b0126e4e0f8e2725cd2d2a1ec40f71f37f6f03f135a26b663c0e1a737/librt-0.7.4-cp314-cp314-win_amd64.whl", hash = "sha256:037f5cb6fe5abe23f1dc058054d50e9699fcc90d0677eee4e4f74a8677636a1a", size = 45976, upload-time = "2025-12-15T16:52:14.441Z" },
{ url = "https://files.pythonhosted.org/packages/60/86/465ff07b75c1067da8fa7f02913c4ead096ef106cfac97a977f763783bfb/librt-0.7.4-cp314-cp314-win_arm64.whl", hash = "sha256:a5deebb53d7a4d7e2e758a96befcd8edaaca0633ae71857995a0f16033289e44", size = 39073, upload-time = "2025-12-15T16:52:15.621Z" },
{ url = "https://files.pythonhosted.org/packages/b3/a0/24941f85960774a80d4b3c2aec651d7d980466da8101cae89e8b032a3e21/librt-0.7.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b4c25312c7f4e6ab35ab16211bdf819e6e4eddcba3b2ea632fb51c9a2a97e105", size = 57369, upload-time = "2025-12-15T16:52:16.782Z" },
{ url = "https://files.pythonhosted.org/packages/77/a0/ddb259cae86ab415786c1547d0fe1b40f04a7b089f564fd5c0242a3fafb2/librt-0.7.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:618b7459bb392bdf373f2327e477597fff8f9e6a1878fffc1b711c013d1b0da4", size = 59230, upload-time = "2025-12-15T16:52:18.259Z" },
{ url = "https://files.pythonhosted.org/packages/31/11/77823cb530ab8a0c6fac848ac65b745be446f6f301753b8990e8809080c9/librt-0.7.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1437c3f72a30c7047f16fd3e972ea58b90172c3c6ca309645c1c68984f05526a", size = 183869, upload-time = "2025-12-15T16:52:19.457Z" },
{ url = "https://files.pythonhosted.org/packages/a4/ce/157db3614cf3034b3f702ae5ba4fefda4686f11eea4b7b96542324a7a0e7/librt-0.7.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c96cb76f055b33308f6858b9b594618f1b46e147a4d03a4d7f0c449e304b9b95", size = 194606, upload-time = "2025-12-15T16:52:20.795Z" },
{ url = "https://files.pythonhosted.org/packages/30/ef/6ec4c7e3d6490f69a4fd2803516fa5334a848a4173eac26d8ee6507bff6e/librt-0.7.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28f990e6821204f516d09dc39966ef8b84556ffd648d5926c9a3f681e8de8906", size = 206776, upload-time = "2025-12-15T16:52:22.229Z" },
{ url = "https://files.pythonhosted.org/packages/ad/22/750b37bf549f60a4782ab80e9d1e9c44981374ab79a7ea68670159905918/librt-0.7.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc4aebecc79781a1b77d7d4e7d9fe080385a439e198d993b557b60f9117addaf", size = 203205, upload-time = "2025-12-15T16:52:23.603Z" },
{ url = "https://files.pythonhosted.org/packages/7a/87/2e8a0f584412a93df5faad46c5fa0a6825fdb5eba2ce482074b114877f44/librt-0.7.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:022cc673e69283a42621dd453e2407cf1647e77f8bd857d7ad7499901e62376f", size = 196696, upload-time = "2025-12-15T16:52:24.951Z" },
{ url = "https://files.pythonhosted.org/packages/e5/ca/7bf78fa950e43b564b7de52ceeb477fb211a11f5733227efa1591d05a307/librt-0.7.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2b3ca211ae8ea540569e9c513da052699b7b06928dcda61247cb4f318122bdb5", size = 217191, upload-time = "2025-12-15T16:52:26.194Z" },
{ url = "https://files.pythonhosted.org/packages/d6/49/3732b0e8424ae35ad5c3166d9dd5bcdae43ce98775e0867a716ff5868064/librt-0.7.4-cp314-cp314t-win32.whl", hash = "sha256:8a461f6456981d8c8e971ff5a55f2e34f4e60871e665d2f5fde23ee74dea4eeb", size = 40276, upload-time = "2025-12-15T16:52:27.54Z" },
{ url = "https://files.pythonhosted.org/packages/35/d6/d8823e01bd069934525fddb343189c008b39828a429b473fb20d67d5cd36/librt-0.7.4-cp314-cp314t-win_amd64.whl", hash = "sha256:721a7b125a817d60bf4924e1eec2a7867bfcf64cfc333045de1df7a0629e4481", size = 46772, upload-time = "2025-12-15T16:52:28.653Z" },
{ url = "https://files.pythonhosted.org/packages/36/e9/a0aa60f5322814dd084a89614e9e31139702e342f8459ad8af1984a18168/librt-0.7.4-cp314-cp314t-win_arm64.whl", hash = "sha256:76b2ba71265c0102d11458879b4d53ccd0b32b0164d14deb8d2b598a018e502f", size = 39724, upload-time = "2025-12-15T16:52:29.836Z" },
]

[[package]]
@@ -875,34 +957,35 @@ wheels = [

[[package]]
name = "mypy"
version = "1.18.2"
version = "1.19.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "librt", marker = "platform_python_implementation != 'PyPy'" },
{ name = "mypy-extensions" },
{ name = "pathspec" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" },
{ url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" },
{ url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" },
{ url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" },
{ url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" },
{ url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" },
{ url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" },
{ url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" },
{ url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" },
{ url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" },
{ url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" },
{ url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" },
{ url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" },
{ url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" },
{ url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" },
{ url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" },
{ url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" },
{ url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" },
{ url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" },
{ url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" },
{ url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" },
{ url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" },
{ url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" },
{ url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" },
{ url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" },
{ url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" },
{ url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" },
{ url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" },
{ url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" },
{ url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" },
{ url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" },
{ url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" },
{ url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" },
{ url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" },
{ url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" },
{ url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" },
{ url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" },
{ url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" },
]

[[package]]
@@ -1161,7 +1244,7 @@ wheels = [

[[package]]
name = "pydantic"
version = "2.12.3"
version = "2.12.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
@@ -1169,76 +1252,80 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" }
sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" },
{ url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
]
[[package]]
name = "pydantic-core"
version = "2.41.4"
version = "2.41.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" }
sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" },
{ url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" },
{ url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" },
{ url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" },
{ url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" },
{ url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" },
{ url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" },
{ url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" },
{ url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" },
{ url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" },
{ url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" },
{ url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" },
{ url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" },
{ url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" },
{ url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" },
{ url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" },
{ url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" },
{ url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" },
{ url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" },
{ url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" },
{ url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" },
{ url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" },
{ url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" },
{ url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" },
{ url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" },
{ url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" },
{ url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" },
{ url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" },
{ url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" },
{ url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" },
{ url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" },
{ url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" },
{ url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" },
{ url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" },
{ url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" },
{ url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" },
{ url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" },
{ url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" },
{ url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" },
{ url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" },
{ url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" },
{ url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" },
{ url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" },
{ url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" },
{ url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" },
{ url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" },
{ url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" },
{ url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" },
{ url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" },
{ url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" },
{ url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" },
{ url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" },
{ url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" },
{ url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" },
{ url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" },
{ url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" },
{ url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
{ url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
{ url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
{ url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
{ url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
{ url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
{ url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
{ url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
{ url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
{ url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
{ url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
{ url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
{ url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
{ url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
{ url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
{ url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
{ url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
{ url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
{ url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
{ url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
{ url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
{ url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
{ url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
{ url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
{ url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
{ url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
{ url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
{ url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
{ url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
{ url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
{ url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
{ url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
{ url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
{ url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
{ url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
{ url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
{ url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
{ url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
{ url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
{ url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
{ url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
{ url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
{ url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
{ url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
{ url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
{ url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
{ url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
{ url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
{ url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
{ url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
{ url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
{ url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
{ url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
{ url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
{ url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
{ url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
{ url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
{ url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
{ url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
]
[[package]]
@@ -1252,7 +1339,7 @@ wheels = [
[[package]]
name = "pytest"
version = "8.4.2"
version = "9.0.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
@@ -1261,22 +1348,22 @@ dependencies = [
{ name = "pluggy" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
]
[[package]]
name = "pytest-asyncio"
version = "1.2.0"
version = "1.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" }
sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" },
{ url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
]
[[package]]
@@ -1305,11 +1392,11 @@ wheels = [
[[package]]
name = "python-dotenv"
version = "1.1.1"
version = "1.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
]
[[package]]
@@ -1399,41 +1486,41 @@ wheels = [
[[package]]
name = "ruff"
version = "0.14.2"
version = "0.14.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ee/34/8218a19b2055b80601e8fd201ec723c74c7fe1ca06d525a43ed07b6d8e85/ruff-0.14.2.tar.gz", hash = "sha256:98da787668f239313d9c902ca7c523fe11b8ec3f39345553a51b25abc4629c96", size = 5539663, upload-time = "2025-10-23T19:37:00.956Z" }
sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/16/dd/23eb2db5ad9acae7c845700493b72d3ae214dce0b226f27df89216110f2b/ruff-0.14.2-py3-none-linux_armv6l.whl", hash = "sha256:7cbe4e593505bdec5884c2d0a4d791a90301bc23e49a6b1eb642dd85ef9c64f1", size = 12533390, upload-time = "2025-10-23T19:36:18.044Z" },
{ url = "https://files.pythonhosted.org/packages/5a/8c/5f9acff43ddcf3f85130d0146d0477e28ccecc495f9f684f8f7119b74c0d/ruff-0.14.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8d54b561729cee92f8d89c316ad7a3f9705533f5903b042399b6ae0ddfc62e11", size = 12887187, upload-time = "2025-10-23T19:36:22.664Z" },
{ url = "https://files.pythonhosted.org/packages/99/fa/047646491479074029665022e9f3dc6f0515797f40a4b6014ea8474c539d/ruff-0.14.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c8753dfa44ebb2cde10ce5b4d2ef55a41fb9d9b16732a2c5df64620dbda44a3", size = 11925177, upload-time = "2025-10-23T19:36:24.778Z" },
{ url = "https://files.pythonhosted.org/packages/15/8b/c44cf7fe6e59ab24a9d939493a11030b503bdc2a16622cede8b7b1df0114/ruff-0.14.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d0bbeffb8d9f4fccf7b5198d566d0bad99a9cb622f1fc3467af96cb8773c9e3", size = 12358285, upload-time = "2025-10-23T19:36:26.979Z" },
{ url = "https://files.pythonhosted.org/packages/45/01/47701b26254267ef40369aea3acb62a7b23e921c27372d127e0f3af48092/ruff-0.14.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7047f0c5a713a401e43a88d36843d9c83a19c584e63d664474675620aaa634a8", size = 12303832, upload-time = "2025-10-23T19:36:29.192Z" },
{ url = "https://files.pythonhosted.org/packages/2d/5c/ae7244ca4fbdf2bee9d6405dcd5bc6ae51ee1df66eb7a9884b77b8af856d/ruff-0.14.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bf8d2f9aa1602599217d82e8e0af7fd33e5878c4d98f37906b7c93f46f9a839", size = 13036995, upload-time = "2025-10-23T19:36:31.861Z" },
{ url = "https://files.pythonhosted.org/packages/27/4c/0860a79ce6fd4c709ac01173f76f929d53f59748d0dcdd662519835dae43/ruff-0.14.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1c505b389e19c57a317cf4b42db824e2fca96ffb3d86766c1c9f8b96d32048a7", size = 14512649, upload-time = "2025-10-23T19:36:33.915Z" },
{ url = "https://files.pythonhosted.org/packages/7f/7f/d365de998069720a3abfc250ddd876fc4b81a403a766c74ff9bde15b5378/ruff-0.14.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a307fc45ebd887b3f26b36d9326bb70bf69b01561950cdcc6c0bdf7bb8e0f7cc", size = 14088182, upload-time = "2025-10-23T19:36:36.983Z" },
{ url = "https://files.pythonhosted.org/packages/6c/ea/d8e3e6b209162000a7be1faa41b0a0c16a133010311edc3329753cc6596a/ruff-0.14.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61ae91a32c853172f832c2f40bd05fd69f491db7289fb85a9b941ebdd549781a", size = 13599516, upload-time = "2025-10-23T19:36:39.208Z" },
{ url = "https://files.pythonhosted.org/packages/fa/ea/c7810322086db68989fb20a8d5221dd3b79e49e396b01badca07b433ab45/ruff-0.14.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1967e40286f63ee23c615e8e7e98098dedc7301568bd88991f6e544d8ae096", size = 13272690, upload-time = "2025-10-23T19:36:41.453Z" },
{ url = "https://files.pythonhosted.org/packages/a9/39/10b05acf8c45786ef501d454e00937e1b97964f846bf28883d1f9619928a/ruff-0.14.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:2877f02119cdebf52a632d743a2e302dea422bfae152ebe2f193d3285a3a65df", size = 13496497, upload-time = "2025-10-23T19:36:43.61Z" },
{ url = "https://files.pythonhosted.org/packages/59/a1/1f25f8301e13751c30895092485fada29076e5e14264bdacc37202e85d24/ruff-0.14.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e681c5bc777de5af898decdcb6ba3321d0d466f4cb43c3e7cc2c3b4e7b843a05", size = 12266116, upload-time = "2025-10-23T19:36:45.625Z" },
{ url = "https://files.pythonhosted.org/packages/5c/fa/0029bfc9ce16ae78164e6923ef392e5f173b793b26cc39aa1d8b366cf9dc/ruff-0.14.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e21be42d72e224736f0c992cdb9959a2fa53c7e943b97ef5d081e13170e3ffc5", size = 12281345, upload-time = "2025-10-23T19:36:47.618Z" },
{ url = "https://files.pythonhosted.org/packages/a5/ab/ece7baa3c0f29b7683be868c024f0838770c16607bea6852e46b202f1ff6/ruff-0.14.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b8264016f6f209fac16262882dbebf3f8be1629777cf0f37e7aff071b3e9b92e", size = 12629296, upload-time = "2025-10-23T19:36:49.789Z" },
{ url = "https://files.pythonhosted.org/packages/a4/7f/638f54b43f3d4e48c6a68062794e5b367ddac778051806b9e235dfb7aa81/ruff-0.14.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5ca36b4cb4db3067a3b24444463ceea5565ea78b95fe9a07ca7cb7fd16948770", size = 13371610, upload-time = "2025-10-23T19:36:51.882Z" },
{ url = "https://files.pythonhosted.org/packages/8d/35/3654a973ebe5b32e1fd4a08ed2d46755af7267da7ac710d97420d7b8657d/ruff-0.14.2-py3-none-win32.whl", hash = "sha256:41775927d287685e08f48d8eb3f765625ab0b7042cc9377e20e64f4eb0056ee9", size = 12415318, upload-time = "2025-10-23T19:36:53.961Z" },
{ url = "https://files.pythonhosted.org/packages/71/30/3758bcf9e0b6a4193a6f51abf84254aba00887dfa8c20aba18aa366c5f57/ruff-0.14.2-py3-none-win_amd64.whl", hash = "sha256:0df3424aa5c3c08b34ed8ce099df1021e3adaca6e90229273496b839e5a7e1af", size = 13565279, upload-time = "2025-10-23T19:36:56.578Z" },
{ url = "https://files.pythonhosted.org/packages/2e/5d/aa883766f8ef9ffbe6aa24f7192fb71632f31a30e77eb39aa2b0dc4290ac/ruff-0.14.2-py3-none-win_arm64.whl", hash = "sha256:ea9d635e83ba21569fbacda7e78afbfeb94911c9434aff06192d9bc23fd5495a", size = 12554956, upload-time = "2025-10-23T19:36:58.714Z" },
{ url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" },
{ url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" },
{ url = "https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" },
{ url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" },
{ url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" },
{ url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" },
{ url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" },
{ url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" },
{ url = "https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" },
{ url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" },
{ url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" },
{ url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" },
{ url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" },
{ url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" },
{ url = "https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = "2025-12-18T19:29:16.531Z" },
{ url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" },
{ url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" },
{ url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" },
]
[[package]]
name = "sentry-sdk"
version = "2.42.1"
version = "2.48.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/31/04/ec8c1dd9250847303d98516e917978cb1c7083024770d86d657d2ccb5a70/sentry_sdk-2.42.1.tar.gz", hash = "sha256:8598cc6edcfe74cb8074ba6a7c15338cdee93d63d3eb9b9943b4b568354ad5b6", size = 354839, upload-time = "2025-10-20T12:38:40.45Z" }
sdist = { url = "https://files.pythonhosted.org/packages/40/f0/0e9dc590513d5e742d7799e2038df3a05167cba084c6ca4f3cdd75b55164/sentry_sdk-2.48.0.tar.gz", hash = "sha256:5213190977ff7fdff8a58b722fb807f8d5524a80488626ebeda1b5676c0c1473", size = 384828, upload-time = "2025-12-16T14:55:41.722Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0f/cb/c21b96ff379923310b4fb2c06e8d560d801e24aeb300faa72a04776868fc/sentry_sdk-2.42.1-py2.py3-none-any.whl", hash = "sha256:f8716b50c927d3beb41bc88439dc6bcd872237b596df5b14613e2ade104aee02", size = 380952, upload-time = "2025-10-20T12:38:38.88Z" },
{ url = "https://files.pythonhosted.org/packages/4d/19/8d77f9992e5cbfcaa9133c3bf63b4fbbb051248802e1e803fed5c552fbb2/sentry_sdk-2.48.0-py2.py3-none-any.whl", hash = "sha256:6b12ac256769d41825d9b7518444e57fa35b5642df4c7c5e322af4d2c8721172", size = 414555, upload-time = "2025-12-16T14:55:40.152Z" },
]
[package.optional-dependencies]
@@ -1470,11 +1557,11 @@ wheels = [
[[package]]
name = "sqlparse"
version = "0.5.3"
version = "0.5.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999, upload-time = "2024-12-10T12:05:30.728Z" }
sdist = { url = "https://files.pythonhosted.org/packages/90/76/437d71068094df0726366574cf3432a4ed754217b436eb7429415cf2d480/sqlparse-0.5.5.tar.gz", hash = "sha256:e20d4a9b0b8585fdf63b10d30066c7c94c5d7a7ec47c889a2d83a3caa93ff28e", size = 120815, upload-time = "2025-12-19T07:17:45.073Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415, upload-time = "2024-12-10T12:05:27.824Z" },
{ url = "https://files.pythonhosted.org/packages/49/4b/359f28a903c13438ef59ebeee215fb25da53066db67b305c125f1c6d2a25/sqlparse-0.5.5-py3-none-any.whl", hash = "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba", size = 46138, upload-time = "2025-12-19T07:17:46.573Z" },
]
[[package]]
@@ -1493,14 +1580,14 @@ wheels = [
[[package]]
name = "stamina"
version = "25.1.0"
version = "25.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "tenacity" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fd/c4/d242d76ffc88aa1fd14214d3143b542857b32276db4a20f8d99669054a5e/stamina-25.1.0.tar.gz", hash = "sha256:ad674809796ae40512b3b6296cfade826efd63863ff2ca2f59f806342e91e94a", size = 561127, upload-time = "2025-03-12T09:37:08.217Z" }
sdist = { url = "https://files.pythonhosted.org/packages/58/b7/8064b246b3d684720080ee8ffbf1dde5caabe852eb9cb53655eb97992af2/stamina-25.2.0.tar.gz", hash = "sha256:fdff938789e8a0c4c496e1ee8a08ee3c7c3351239f235b53e60d4f5964d07e19", size = 565737, upload-time = "2025-12-11T09:16:59.195Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/ba/d03f7ee711391af1d5f4dd7c44f8abdd06bce247028af2441ba8f6ff329b/stamina-25.1.0-py3-none-any.whl", hash = "sha256:c08291da540e6f4243c20f7ee98f0ed0ac9101d639803c481a029b56d7e9b45d", size = 17323, upload-time = "2025-03-12T09:37:06.886Z" },
{ url = "https://files.pythonhosted.org/packages/8f/81/c525760353dff91ae2e4c42c3f3d9bf0bfeecbb6165cc393e86915f1717d/stamina-25.2.0-py3-none-any.whl", hash = "sha256:7f0de7dba735464c256a31e6372c01b8bb51fb6efd649e6773f4ce804462feea", size = 18791, upload-time = "2025-12-11T09:16:57.235Z" },
]
[[package]]
@@ -1544,11 +1631,11 @@ wheels = [
[[package]]
name = "types-docutils"
version = "0.22.2.20251006"
version = "0.22.3.20251115"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/30/79/3b5419ad9af32d99c1a953f2c96faa396280fddba22201d3788ff5b41b8a/types_docutils-0.22.2.20251006.tar.gz", hash = "sha256:c36c0459106eda39e908e9147bcff9dbd88535975cde399433c428a517b9e3b2", size = 56658, upload-time = "2025-10-06T02:55:19.477Z" }
sdist = { url = "https://files.pythonhosted.org/packages/eb/d7/576ec24bf61a280f571e1f22284793adc321610b9bcfba1bf468cf7b334f/types_docutils-0.22.3.20251115.tar.gz", hash = "sha256:0f79ea6a7bd4d12d56c9f824a0090ffae0ea4204203eb0006392906850913e16", size = 56828, upload-time = "2025-11-15T02:59:57.371Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/47/c1eed8aef21d010e8d726855c1a6346f526c40ce1f76ceabf5cd6775f6a1/types_docutils-0.22.2.20251006-py3-none-any.whl", hash = "sha256:1e61afdeb4fab4ae802034deea3e853ced5c9b5e1d156179000cb68c85daf384", size = 91880, upload-time = "2025-10-06T02:55:18.119Z" },
{ url = "https://files.pythonhosted.org/packages/9c/01/61ac9eb38f1f978b47443dc6fd2e0a3b0f647c2da741ddad30771f1b2b6f/types_docutils-0.22.3.20251115-py3-none-any.whl", hash = "sha256:c6e53715b65395d00a75a3a8a74e352c669bc63959e65a207dffaa22f4a2ad6e", size = 91951, upload-time = "2025-11-15T02:59:56.413Z" },
]
[[package]]
@@ -1566,11 +1653,11 @@ wheels = [
[[package]]
name = "types-greenlet"
version = "3.2.0.20250915"
version = "3.3.0.20251206"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b9/d7/a76e2cb6fdbc2ecaf37a330fe3168d8d89c5f64b939da78fc62c89febab5/types_greenlet-3.2.0.20250915.tar.gz", hash = "sha256:831a390b1d4789b173b067ac546a4024fa9c43825db9f66194916dd0c85e3925", size = 8861, upload-time = "2025-09-15T03:01:18.104Z" }
sdist = { url = "https://files.pythonhosted.org/packages/fc/d3/23f4ab29a5ce239935bb3c157defcf50df8648c16c65965fae03980d67f3/types_greenlet-3.3.0.20251206.tar.gz", hash = "sha256:3e1ab312ab7154c08edc2e8110fbf00d9920323edc1144ad459b7b0052063055", size = 8901, upload-time = "2025-12-06T03:01:38.634Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ae/dd/4fe5803ea70ceada70746d9e90045d673a49093907c7a6108650de59d257/types_greenlet-3.2.0.20250915-py3-none-any.whl", hash = "sha256:59c8866b6ef8b1c62e289c01c90c8544aef37c610bfa13f93099d992327ae1c7", size = 8809, upload-time = "2025-09-15T03:01:17.327Z" },
{ url = "https://files.pythonhosted.org/packages/7c/8f/aabde1b6e49b25a6804c12a707829e44ba0f5520563c09271f05d3196142/types_greenlet-3.3.0.20251206-py3-none-any.whl", hash = "sha256:8d11041c0b0db545619e8c8a1266aa4aaa4ebeae8ae6b4b7049917a6045a5590", size = 8809, upload-time = "2025-12-06T03:01:37.651Z" },
]
[[package]]
@@ -1584,32 +1671,32 @@ wheels = [
[[package]]
name = "types-psutil"
version = "7.0.0.20251001"
version = "7.1.3.20251211"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9e/91/b020f9100b196a1f247cd12575f68dcdad94f032c1e0c42987d7632142ce/types_psutil-7.0.0.20251001.tar.gz", hash = "sha256:60d696200ddae28677e7d88cdebd6e960294e85adefbaafe0f6e5d0e7b4c1963", size = 20469, upload-time = "2025-10-01T03:04:21.292Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d2/d5/85165865b060fed80b5991574c2ae0ddfd4786398dc8bceddfe0a8960b74/types_psutil-7.1.3.20251211.tar.gz", hash = "sha256:2c25f8fd3a1a4aebdffb861b97755c9a2d5d8019dd6ec1a2f2a77ec796652c89", size = 25198, upload-time = "2025-12-11T03:16:44.651Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c0/99/50f30e0b648e6f583165cb2e535b0256a02a03efa4868cb2f017ad25b3d8/types_psutil-7.0.0.20251001-py3-none-any.whl", hash = "sha256:adc31de8386d31c61bd4123112fd51e2c700c7502a001cad72a3d56ba6b463d1", size = 23164, upload-time = "2025-10-01T03:04:20.089Z" },
{ url = "https://files.pythonhosted.org/packages/29/61/658be05b56aeec195386b3f5c48cfa5bdaf8e989de3e4d802eeba457bd05/types_psutil-7.1.3.20251211-py3-none-any.whl", hash = "sha256:369872d955d7d47d77f4832b41e2300f832126e3fa97eb107d2d6a294c23c650", size = 32055, upload-time = "2025-12-11T03:16:43.864Z" },
]
[[package]]
name = "types-pygments"
version = "2.19.0.20250809"
version = "2.19.0.20251121"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-docutils" },
]
sdist = { url = "https://files.pythonhosted.org/packages/51/1b/a6317763a8f2de01c425644273e5fbe3145d648a081f3bad590b3c34e000/types_pygments-2.19.0.20250809.tar.gz", hash = "sha256:01366fd93ef73c792e6ee16498d3abf7a184f1624b50b77f9506a47ed85974c2", size = 18454, upload-time = "2025-08-09T03:17:14.322Z" }
sdist = { url = "https://files.pythonhosted.org/packages/90/3b/cd650700ce9e26b56bd1a6aa4af397bbbc1784e22a03971cb633cdb0b601/types_pygments-2.19.0.20251121.tar.gz", hash = "sha256:eef114fde2ef6265365522045eac0f8354978a566852f69e75c531f0553822b1", size = 18590, upload-time = "2025-11-21T03:03:46.623Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8d/c4/d9f0923a941159664d664a0b714242fbbd745046db2d6c8de6fe1859c572/types_pygments-2.19.0.20250809-py3-none-any.whl", hash = "sha256:8e813e5fc25f741b81cadc1e181d402ebd288e34a9812862ddffee2f2b57db7c", size = 25407, upload-time = "2025-08-09T03:17:13.223Z" },
{ url = "https://files.pythonhosted.org/packages/99/8a/9244b21f1d60dcc62e261435d76b02f1853b4771663d7ec7d287e47a9ba9/types_pygments-2.19.0.20251121-py3-none-any.whl", hash = "sha256:cb3bfde34eb75b984c98fb733ce4f795213bd3378f855c32e75b49318371bb25", size = 25674, upload-time = "2025-11-21T03:03:45.72Z" },
]
[[package]]
name = "types-setuptools"
version = "80.9.0.20250822"
version = "80.9.0.20251221"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/bd/1e5f949b7cb740c9f0feaac430e301b8f1c5f11a81e26324299ea671a237/types_setuptools-80.9.0.20250822.tar.gz", hash = "sha256:070ea7716968ec67a84c7f7768d9952ff24d28b65b6594797a464f1b3066f965", size = 41296, upload-time = "2025-08-22T03:02:08.771Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1b/49/cefdde98e1783c09a100f18ade39335e654e8e3364650e200d39069de701/types_setuptools-80.9.0.20251221.tar.gz", hash = "sha256:05da599f5a062bbee3e83d60318576ba23111a768b7a2e46aa11644109c5d17f", size = 42240, upload-time = "2025-12-21T03:20:36.236Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b6/2d/475bf15c1cdc172e7a0d665b6e373ebfb1e9bf734d3f2f543d668b07a142/types_setuptools-80.9.0.20250822-py3-none-any.whl", hash = "sha256:53bf881cb9d7e46ed12c76ef76c0aaf28cfe6211d3fab12e0b83620b1a8642c3", size = 63179, upload-time = "2025-08-22T03:02:07.643Z" },
{ url = "https://files.pythonhosted.org/packages/58/40/999a63965aaf1f67988ddf64a9ba602fde041a4199840d256dd60c7f9fa9/types_setuptools-80.9.0.20251221-py3-none-any.whl", hash = "sha256:fecf4b9ebfc4cdd9cd38b898b653ad197507d7a62d465a168b709c56e94b02c4", size = 64205, upload-time = "2025-12-21T03:20:35.049Z" },
]
[[package]]
@@ -1635,20 +1722,20 @@ wheels = [
[[package]]
name = "tzdata"
version = "2025.2"
version = "2025.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
{ url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" },
]
[[package]]
name = "urllib3"
version = "2.5.0"
version = "2.6.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
{ url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" },
]
[[package]]

View file

@@ -1,2 +1 @@
"""Workflow generation module for GitHub Actions."""

View file

@@ -4,4 +4,3 @@ from django.apps import AppConfig
class WorkflowGenConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "workflow_gen"

View file

@@ -8,14 +8,12 @@ from typing import TYPE_CHECKING, Any
import sentry_sdk
from ninja import NinjaAPI, Schema
from openai.types.chat import (
ChatCompletionSystemMessageParam,
ChatCompletionUserMessageParam,
)
from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam
from aiservice.analytics.posthog import ph
from aiservice.env_specific import debug_log_sensitive_data, llm_clients
from aiservice.models.aimodels import EXECUTE_MODEL
from aiservice.env_specific import debug_log_sensitive_data
from aiservice.llm import EXECUTE_MODEL, LLMResponse, call_llm
from aiservice.observability.decorators import observe_llm_call
if TYPE_CHECKING:
from django.http import HttpRequest
@@ -70,71 +68,82 @@ def _extract_yaml_steps(text: str) -> str | None:
return None
@observe_llm_call("workflow_generation")
async def call_workflow_gen_llm(
trace_id: str,
model,
messages: list[dict[str, str]],
temperature: float = 0,
n: int = 1,
user_id: str | None = None,
context: dict | None = None,
) -> LLMResponse:
"""Call LLM for workflow generation with automatic observability.
This function is decorated with @observe_llm_call which automatically:
- Records call start (non-blocking)
- Captures timing and token usage
- Records completion (non-blocking)
- Handles errors automatically
All observability runs in the background without blocking the LLM call.
"""
return await call_llm(
model_name=model.name, model_type=model.model_type, messages=messages, n=n, temperature=temperature
)
async def generate_workflow_steps_llm(
repo_files: dict[str, str],
directory_structure: dict[str, Any],
user_id: str,
trace_id: str = "",
codeflash_config: dict[str, Any] | None = None,
) -> str | None:
"""Generate workflow steps using LLM."""
# Format repo files for prompt
files_text = "\n".join(
[f"**{path}:**\n{content[:8000]}\n" for path, content in repo_files.items()]
)
files_text = "\n".join([f"**{path}:**\n{content[:8000]}\n" for path, content in repo_files.items()])
# Format directory structure
structure_text = json.dumps(directory_structure, indent=2)
# Format codeflash config
config_text = (
json.dumps(codeflash_config, indent=2) if codeflash_config else "Not available"
)
config_text = json.dumps(codeflash_config, indent=2) if codeflash_config else "Not available"
user_prompt = USER_PROMPT.format(
repo_files=files_text,
directory_structure=structure_text,
codeflash_config=config_text,
repo_files=files_text, directory_structure=structure_text, codeflash_config=config_text
)
system_message = ChatCompletionSystemMessageParam(
role="system", content=SYSTEM_PROMPT
)
system_message = ChatCompletionSystemMessageParam(role="system", content=SYSTEM_PROMPT)
user_message = ChatCompletionUserMessageParam(role="user", content=user_prompt)
debug_log_sensitive_data(
f"Generating workflow steps with prompt length: {len(user_prompt)}"
)
debug_log_sensitive_data(f"Generating workflow steps with prompt length: {len(user_prompt)}")
try:
llm_client = llm_clients[EXECUTE_MODEL.model_type]
if llm_client is None:
logger.error(
f"LLM client for model type '{EXECUTE_MODEL.model_type}' is not available"
)
return None
response = await llm_client.with_options(max_retries=2).chat.completions.create(
model=EXECUTE_MODEL.name,
# Call LLM with automatic observability (decorator handles everything)
response = await call_workflow_gen_llm(
trace_id=trace_id,
model=EXECUTE_MODEL,
messages=[system_message, user_message],
n=1,
temperature=0,
n=1,
user_id=user_id,
context={"num_files": len(repo_files)},
)
if not response.choices or not response.choices[0].message.content:
if not response.content:
logger.warning("LLM returned empty response for workflow generation")
return None
response_text = response.choices[0].message.content.strip()
response_text = response.content.strip()
# Extract YAML steps
steps_yaml = _extract_yaml_steps(response_text)
if steps_yaml:
logger.info(
f"Successfully generated workflow steps ({len(steps_yaml)} chars)"
)
logger.info(f"Successfully generated workflow steps ({len(steps_yaml)} chars)")
return steps_yaml
logger.warning(
f"Could not extract valid YAML steps from LLM response: {response_text[:200]}"
)
logger.warning(f"Could not extract valid YAML steps from LLM response: {response_text[:200]}")
return None
except Exception as e:
@@ -147,6 +156,7 @@ class WorkflowGenInputSchema(Schema):
repo_files: dict[str, str] # path -> content
directory_structure: dict[str, Any] # 2-level nested structure
codeflash_config: dict[str, Any] | None = None
trace_id: str = "" # Optional trace ID for observability
class WorkflowGenResponseSchema(Schema):
@@ -159,11 +169,7 @@ class WorkflowGenErrorResponseSchema(Schema):
@workflow_gen_api.post(
"/",
response={
200: WorkflowGenResponseSchema,
400: WorkflowGenErrorResponseSchema,
500: WorkflowGenErrorResponseSchema,
},
response={200: WorkflowGenResponseSchema, 400: WorkflowGenErrorResponseSchema, 500: WorkflowGenErrorResponseSchema},
)
async def generate_workflow_steps(
request: HttpRequest, data: WorkflowGenInputSchema
@@ -182,6 +188,8 @@ async def generate_workflow_steps(
workflow_steps = await generate_workflow_steps_llm(
repo_files=data.repo_files,
directory_structure=data.directory_structure,
user_id=request.user,
trace_id=data.trace_id,
codeflash_config=data.codeflash_config,
)
@@ -191,16 +199,10 @@ async def generate_workflow_steps(
error="Failed to generate workflow steps. Please try again or use the static template."
)
ph(
request.user,
"aiservice-workflow-gen-success",
properties={"steps_length": len(workflow_steps)},
)
ph(request.user, "aiservice-workflow-gen-success", properties={"steps_length": len(workflow_steps)})
return 200, WorkflowGenResponseSchema(workflow_steps=workflow_steps)
except Exception as e:
logger.error(f"Error in generate_workflow_steps endpoint: {e}")
sentry_sdk.capture_exception(e)
return 500, WorkflowGenErrorResponseSchema(
error="Internal server error while generating workflow steps"
)
return 500, WorkflowGenErrorResponseSchema(error="Internal server error while generating workflow steps")
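
A note on the decorator used above: the docstring for call_workflow_gen_llm describes @observe_llm_call as recording call start, timing/token usage, and completion in the background, without blocking the LLM call. As a rough TypeScript sketch of that wrap-and-observe pattern (not the actual Python implementation; recordStart/recordCompletion/recordFailure are hypothetical sinks):

async function recordStart(callType: string): Promise<void> { /* persist an in_progress llm_calls row */ }
async function recordCompletion(callType: string, latencyMs: number): Promise<void> { /* mark success */ }
async function recordFailure(callType: string, err: unknown, latencyMs: number): Promise<void> { /* mark failed */ }

function observeLlmCall<A extends unknown[], R>(
  callType: string,
  fn: (...args: A) => Promise<R>,
): (...args: A) => Promise<R> {
  return async (...args: A): Promise<R> => {
    const startedAt = Date.now()
    void recordStart(callType) // fire-and-forget: not awaited, so it cannot block the call
    try {
      const result = await fn(...args)
      void recordCompletion(callType, Date.now() - startedAt)
      return result
    } catch (err) {
      void recordFailure(callType, err, Date.now() - startedAt)
      throw err
    }
  }
}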

View file

@@ -1,12 +1,12 @@
{
"name": "codeflash",
"version": "0.0.18",
"version": "0.0.19",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "codeflash",
"version": "0.0.18",
"version": "0.0.19",
"workspaces": [
"packages/*"
],

View file

@@ -2,7 +2,7 @@
"name": "codeflash",
"displayName": "Codeflash - Python Optimization",
"description": "Codeflash automates Python performance optimization with AI. It discovers the fastest version of your code through AI-powered optimizations, verifies correctness, and automatically delivers performance gains.",
"version": "0.0.18",
"version": "0.0.19",
"icon": "media/Codeflash_black_background.jpg",
"publisher": "codeflash",
"repository": {

View file

@@ -26,17 +26,17 @@ export const STEP_DEFINITIONS: StepDefinition[] = [
{
id: "environment",
title: "Install Codeflash Python package",
shortTitle: "Environment",
shortTitle: "Install",
},
{
id: "server",
title: "Starting Codeflash server",
shortTitle: "Server",
shortTitle: "Start",
},
{
id: "init",
title: "Initializing Codeflash",
shortTitle: "Initialize",
shortTitle: "Activate",
},
];

View file

@@ -356,9 +356,17 @@ export function buildResultTestReport(
testType.includes("Concolic"))
) {
// Add a heading for the test type details
reportTableMd += `<details>\n`
reportTableMd += `<summary>${testType} and Runtime</summary>\n\n`
// Extract emoji if present at the start, then format as "[emoji] Click to see [name]"
const emojiMatch = testType.match(/^(\p{Emoji_Presentation}|\p{Emoji}\uFE0F?)/u)
if (emojiMatch) {
const emoji = emojiMatch[0]
const testName = testType.slice(emoji.length).trim()
reportTableMd += `<summary>${emoji} Click to see ${testName}</summary>\n\n`
} else {
reportTableMd += `<summary>Click to see ${testType}</summary>\n\n`
}
// Include the relevant test code
if (testType.includes("Existing")) {
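
For reference, the emoji branch above behaves roughly like this on an illustrative heading (the heading string below is made up; the regex is the one from the diff):

const testType = "⚙️ Existing Unit Tests" // illustrative heading
const emojiMatch = testType.match(/^(\p{Emoji_Presentation}|\p{Emoji}\uFE0F?)/u)
if (emojiMatch) {
  const emoji = emojiMatch[0] // "⚙️" — base char plus variation selector, matched by the second alternative
  const testName = testType.slice(emoji.length).trim() // "Existing Unit Tests"
  console.log(`<summary>${emoji} Click to see ${testName}</summary>`)
}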

View file

@@ -13,7 +13,7 @@
"@azure/keyvault-keys": "^4.7.2",
"@azure/keyvault-secrets": "^4.7.0",
"@codeflash-ai/code-suggester": "^5.0.3",
"@codeflash-ai/common": "^1.0.22",
"@codeflash-ai/common": "^1.0.23",
"@octokit/app": "^16.0.1",
"@octokit/auth-app": "^8.0.1",
"@octokit/core": "^7.0.2",
@@ -1093,9 +1093,9 @@
"license": "ISC"
},
"node_modules/@codeflash-ai/common": {
"version": "1.0.22",
"resolved": "https://npm.pkg.github.com/download/@codeflash-ai/common/1.0.22/26f248d8c6ced1d2b0bfbc9382db9313e2f1dc7d",
"integrity": "sha512-DgKkA+T1Uu/bnK+r2x7xMnJQE2HoyL/uDROuNSRIjnzdj0mZoAA0t5CRUYESwPlu+UBNLM+5St6spy5v/cqriA==",
"version": "1.0.23",
"resolved": "https://npm.pkg.github.com/download/@codeflash-ai/common/1.0.23/8b3ab84e6c7e82be30f5685ef27784c44512e70e",
"integrity": "sha512-jVHa6hRiC3lvgj9i4UB0/JEdNTP6Dk7Gqlk8BXCJ32hoaw2fCKTwbOpPc8vJSq+r3F8lG0WvFUVS9f1TB5kpag==",
"dependencies": {
"@azure/identity": "^4.2.0",
"@azure/keyvault-secrets": "^4.8.0",

View file

@@ -28,7 +28,7 @@
"@azure/keyvault-keys": "^4.7.2",
"@azure/keyvault-secrets": "^4.7.0",
"@codeflash-ai/code-suggester": "^5.0.3",
"@codeflash-ai/common": "^1.0.22",
"@codeflash-ai/common": "^1.0.23",
"@octokit/app": "^16.0.1",
"@octokit/auth-app": "^8.0.1",
"@octokit/core": "^7.0.2",

View file

@@ -10,7 +10,7 @@
"dependencies": {
"@auth0/nextjs-auth0": "^3.3.0",
"@azure/msal-node": "^3.7.3",
"@codeflash-ai/common": "^1.0.22",
"@codeflash-ai/common": "^1.0.23",
"@hookform/resolvers": "^3.3.2",
"@monaco-editor/react": "^4.7.0",
"@prisma/client": "^6.7.0",
@@ -708,9 +708,9 @@
}
},
"node_modules/@codeflash-ai/common": {
"version": "1.0.22",
"resolved": "https://npm.pkg.github.com/download/@codeflash-ai/common/1.0.22/26f248d8c6ced1d2b0bfbc9382db9313e2f1dc7d",
"integrity": "sha512-DgKkA+T1Uu/bnK+r2x7xMnJQE2HoyL/uDROuNSRIjnzdj0mZoAA0t5CRUYESwPlu+UBNLM+5St6spy5v/cqriA==",
"version": "1.0.23",
"resolved": "https://npm.pkg.github.com/download/@codeflash-ai/common/1.0.23/8b3ab84e6c7e82be30f5685ef27784c44512e70e",
"integrity": "sha512-jVHa6hRiC3lvgj9i4UB0/JEdNTP6Dk7Gqlk8BXCJ32hoaw2fCKTwbOpPc8vJSq+r3F8lG0WvFUVS9f1TB5kpag==",
"dependencies": {
"@azure/identity": "^4.2.0",
"@azure/keyvault-secrets": "^4.8.0",

View file

@@ -20,7 +20,7 @@
"dependencies": {
"@auth0/nextjs-auth0": "^3.3.0",
"@azure/msal-node": "^3.7.3",
"@codeflash-ai/common": "^1.0.22",
"@codeflash-ai/common": "^1.0.23",
"@hookform/resolvers": "^3.3.2",
"@monaco-editor/react": "^4.7.0",
"@prisma/client": "^6.7.0",

View file

@@ -10,6 +10,7 @@ import {
} from "./actions"
import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"
import { useSearchParams, useRouter } from "next/navigation"
import { OptimizationUsageCard } from "@/components/dashboard/OptimizationUsageCard"
// @ts-expect-error - ToDo fix the type error
export function BillingView({ userId, subscription: initialSubscription, plans }) {
@@ -137,6 +138,14 @@ export function BillingView({ userId, subscription: initialSubscription, plans }
<div className="max-w-2xl p-6 space-y-6">
<h1 className="text-2xl font-bold">Billing & Subscription</h1>
{/* Optimization Usage Card */}
<OptimizationUsageCard
optimizationsUsed={subscription.optimizations_used || 0}
optimizationsLimit={subscription.optimizations_limit || 0}
currentPeriodEnd={subscription.current_period_end}
planType={subscription.plan_type}
/>
<Card>
<CardHeader>
<CardTitle>

View file

@@ -2,9 +2,9 @@
import React, { useState, useEffect, useCallback } from "react"
import { Users, UserPlus, RefreshCw, AlertCircle, Building2 } from "lucide-react"
import { getUserIdAndUsername } from "@/app/utils/auth"
import { Loading } from "@/components/ui/loading"
import { DashboardErrorBoundary } from "@/components/dashboard/DashboardErrorBoundary"
import { ConfirmDialog } from "@/components/confirm-dialog"
import { MembersSkeleton } from "@/components/members/MembersSkeleton"
import { GitHubUserSearchResult, Member } from "@/lib/types"
import {
addOrganizationMember,
@@ -171,7 +171,7 @@ function OrganizationMembers() {
}
if (loading) {
return <Loading />
return <MembersSkeleton count={6} />
}
if (!currentOrg?.id) {

View file

@@ -15,11 +15,11 @@ import {
} from "lucide-react"
import { getUserIdAndUsername } from "@/app/utils/auth"
import { format, subDays } from "date-fns"
import { Loading } from "@/components/ui/loading"
import { ActiveUsersLeaderboard } from "@/components/dashboard/ActiveUsersLeaderboard"
import { CompactPullRequestActivityCard } from "@/components/dashboard/CompactPullRequestActivityCard"
import { DashboardErrorBoundary } from "@/components/dashboard/DashboardErrorBoundary"
import { MetricCard } from "@/components/dashboard/MetricCard"
import { RepositoryDetailSkeleton } from "@/components/repositories/RepositoryDetailSkeleton"
import Image from "next/image"
import { useParams, useRouter, useSearchParams } from "next/navigation"
import {
@@ -679,7 +679,7 @@ function RepositoryDetail() {
}, [last30DaysStart, now])
if (loading) {
return <Loading />
return <RepositoryDetailSkeleton showTabNavigation={!currentOrg} />
}
if (error) {

View file

@@ -246,15 +246,19 @@ const RepositoryCard = ({ repo }: { repo: RepositoryWithUsage }) => (
</Link>
)
// Loading State Component
const RepositoriesLoading = ({ isRefreshing = false }: { isRefreshing?: boolean }) => (
<div className="flex flex-col items-center justify-center h-[70vh]">
<div className="animate-spin rounded-full h-10 w-10 sm:h-12 sm:w-12 border-t-2 border-b-2 border-primary mb-4"></div>
<p className="text-muted-foreground animate-pulse">
{isRefreshing ? "Refreshing repositories..." : "Loading repositories..."}
</p>
</div>
)
// Import skeleton loaders
import {
RepositoriesSkeleton,
RepositoriesRefreshingSkeleton,
} from "@/components/repositories/RepositoriesSkeleton"
// Loading State Component (now using skeleton loaders)
const RepositoriesLoading = ({ isRefreshing = false }: { isRefreshing?: boolean }) =>
isRefreshing ? (
<RepositoriesRefreshingSkeleton />
) : (
<RepositoriesSkeleton message="Loading repositories..." />
)
// Page Header Component
const PageHeader = ({ totalCount }: { totalCount: number }) => (

View file

@@ -10,6 +10,7 @@ import {
getActiveRepoIdsLast30DaysByRepoIds,
getOptimizationsTimeSeriesDataByRepoIds,
getOptimizationEventsByRepoIds,
checkAndResetSubscriptionPeriod,
} from "@codeflash-ai/common"
import { eachDayOfInterval, startOfDay } from "date-fns"
@@ -354,3 +355,35 @@ export async function statistics(payload: AccountPayload, year: number) {
)
}
}
export async function getSubscriptionData(userId: string) {
try {
const subscription = await checkAndResetSubscriptionPeriod(userId)
if (!subscription) {
return null
}
return {
optimizations_used: subscription.optimizations_used || 0,
optimizations_limit: subscription.optimizations_limit || 0,
current_period_end: subscription.current_period_end,
plan_type: subscription.plan_type || "free",
}
} catch (error) {
console.error("Failed to fetch subscription data:", error)
return null
}
}
export async function getCurrentUserSubscriptionData() {
try {
const { getUserIdAndUsername } = await import("@/app/utils/auth")
const currentUser = await getUserIdAndUsername()
if (!currentUser?.userId) {
return null
}
return await getSubscriptionData(currentUser.userId)
} catch (error) {
console.error("Failed to fetch current user subscription data:", error)
return null
}
}
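
The object returned here matches the props OptimizationUsageCard receives in BillingView above, so a server component can wire the two together directly. A sketch (the "./action" import path and the UsagePanel name are assumptions):

import { getCurrentUserSubscriptionData } from "./action" // assumed path for the action above
import { OptimizationUsageCard } from "@/components/dashboard/OptimizationUsageCard"

export default async function UsagePanel() {
  const sub = await getCurrentUserSubscriptionData()
  if (!sub) return null // unauthenticated, or no subscription row
  return (
    <OptimizationUsageCard
      optimizationsUsed={sub.optimizations_used}
      optimizationsLimit={sub.optimizations_limit}
      currentPeriodEnd={sub.current_period_end}
      planType={sub.plan_type}
    />
  )
}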

View file

@@ -15,11 +15,11 @@ import {
import { getAllRepositories, RepositoryWithUsage, statistics } from "./action"
import { getUserIdAndUsername } from "@/app/utils/auth"
import { format, subDays } from "date-fns"
import { Loading } from "@/components/ui/loading"
import { ActiveUsersLeaderboard } from "@/components/dashboard/ActiveUsersLeaderboard"
import { CompactPullRequestActivityCard } from "@/components/dashboard/CompactPullRequestActivityCard"
import { DashboardErrorBoundary } from "@/components/dashboard/DashboardErrorBoundary"
import { MetricCard } from "@/components/dashboard/MetricCard"
import { DashboardSkeleton } from "@/components/dashboard/DashboardSkeleton"
import { useViewMode } from "../app/ViewModeContext"
import { useOutsideClick } from "@/components/hooks/useOutsideClick"
@@ -204,7 +204,7 @@ function Dashboard() {
setIsYearDropdownOpen(false)
}, [])
if (loading) return <Loading />
if (loading) return <DashboardSkeleton />
if (error) return <ErrorDisplay error={error} onRetry={fetchDashboardData} />
return (

View file

@@ -0,0 +1,27 @@
import { getSession } from "@auth0/nextjs-auth0"
import { redirect } from "next/navigation"
import { ReactNode } from "react"
import { ObservabilityNav } from "@/components/observability/observability-nav"
/**
* Observability layout with authentication
* Applied to all routes under /observability
*/
export default async function ObservabilityLayout({ children }: { children: ReactNode }) {
const session = await getSession()
// Require authentication for observability pages
if (!session) {
redirect("/login")
}
// Optional: Add team/role-based access control here
// For now, any authenticated user can access observability
return (
<div className="min-h-screen bg-gray-50 dark:bg-gray-900">
<ObservabilityNav />
<div className="flex-1">{children}</div>
</div>
)
}
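
The comment above leaves team/role gating as an option; the observability pages below already enforce it per page via isTeamMember(). Lifting that check into this layout could look like the sketch below (requireObservabilityAccess is a hypothetical helper), after which the per-page checks become redundant:

import { getSession } from "@auth0/nextjs-auth0"
import { redirect } from "next/navigation"
import { isTeamMember } from "@/app/utils/auth"

// Hypothetical helper: call at the top of ObservabilityLayout before rendering children.
export async function requireObservabilityAccess() {
  const session = await getSession()
  if (!session) redirect("/login")
  if (!(await isTeamMember())) redirect("/") // same behavior as the per-page checks below
}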

View file

@@ -0,0 +1,317 @@
import { PrismaClient } from "@prisma/client"
import Link from "next/link"
import { Metadata } from "next"
import { getSession } from "@auth0/nextjs-auth0"
import { isTeamMember } from "@/app/utils/auth"
import { redirect, notFound } from "next/navigation"
const prisma = new PrismaClient()
interface LLMCallDetailPageProps {
params: {
id: string
}
}
export async function generateMetadata({ params }: LLMCallDetailPageProps): Promise<Metadata> {
return {
title: `LLM Call ${params.id.substring(0, 8)} - Observability`,
description: "View LLM call details for prompt engineering analysis",
}
}
export default async function LLMCallDetailPage({ params }: LLMCallDetailPageProps) {
// Authentication check
const session = await getSession()
if (!session) {
redirect("/")
}
const hasTeamAccess = await isTeamMember()
if (!hasTeamAccess) {
redirect("/")
}
// Fetch LLM call details
const llmCall = await prisma.llm_calls.findUnique({
where: { id: params.id },
})
if (!llmCall) {
notFound()
}
// Fetch related errors
const relatedErrors = await prisma.optimization_errors.findMany({
where: { llm_call_id: params.id },
orderBy: { created_at: "desc" },
})
return (
<div className="container mx-auto px-4 py-8">
{/* Header */}
<div className="mb-6">
<Link
href="/observability/llm-calls"
className="text-blue-600 dark:text-blue-400 hover:underline mb-2 inline-block"
>
Back to LLM Calls
</Link>
<h1 className="text-3xl font-bold text-gray-900 dark:text-white">LLM Call Detail</h1>
<p className="text-gray-600 dark:text-gray-400 mt-1">
ID: {llmCall.id} Trace:{" "}
<Link
href={`/observability/trace/${llmCall.trace_id}`}
className="text-blue-600 dark:text-blue-400 hover:underline"
>
{llmCall.trace_id}
</Link>
</p>
</div>
{/* Summary Cards */}
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6">
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Status</div>
<div
className={`text-xl font-bold ${
llmCall.status === "success"
? "text-green-600 dark:text-green-400"
: llmCall.status === "failed"
? "text-red-600 dark:text-red-400"
: "text-yellow-600 dark:text-yellow-400"
}`}
>
{llmCall.status}
</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Latency</div>
<div className="text-xl font-bold text-gray-900 dark:text-white">
{llmCall.latency_ms ? `${llmCall.latency_ms}ms` : "N/A"}
</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Tokens</div>
<div className="text-xl font-bold text-gray-900 dark:text-white">
{llmCall.total_tokens || "N/A"}
</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Cost</div>
<div className="text-xl font-bold text-gray-900 dark:text-white">
{llmCall.llm_cost ? `$${llmCall.llm_cost.toFixed(4)}` : "N/A"}
</div>
</div>
</div>
{/* Metadata */}
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 mb-6 border border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-semibold mb-4 text-gray-900 dark:text-white">Metadata</h2>
<div className="grid grid-cols-2 gap-4">
<div>
<span className="text-gray-600 dark:text-gray-400">Call Type:</span>{" "}
<span className="font-medium text-gray-900 dark:text-gray-100">
{llmCall.call_type}
</span>
</div>
<div>
<span className="text-gray-600 dark:text-gray-400">Model:</span>{" "}
<span className="font-medium text-gray-900 dark:text-gray-100">
{llmCall.model_name}
</span>
</div>
<div>
<span className="text-gray-600 dark:text-gray-400">Temperature:</span>{" "}
<span className="font-medium text-gray-900 dark:text-gray-100">
{llmCall.temperature || "default"}
</span>
</div>
<div>
<span className="text-gray-600 dark:text-gray-400">Candidates Requested:</span>{" "}
<span className="font-medium text-gray-900 dark:text-gray-100">
{llmCall.n_candidates || "N/A"}
</span>
</div>
<div>
<span className="text-gray-600 dark:text-gray-400">Created:</span>{" "}
<span className="font-medium text-gray-900 dark:text-gray-100">
{new Date(llmCall.created_at).toLocaleString()}
</span>
</div>
<div>
<span className="text-gray-600 dark:text-gray-400">Parsing Status:</span>{" "}
<span
className={`font-medium ${
llmCall.parsing_status === "success"
? "text-green-600 dark:text-green-400"
: "text-red-600 dark:text-red-400"
}`}
>
{llmCall.parsing_status || "N/A"}
</span>
</div>
</div>
</div>
{/* Token Breakdown */}
{llmCall.prompt_tokens && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 mb-6 border border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-semibold mb-4 text-gray-900 dark:text-white">Token Usage</h2>
<div className="grid grid-cols-3 gap-4">
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Prompt Tokens</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">
{llmCall.prompt_tokens}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Completion Tokens</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">
{llmCall.completion_tokens}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Total Tokens</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">
{llmCall.total_tokens}
</div>
</div>
</div>
</div>
)}
{/* Parsing Results */}
{llmCall.candidates_generated !== null && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 mb-6 border border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-semibold mb-4 text-gray-900 dark:text-white">
Parsing Results
</h2>
<div className="grid grid-cols-2 gap-4">
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Candidates Generated</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">
{llmCall.candidates_generated}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Candidates Valid</div>
<div className="text-2xl font-bold text-green-600 dark:text-green-400">
{llmCall.candidates_valid}
</div>
</div>
</div>
{llmCall.parsing_errors && (
<div className="mt-4">
<div className="text-sm text-gray-600 dark:text-gray-400 mb-2">Parsing Errors:</div>
<pre className="bg-red-50 dark:bg-red-900/20 p-3 rounded text-sm overflow-auto text-gray-900 dark:text-gray-100">
{JSON.stringify(llmCall.parsing_errors, null, 2)}
</pre>
</div>
)}
</div>
)}
{/* Prompts */}
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 mb-6 border border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-semibold mb-4 text-gray-900 dark:text-white">System Prompt</h2>
<pre className="bg-gray-50 dark:bg-gray-900 p-4 rounded text-sm overflow-auto whitespace-pre-wrap text-gray-900 dark:text-gray-100">
{llmCall.system_prompt}
</pre>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 mb-6 border border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-semibold mb-4 text-gray-900 dark:text-white">User Prompt</h2>
<pre className="bg-gray-50 dark:bg-gray-900 p-4 rounded text-sm overflow-auto whitespace-pre-wrap text-gray-900 dark:text-gray-100">
{llmCall.user_prompt}
</pre>
</div>
{/* Response */}
{llmCall.raw_response && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 mb-6 border border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-semibold mb-4 text-gray-900 dark:text-white">LLM Response</h2>
<pre className="bg-gray-50 dark:bg-gray-900 p-4 rounded text-sm overflow-auto whitespace-pre-wrap text-gray-900 dark:text-gray-100">
{llmCall.raw_response}
</pre>
</div>
)}
{/* Error Information */}
{llmCall.status === "failed" && llmCall.error_message && (
<div className="bg-red-50 dark:bg-red-900/20 rounded-lg shadow p-6 mb-6 border border-red-200 dark:border-red-800">
<h2 className="text-xl font-semibold mb-4 text-red-800 dark:text-red-400">
Error Information
</h2>
<div className="mb-2">
<span className="text-gray-600 dark:text-gray-400">Error Type:</span>{" "}
<span className="font-medium text-red-800 dark:text-red-400">{llmCall.error_type}</span>
</div>
<div className="mb-2">
<span className="text-gray-600 dark:text-gray-400">Error Message:</span>
</div>
<pre className="bg-white dark:bg-gray-900 p-3 rounded text-sm overflow-auto text-red-700 dark:text-red-400">
{llmCall.error_message}
</pre>
</div>
)}
{/* Related Errors */}
{relatedErrors.length > 0 && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 mb-6 border border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-semibold mb-4 text-gray-900 dark:text-white">
Related Errors
</h2>
<div className="space-y-3">
{relatedErrors.map(error => (
<div key={error.id} className="border-l-4 border-red-500 dark:border-red-600 pl-4">
<div className="flex items-center gap-2 mb-1">
<span
className={`px-2 py-1 rounded text-sm ${
error.severity === "critical"
? "bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200"
: error.severity === "error"
? "bg-orange-100 dark:bg-orange-900 text-orange-800 dark:text-orange-200"
: "bg-yellow-100 dark:bg-yellow-900 text-yellow-800 dark:text-yellow-200"
}`}
>
{error.severity}
</span>
<span className="text-sm text-gray-600 dark:text-gray-400">
{error.error_type}
</span>
</div>
<div className="text-sm mb-1 text-gray-900 dark:text-gray-100">
{error.error_message}
</div>
{error.context && (
<details className="text-sm text-gray-600 dark:text-gray-400">
<summary className="cursor-pointer">View context</summary>
<pre className="bg-gray-50 dark:bg-gray-900 p-2 rounded mt-2 overflow-auto text-gray-900 dark:text-gray-100">
{JSON.stringify(error.context, null, 2)}
</pre>
</details>
)}
</div>
))}
</div>
</div>
)}
{/* Actions */}
<div className="flex gap-4">
<Link
href={`/observability/trace/${llmCall.trace_id}`}
className="px-6 py-3 bg-blue-600 dark:bg-blue-700 text-white rounded hover:bg-blue-700 dark:hover:bg-blue-600"
>
View Full Trace
</Link>
<Link
href="/observability/llm-calls"
className="px-6 py-3 border border-gray-300 dark:border-gray-600 rounded hover:bg-gray-50 dark:hover:bg-gray-700 text-gray-900 dark:text-gray-100"
>
Back to List
</Link>
</div>
</div>
)
}

View file

@@ -0,0 +1,383 @@
import { PrismaClient } from "@prisma/client"
import Link from "next/link"
import { Metadata } from "next"
import { getSession } from "@auth0/nextjs-auth0"
import { isTeamMember } from "@/app/utils/auth"
import { redirect } from "next/navigation"
export const metadata: Metadata = {
title: "LLM Calls - Observability",
description: "View all LLM API calls for prompt engineering analysis",
}
const prisma = new PrismaClient()
interface SearchParams {
call_type?: string
model?: string
status?: string
trace_id?: string
page?: string
}
export default async function LLMCallsPage({ searchParams }: { searchParams: SearchParams }) {
// Authentication check - only allow team members
const session = await getSession()
if (!session) {
redirect("/")
}
const hasTeamAccess = await isTeamMember()
if (!hasTeamAccess) {
redirect("/")
}
const page = parseInt(searchParams.page || "1")
const pageSize = 50
const skip = (page - 1) * pageSize
// Build where clause based on filters
const where: any = {}
if (searchParams.call_type) {
where.call_type = searchParams.call_type
}
if (searchParams.model) {
where.model_name = { contains: searchParams.model }
}
if (searchParams.status) {
where.status = searchParams.status
}
if (searchParams.trace_id) {
// Use startsWith for prefix matching to find multi-model related calls
// e.g., searching for "f6f046aa-47e1-4f1b-9611-064787479" finds all calls in the batch
where.trace_id = { startsWith: searchParams.trace_id }
}
// Fetch LLM calls with pagination
const [llmCalls, totalCount] = await Promise.all([
prisma.llm_calls.findMany({
where,
orderBy: { created_at: "desc" },
take: pageSize,
skip,
select: {
id: true,
trace_id: true,
call_type: true,
model_name: true,
status: true,
parsing_status: true,
candidates_generated: true,
candidates_valid: true,
prompt_tokens: true,
completion_tokens: true,
llm_cost: true,
latency_ms: true,
created_at: true,
error_message: true,
context: true,
},
}),
prisma.llm_calls.count({ where }),
])
const totalPages = Math.ceil(totalCount / pageSize)
// Get unique call types and models for filters
const [callTypes, models] = await Promise.all([
prisma.llm_calls.findMany({
select: { call_type: true },
distinct: ["call_type"],
}),
prisma.llm_calls.findMany({
select: { model_name: true },
distinct: ["model_name"],
}),
])
return (
<div className="container mx-auto px-4 py-8">
<div className="mb-6">
{/* Title and Search Bar on Same Line */}
<div className="flex items-center justify-between gap-4 mb-2">
<h1 className="text-3xl font-bold text-gray-900 dark:text-white whitespace-nowrap">
LLM Calls
</h1>
{/* Compact Search Bar */}
<form method="get" className="flex items-center gap-2 flex-1 max-w-xl">
<input
type="text"
name="trace_id"
placeholder="Search by Trace ID..."
defaultValue={searchParams.trace_id || ""}
className="flex-1 px-3 py-2 text-sm border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-700 text-gray-900 dark:text-gray-100 placeholder-gray-500 dark:placeholder-gray-400"
/>
<button
type="submit"
className="px-4 py-2 text-sm bg-blue-600 dark:bg-blue-700 text-white rounded hover:bg-blue-700 dark:hover:bg-blue-600 whitespace-nowrap"
>
Search
</button>
{searchParams.trace_id && (
<Link
href="/observability/llm-calls"
className="px-4 py-2 text-sm border border-gray-300 dark:border-gray-600 rounded hover:bg-gray-50 dark:hover:bg-gray-700 text-gray-900 dark:text-gray-100 whitespace-nowrap"
>
Clear
</Link>
)}
</form>
</div>
<p className="text-gray-600 dark:text-gray-400">
Track and analyze all LLM API calls for prompt engineering
</p>
</div>
{/* Filters */}
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 mb-6 border border-gray-200 dark:border-gray-700">
<form method="get" className="flex flex-wrap gap-4">
<div>
<label className="block text-sm font-medium mb-1 text-gray-700 dark:text-gray-300">
Call Type
</label>
<select
name="call_type"
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-700 text-gray-900 dark:text-gray-100"
defaultValue={searchParams.call_type || ""}
>
<option value="">All</option>
{callTypes.map(ct => (
<option key={ct.call_type} value={ct.call_type}>
{ct.call_type}
</option>
))}
</select>
</div>
<div>
<label className="block text-sm font-medium mb-1 text-gray-700 dark:text-gray-300">
Model
</label>
<select
name="model"
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-700 text-gray-900 dark:text-gray-100"
defaultValue={searchParams.model || ""}
>
<option value="">All</option>
{models.map(m => (
<option key={m.model_name} value={m.model_name}>
{m.model_name}
</option>
))}
</select>
</div>
<div>
<label className="block text-sm font-medium mb-1 text-gray-700 dark:text-gray-300">
Status
</label>
<select
name="status"
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-700 text-gray-900 dark:text-gray-100"
defaultValue={searchParams.status || ""}
>
<option value="">All</option>
<option value="success">Success</option>
<option value="failed">Failed</option>
<option value="in_progress">In Progress</option>
</select>
</div>
<div className="flex items-end">
<button
type="submit"
className="px-4 py-2 bg-blue-600 dark:bg-blue-700 text-white rounded hover:bg-blue-700 dark:hover:bg-blue-600"
>
Filter
</button>
</div>
</form>
</div>
{/* Stats Summary */}
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6">
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Total Calls</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">{totalCount}</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Success Rate</div>
<div className="text-2xl font-bold text-green-600 dark:text-green-400">
{llmCalls.length > 0
? Math.round(
(llmCalls.filter(c => c.status === "success").length / llmCalls.length) * 100,
)
: 0}
%
</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Total Cost</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">
${llmCalls.reduce((sum, c) => sum + (c.llm_cost || 0), 0).toFixed(3)}
</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Avg Latency</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">
{llmCalls.length > 0
? Math.round(
llmCalls.reduce((sum, c) => sum + (c.latency_ms || 0), 0) / llmCalls.length,
)
: 0}
ms
</div>
</div>
</div>
{/* LLM Calls Table */}
<div className="bg-white dark:bg-gray-800 rounded-lg shadow overflow-hidden border border-gray-200 dark:border-gray-700">
<table className="min-w-full divide-y divide-gray-200 dark:divide-gray-700">
<thead className="bg-gray-50 dark:bg-gray-900">
<tr>
<th className="px-3 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
#
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Timestamp
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Trace ID
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Type
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Model
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Status
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Tokens
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Cost
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Latency
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-700 dark:text-gray-300 uppercase tracking-wider">
Candidates
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-gray-800 divide-y divide-gray-200 dark:divide-gray-700">
{llmCalls.map(call => {
const ctx = call.context as any
const callSequence = ctx?.call_sequence
return (
<tr key={call.id} className="hover:bg-gray-50 dark:hover:bg-gray-700">
<td className="px-3 py-4 whitespace-nowrap text-sm text-gray-500 dark:text-gray-400 font-mono">
{callSequence ? `#${callSequence}` : "-"}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm">
<Link
href={`/observability/llm-call/${call.id}`}
className="text-blue-600 dark:text-blue-400 hover:underline"
>
{new Date(call.created_at).toLocaleString()}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm">
<Link
href={`/observability/trace/${call.trace_id}`}
className="text-purple-600 dark:text-purple-400 hover:underline font-mono text-xs"
>
{call.trace_id.substring(0, 8)}...
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm">
<span className="px-2 py-1 bg-blue-100 dark:bg-blue-900 text-blue-800 dark:text-blue-200 rounded">
{call.call_type}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-900 dark:text-gray-100">
{call.model_name}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm">
<span
className={`px-2 py-1 rounded ${
call.status === "success"
? "bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200"
: call.status === "failed"
? "bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200"
: "bg-yellow-100 dark:bg-yellow-900 text-yellow-800 dark:text-yellow-200"
}`}
>
{call.status}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-900 dark:text-gray-100">
{call.prompt_tokens && call.completion_tokens
? `${call.prompt_tokens + call.completion_tokens}`
: "-"}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-900 dark:text-gray-100">
{call.llm_cost ? `$${call.llm_cost.toFixed(4)}` : "-"}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-900 dark:text-gray-100">
{call.latency_ms ? `${call.latency_ms}ms` : "-"}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-900 dark:text-gray-100">
{call.candidates_valid}/{call.candidates_generated || 0}
</td>
</tr>
)
})}
</tbody>
</table>
{llmCalls.length === 0 && (
<div className="text-center py-12 text-gray-500 dark:text-gray-400">
No LLM calls found.{" "}
{!searchParams.trace_id && (
<>
Run the test script to generate sample data:
<br />
<code className="bg-gray-100 dark:bg-gray-700 text-gray-800 dark:text-gray-200 px-2 py-1 rounded mt-2 inline-block">
python django/aiservice/test_observability_local.py
</code>
</>
)}
</div>
)}
</div>
{/* Pagination */}
{totalPages > 1 && (
<div className="mt-6 flex justify-center gap-2">
{page > 1 && (
<Link
href={`?page=${page - 1}${searchParams.call_type ? `&call_type=${searchParams.call_type}` : ""}${searchParams.model ? `&model=${searchParams.model}` : ""}${searchParams.status ? `&status=${searchParams.status}` : ""}`}
className="px-4 py-2 border border-gray-300 dark:border-gray-600 rounded hover:bg-gray-50 dark:hover:bg-gray-700 text-gray-900 dark:text-gray-100"
>
Previous
</Link>
)}
<span className="px-4 py-2 text-gray-900 dark:text-gray-100">
Page {page} of {totalPages}
</span>
{page < totalPages && (
<Link
href={`?page=${page + 1}${searchParams.call_type ? `&call_type=${searchParams.call_type}` : ""}${searchParams.model ? `&model=${searchParams.model}` : ""}${searchParams.status ? `&status=${searchParams.status}` : ""}`}
className="px-4 py-2 border border-gray-300 dark:border-gray-600 rounded hover:bg-gray-50 dark:hover:bg-gray-700 text-gray-900 dark:text-gray-100"
>
Next
</Link>
)}
</div>
)}
</div>
)
}
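
Both the trace_id search above and the trace page below rely on the same convention: a 36-character trace id whose last three characters act as a per-model suffix, so the first 33 characters identify the batch. A small sketch of the grouping, with the ids taken from the comments in the code:

// Hypothetical helper illustrating the 33-character prefix convention.
function baseTracePrefix(traceId: string): string {
  return traceId.substring(0, 33) // 36-char id minus the 3-char per-model suffix
}

const a = "f6f046aa-47e1-4f1b-9611-064787479000"
const b = "f6f046aa-47e1-4f1b-9611-064787479001"
// Both calls share the batch prefix "f6f046aa-47e1-4f1b-9611-064787479",
// so { trace_id: { startsWith: baseTracePrefix(a) } } matches both rows.
console.assert(baseTracePrefix(a) === baseTracePrefix(b))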

View file

@@ -0,0 +1,368 @@
import { PrismaClient } from "@prisma/client"
import Link from "next/link"
import { notFound, redirect } from "next/navigation"
import { getSession } from "@auth0/nextjs-auth0"
import { isTeamMember } from "@/app/utils/auth"
interface TracePageProps {
params: {
trace_id: string
}
}
const prisma = new PrismaClient()
export default async function TracePage({ params }: TracePageProps) {
const { trace_id } = params
// Authentication check - only allow team members
const session = await getSession()
if (!session) {
redirect("/")
}
const hasTeamAccess = await isTeamMember()
if (!hasTeamAccess) {
redirect("/")
}
// Fetch all data for this trace
// Use prefix matching (first 33 chars) to group multi-model calls that share the same base trace_id
// e.g., f6f046aa-47e1-4f1b-9611-064787479000 and f6f046aa-47e1-4f1b-9611-064787479001
// both have prefix f6f046aa-47e1-4f1b-9611-064787479
const tracePrefix = trace_id.substring(0, 33)
const [rawLlmCalls, errors] = await Promise.all([
prisma.llm_calls.findMany({
where: { trace_id: { startsWith: tracePrefix } },
orderBy: { created_at: "asc" },
}),
prisma.optimization_errors.findMany({
where: { trace_id: { startsWith: tracePrefix } },
orderBy: { created_at: "asc" },
}),
])
// Sort by call_sequence from context if available, otherwise by created_at
const llmCalls = rawLlmCalls.sort((a, b) => {
const seqA = (a.context as any)?.call_sequence ?? Infinity
const seqB = (b.context as any)?.call_sequence ?? Infinity
if (seqA !== seqB) return seqA - seqB
return new Date(a.created_at).getTime() - new Date(b.created_at).getTime()
})
// If no data found, show 404
if (llmCalls.length === 0 && errors.length === 0) {
notFound()
}
// Calculate summary metrics
const totalCost = llmCalls.reduce((sum, call) => sum + (call.llm_cost || 0), 0)
const totalTokens = llmCalls.reduce((sum, call) => sum + (call.total_tokens || 0), 0)
const totalLatency = llmCalls.reduce((sum, call) => sum + (call.latency_ms || 0), 0)
const successfulCalls = llmCalls.filter(c => c.status === "success").length
const failedCalls = llmCalls.filter(c => c.status === "failed").length
// Calculate timeline data
const firstCall = llmCalls[0]
const lastCall = llmCalls[llmCalls.length - 1]
const totalDuration =
firstCall && lastCall
? new Date(lastCall.created_at).getTime() - new Date(firstCall.created_at).getTime()
: 0
// Status determination
const status = failedCalls > 0 ? "Failed" : "Completed"
const statusColor =
failedCalls > 0 ? "text-red-600 dark:text-red-400" : "text-green-600 dark:text-green-400"
return (
<div className="container mx-auto p-6">
{/* Header with Breadcrumb */}
<div className="mb-6">
<div className="flex items-center gap-2 text-sm text-gray-600 dark:text-gray-400 mb-3">
<Link
href="/observability/traces"
className="hover:text-blue-600 dark:hover:text-blue-400"
>
Traces
</Link>
<span>/</span>
<span className="text-gray-900 dark:text-white font-mono">
{trace_id.substring(0, 8)}...
</span>
</div>
<h2 className="text-2xl font-bold text-gray-900 dark:text-white">Trace Details</h2>
<p className="text-gray-600 dark:text-gray-400 font-mono text-sm mt-1">{trace_id}</p>
</div>
{/* Summary Card */}
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 border border-gray-200 dark:border-gray-700 mb-6">
<div className="grid grid-cols-2 md:grid-cols-4 gap-4">
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Status</div>
<div className={`text-xl font-bold ${statusColor}`}>{status}</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Duration</div>
<div className="text-xl font-bold text-gray-900 dark:text-white">
{(totalDuration / 1000).toFixed(2)}s
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Total Cost</div>
<div className="text-xl font-bold text-gray-900 dark:text-white">
${totalCost.toFixed(4)}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">LLM Calls</div>
<div className="text-xl font-bold text-gray-900 dark:text-white">{llmCalls.length}</div>
</div>
</div>
</div>
{/* Timeline */}
{llmCalls.length > 0 && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 border border-gray-200 dark:border-gray-700 mb-6">
<h2 className="text-xl font-bold text-gray-900 dark:text-white mb-4">Timeline</h2>
<div className="space-y-3">
{llmCalls.map(call => {
const offsetMs =
new Date(call.created_at).getTime() - new Date(firstCall.created_at).getTime()
const offsetSec = (offsetMs / 1000).toFixed(2)
const durationSec = ((call.latency_ms || 0) / 1000).toFixed(2)
const statusIcon = call.status === "success" ? "✓" : "✗"
const statusColor =
call.status === "success"
? "text-green-600 dark:text-green-400"
: "text-red-600 dark:text-red-400"
const ctx = call.context as any
const callSequence = ctx?.call_sequence
return (
<div key={call.id} className="flex items-center gap-4 text-sm">
<span className="font-mono text-gray-500 dark:text-gray-400 w-8">
{callSequence ? `#${callSequence}` : "-"}
</span>
<span className={`font-mono ${statusColor}`}>{statusIcon}</span>
<span className="font-mono text-gray-600 dark:text-gray-400 w-16">
{offsetSec}s
</span>
<Link
href={`/observability/llm-call/${call.id}`}
className="text-blue-600 dark:text-blue-400 hover:underline font-medium"
>
{call.call_type}
</Link>
<span className="text-gray-600 dark:text-gray-400">
({durationSec}s, ${(call.llm_cost || 0).toFixed(4)})
</span>
<span className="text-gray-500 dark:text-gray-500 text-xs">
{call.model_name}
</span>
</div>
)
})}
</div>
</div>
)}
{/* LLM Calls Table */}
<div className="bg-white dark:bg-gray-800 rounded-lg shadow border border-gray-200 dark:border-gray-700 mb-6">
<div className="p-6 border-b border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-bold text-gray-900 dark:text-white">
LLM Calls ({llmCalls.length})
</h2>
</div>
<div className="overflow-x-auto">
<table className="w-full">
<thead className="bg-gray-50 dark:bg-gray-900">
<tr>
<th className="px-3 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
#
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Type
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Model
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Status
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Tokens
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Cost
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Latency
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Time
</th>
</tr>
</thead>
<tbody className="divide-y divide-gray-200 dark:divide-gray-700">
{llmCalls.map(call => {
const ctx = call.context as any
const callSequence = ctx?.call_sequence
return (
<tr key={call.id} className="hover:bg-gray-50 dark:hover:bg-gray-700/50">
<td className="px-3 py-4 whitespace-nowrap text-sm text-gray-500 dark:text-gray-400 font-mono">
{callSequence ? `#${callSequence}` : "-"}
</td>
<td className="px-6 py-4 whitespace-nowrap">
<Link
href={`/observability/llm-call/${call.id}`}
className="text-blue-600 dark:text-blue-400 hover:underline font-medium"
>
{call.call_type}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-700 dark:text-gray-300">
{call.model_name}
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span
className={`inline-flex px-2 py-1 text-xs font-semibold rounded-full ${
call.status === "success"
? "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-300"
: call.status === "failed"
? "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-300"
: "bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-300"
}`}
>
{call.status}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-700 dark:text-gray-300">
{call.total_tokens ? call.total_tokens.toLocaleString() : "N/A"}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-700 dark:text-gray-300">
${(call.llm_cost || 0).toFixed(4)}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-700 dark:text-gray-300">
{call.latency_ms ? `${call.latency_ms}ms` : "N/A"}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-600 dark:text-gray-400">
{new Date(call.created_at).toLocaleString()}
</td>
</tr>
)
})}
</tbody>
</table>
</div>
</div>
{/* Errors */}
{errors.length > 0 && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow border border-gray-200 dark:border-gray-700 mb-6">
<div className="p-6 border-b border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-bold text-red-600 dark:text-red-400">
Errors ({errors.length})
</h2>
</div>
<div className="divide-y divide-gray-200 dark:divide-gray-700">
{errors.map(error => (
<div key={error.id} className="p-6">
<div className="flex items-start gap-4">
<span className="text-2xl"></span>
<div className="flex-1">
<div className="flex items-center gap-2 mb-2">
<span className="font-semibold text-gray-900 dark:text-white">
{error.error_type}
</span>
<span className="text-sm text-gray-500 dark:text-gray-400">
{error.error_category}
</span>
<span
className={`px-2 py-1 text-xs font-semibold rounded-full ${
error.severity === "error"
? "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-300"
: error.severity === "warning"
? "bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-300"
: "bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-300"
}`}
>
{error.severity}
</span>
</div>
<p className="text-gray-700 dark:text-gray-300 mb-2">{error.error_message}</p>
{error.error_code && (
<p className="text-sm text-gray-500 dark:text-gray-400">
Error Code:{" "}
<code className="font-mono bg-gray-100 dark:bg-gray-900 px-1 py-0.5 rounded">
{error.error_code}
</code>
</p>
)}
<p className="text-xs text-gray-500 dark:text-gray-400 mt-2">
{new Date(error.created_at).toLocaleString()}
</p>
</div>
</div>
</div>
))}
</div>
</div>
)}
{/* Summary Stats */}
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6 border border-gray-200 dark:border-gray-700">
<h2 className="text-xl font-bold text-gray-900 dark:text-white mb-4">Summary</h2>
<div className="grid grid-cols-2 md:grid-cols-3 gap-4">
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Total Duration</div>
<div className="text-lg font-semibold text-gray-900 dark:text-white">
{(totalDuration / 1000).toFixed(2)}s
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">LLM Calls</div>
<div className="text-lg font-semibold text-gray-900 dark:text-white">
{llmCalls.length}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Total Cost</div>
<div className="text-lg font-semibold text-gray-900 dark:text-white">
${totalCost.toFixed(4)}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Total Tokens</div>
<div className="text-lg font-semibold text-gray-900 dark:text-white">
{totalTokens.toLocaleString()}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Successful Calls</div>
<div className="text-lg font-semibold text-green-600 dark:text-green-400">
{successfulCalls}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Failed Calls</div>
<div className="text-lg font-semibold text-red-600 dark:text-red-400">
{failedCalls}
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Total Latency</div>
<div className="text-lg font-semibold text-gray-900 dark:text-white">
{totalLatency.toLocaleString()}ms
</div>
</div>
<div>
<div className="text-sm text-gray-600 dark:text-gray-400">Errors</div>
<div className="text-lg font-semibold text-red-600 dark:text-red-400">
{errors.length}
</div>
</div>
</div>
</div>
</div>
)
}
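
The summary math above is straightforward but easy to regress; a minimal sketch of the same aggregation as a pure, unit-testable helper (the `LlmCallLike` shape is an illustration, not the real Prisma type):

interface LlmCallLike {
  status: string | null
  latency_ms: number | null
  llm_cost: number | null
  created_at: Date | string
}

// Mirrors the inline aggregation in the trace details page above
export function summarizeTrace(calls: LlmCallLike[]) {
  const totalLatency = calls.reduce((sum, c) => sum + (c.latency_ms ?? 0), 0)
  const totalCost = calls.reduce((sum, c) => sum + (c.llm_cost ?? 0), 0)
  const failedCalls = calls.filter(c => c.status === "failed").length
  const first = calls[0]
  const last = calls[calls.length - 1]
  // Wall-clock span between the first and last call, in ms (0 for empty traces)
  const totalDuration =
    first && last
      ? new Date(last.created_at).getTime() - new Date(first.created_at).getTime()
      : 0
  return {
    totalLatency,
    totalCost,
    failedCalls,
    totalDuration,
    status: failedCalls > 0 ? "Failed" : "Completed",
  }
}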

View file

@ -0,0 +1,305 @@
import { PrismaClient } from "@prisma/client"
import Link from "next/link"
import { getSession } from "@auth0/nextjs-auth0"
import { isTeamMember } from "@/app/utils/auth"
import { redirect } from "next/navigation"
interface SearchParams {
trace_id?: string
page?: string
}
const prisma = new PrismaClient()
export default async function TracesPage({ searchParams }: { searchParams: SearchParams }) {
// Authentication check - only allow team members
const session = await getSession()
if (!session) {
redirect("/")
}
const hasTeamAccess = await isTeamMember()
if (!hasTeamAccess) {
redirect("/")
}
// Pagination
const page = Math.max(1, parseInt(searchParams.page || "1", 10) || 1)
const pageSize = 50
const skip = (page - 1) * pageSize
// Build where clause for trace filtering
const where: any = {}
if (searchParams.trace_id) {
where.trace_id = { contains: searchParams.trace_id }
}
// Fetch LLM calls with pagination
const [llmCalls, totalCount] = await Promise.all([
prisma.llm_calls.findMany({
where,
orderBy: { created_at: "desc" },
take: pageSize * 10, // Fetch enough calls to cover roughly 10 pages of traces
skip: skip > 0 ? skip * 2 : 0, // Approximate offset; exact trace boundaries are unknown before grouping
}),
prisma.llm_calls.count({ where }),
])
// Group by trace_id and calculate aggregates
const traceMap = new Map<
string,
{
trace_id: string
first_seen: Date
last_seen: Date
call_count: number
total_cost: number
total_tokens: number
failed_calls: number
status: string
call_types: Set<string>
}
>()
llmCalls.forEach(call => {
if (!call.trace_id) return
const existing = traceMap.get(call.trace_id)
if (existing) {
existing.call_count++
existing.total_cost += call.llm_cost || 0
existing.total_tokens += call.total_tokens || 0
if (call.status === "failed") existing.failed_calls++
if (call.call_type) existing.call_types.add(call.call_type)
if (new Date(call.created_at) < existing.first_seen) {
existing.first_seen = new Date(call.created_at)
}
if (new Date(call.created_at) > existing.last_seen) {
existing.last_seen = new Date(call.created_at)
}
} else {
traceMap.set(call.trace_id, {
trace_id: call.trace_id,
first_seen: new Date(call.created_at),
last_seen: new Date(call.created_at),
call_count: 1,
total_cost: call.llm_cost || 0,
total_tokens: call.total_tokens || 0,
failed_calls: call.status === "failed" ? 1 : 0,
status: call.status || "unknown",
call_types: new Set(call.call_type ? [call.call_type] : []),
})
}
})
// Convert to array, sort by last_seen desc, and paginate
const allTraces = Array.from(traceMap.values()).sort(
(a, b) => b.last_seen.getTime() - a.last_seen.getTime(),
)
const traces = allTraces.slice(0, pageSize)
const totalTraces = allTraces.length
const hasMore = llmCalls.length === pageSize * 10 // Fetch window filled completely, so more rows likely exist
return (
<div className="container mx-auto p-6">
{/* Header */}
<div className="mb-6">
{/* Title and Search Bar on Same Line */}
<div className="flex items-center justify-between gap-4 mb-2">
<h2 className="text-2xl font-bold text-gray-900 dark:text-white whitespace-nowrap">
All Traces
</h2>
{/* Compact Search Bar */}
<form method="get" className="flex items-center gap-2 flex-1 max-w-xl">
<input
type="text"
name="trace_id"
placeholder="Search by Trace ID..."
defaultValue={searchParams.trace_id || ""}
className="flex-1 px-3 py-2 text-sm border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-700 text-gray-900 dark:text-gray-100 placeholder-gray-500 dark:placeholder-gray-400"
/>
<button
type="submit"
className="px-4 py-2 text-sm bg-blue-600 dark:bg-blue-700 text-white rounded hover:bg-blue-700 dark:hover:bg-blue-600 whitespace-nowrap"
>
Search
</button>
{searchParams.trace_id && (
<Link
href="/observability/traces"
className="px-4 py-2 text-sm border border-gray-300 dark:border-gray-600 rounded hover:bg-gray-50 dark:hover:bg-gray-700 text-gray-900 dark:text-gray-100 whitespace-nowrap"
>
Clear
</Link>
)}
</form>
</div>
<p className="text-gray-600 dark:text-gray-400">
View optimization request traces with aggregated metrics
</p>
</div>
{/* Summary Stats */}
<div className="grid grid-cols-1 md:grid-cols-4 gap-4 mb-6">
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Total Traces</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">{traces.length}</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Total LLM Calls</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">{llmCalls.length}</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Total Cost</div>
<div className="text-2xl font-bold text-gray-900 dark:text-white">
${traces.reduce((sum, t) => sum + t.total_cost, 0).toFixed(4)}
</div>
</div>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-4 border border-gray-200 dark:border-gray-700">
<div className="text-sm text-gray-600 dark:text-gray-400">Failed Traces</div>
<div className="text-2xl font-bold text-red-600 dark:text-red-400">
{traces.filter(t => t.failed_calls > 0).length}
</div>
</div>
</div>
{/* Traces Table */}
<div className="bg-white dark:bg-gray-800 rounded-lg shadow border border-gray-200 dark:border-gray-700">
<div className="overflow-x-auto">
<table className="w-full">
<thead className="bg-gray-50 dark:bg-gray-900">
<tr>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Trace ID
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Status
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Calls
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Call Types
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Cost
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Tokens
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Duration
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">
Time
</th>
</tr>
</thead>
<tbody className="divide-y divide-gray-200 dark:divide-gray-700">
{traces.length === 0 ? (
<tr>
<td
colSpan={8}
className="px-6 py-12 text-center text-gray-500 dark:text-gray-400"
>
No traces found. Run an optimization to see traces here.
</td>
</tr>
) : (
traces.map(trace => {
const duration = (trace.last_seen.getTime() - trace.first_seen.getTime()) / 1000
const statusColor =
trace.failed_calls > 0
? "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-300"
: "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-300"
const statusText = trace.failed_calls > 0 ? "Failed" : "Success"
return (
<tr key={trace.trace_id} className="hover:bg-gray-50 dark:hover:bg-gray-700/50">
<td className="px-6 py-4">
<Link
href={`/observability/trace/${trace.trace_id}`}
className="text-blue-600 dark:text-blue-400 hover:underline font-mono text-sm"
>
{trace.trace_id.substring(0, 8)}...
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span
className={`inline-flex px-2 py-1 text-xs font-semibold rounded-full ${statusColor}`}
>
{statusText}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-700 dark:text-gray-300">
{trace.call_count}
{trace.failed_calls > 0 && (
<span className="text-red-600 dark:text-red-400 ml-1">
({trace.failed_calls} failed)
</span>
)}
</td>
<td className="px-6 py-4 text-sm text-gray-700 dark:text-gray-300">
<div className="flex flex-wrap gap-1">
{Array.from(trace.call_types)
.slice(0, 3)
.map(type => (
<span
key={type}
className="px-2 py-0.5 text-xs bg-gray-100 dark:bg-gray-700 rounded"
>
{type}
</span>
))}
{trace.call_types.size > 3 && (
<span className="px-2 py-0.5 text-xs text-gray-500 dark:text-gray-400">
+{trace.call_types.size - 3}
</span>
)}
</div>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-700 dark:text-gray-300">
${trace.total_cost.toFixed(4)}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-700 dark:text-gray-300">
{trace.total_tokens.toLocaleString()}
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-700 dark:text-gray-300">
{duration.toFixed(2)}s
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-gray-600 dark:text-gray-400">
{trace.last_seen.toLocaleString()}
</td>
</tr>
)
})
)}
</tbody>
</table>
</div>
</div>
{/* Pagination */}
<div className="mt-6 flex justify-center items-center gap-4">
{page > 1 && (
<Link
href={`/observability/traces?page=${page - 1}${searchParams.trace_id ? `&trace_id=${searchParams.trace_id}` : ""}`}
className="px-4 py-2 border border-gray-300 dark:border-gray-600 rounded hover:bg-gray-50 dark:hover:bg-gray-700 text-gray-900 dark:text-gray-100"
>
Previous
</Link>
)}
<span className="text-gray-600 dark:text-gray-400">
Page {page} · Showing {traces.length} traces
</span>
{hasMore && traces.length === pageSize && (
<Link
href={`/observability/traces?page=${page + 1}${searchParams.trace_id ? `&trace_id=${searchParams.trace_id}` : ""}`}
className="px-4 py-2 border border-gray-300 dark:border-gray-600 rounded hover:bg-gray-50 dark:hover:bg-gray-700 text-gray-900 dark:text-gray-100"
>
Next
</Link>
)}
</div>
</div>
)
}
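
The grouping pass above is a good candidate for extraction; below is a sketch of the same Map-based aggregation as a standalone function (field names mirror the rows this page queries, but the types are assumptions for illustration):

interface CallRow {
  trace_id: string | null
  created_at: Date | string
  llm_cost: number | null
  total_tokens: number | null
  status: string | null
  call_type: string | null
}

interface TraceSummary {
  trace_id: string
  first_seen: Date
  last_seen: Date
  call_count: number
  total_cost: number
  total_tokens: number
  failed_calls: number
  call_types: Set<string>
}

export function groupCallsByTrace(calls: CallRow[]): TraceSummary[] {
  const map = new Map<string, TraceSummary>()
  for (const call of calls) {
    if (!call.trace_id) continue // calls without a trace are skipped
    const ts = new Date(call.created_at)
    const entry = map.get(call.trace_id) ?? {
      trace_id: call.trace_id,
      first_seen: ts,
      last_seen: ts,
      call_count: 0,
      total_cost: 0,
      total_tokens: 0,
      failed_calls: 0,
      call_types: new Set<string>(),
    }
    entry.call_count++
    entry.total_cost += call.llm_cost ?? 0
    entry.total_tokens += call.total_tokens ?? 0
    if (call.status === "failed") entry.failed_calls++
    if (call.call_type) entry.call_types.add(call.call_type)
    if (ts < entry.first_seen) entry.first_seen = ts
    if (ts > entry.last_seen) entry.last_seen = ts
    map.set(call.trace_id, entry)
  }
  // Most recently active traces first, matching the page's sort order
  return Array.from(map.values()).sort(
    (a, b) => b.last_seen.getTime() - a.last_seen.getTime(),
  )
}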

View file

@ -15,7 +15,10 @@ export function ConditionalLayout({ children, user }: { children: React.ReactNod
const [isAnnouncementVisible, setIsAnnouncementVisible] = useState(true)
const shouldHideLayout =
HIDDEN_PAGES.includes(pathname) || pathname.startsWith("/trace/") || !user
HIDDEN_PAGES.includes(pathname) ||
pathname.startsWith("/trace/") ||
pathname.startsWith("/observability") ||
!user
// Auto-collapse announcement after 4 seconds
useEffect(() => {

View file

@ -0,0 +1,143 @@
"use client"
import React from "react"
import { Skeleton } from "@/components/ui/skeleton"
/**
* Skeleton loader for MetricCard component
* Mimics the structure of the actual MetricCard with icon, title, value, and optional chart
*/
const MetricCardSkeleton: React.FC<{ showChart?: boolean }> = ({ showChart = true }) => (
<div className="bg-card rounded-xl border border-border p-4 h-full">
<div className="flex items-start justify-between mb-2">
<div className="flex-1">
{/* Title skeleton */}
<Skeleton className="h-4 w-32 mb-2" />
{/* Value skeleton */}
<Skeleton className="h-8 w-20" />
</div>
{/* Icon skeleton */}
<Skeleton className="w-8 h-8 sm:w-10 sm:h-10 rounded-lg sm:rounded-xl" />
</div>
{showChart ? (
/* Chart skeleton */
<div className="mt-2">
<Skeleton className="h-[60px] w-full rounded-md" />
</div>
) : (
/* Time text skeleton for cards without charts */
<div className="mt-2">
<Skeleton className="h-3 w-24" />
</div>
)}
</div>
)
/**
* Skeleton loader for Pull Request Activity Card
* Mimics the PR activity chart card structure
*/
const PullRequestActivityCardSkeleton: React.FC = () => (
<div className="bg-card rounded-xl border border-border p-4 h-full flex flex-col">
<div className="flex items-center justify-between mb-3">
<div>
<Skeleton className="h-5 w-40 mb-1" />
<Skeleton className="h-3 w-28" />
</div>
<Skeleton className="h-6 w-20 rounded-md" />
</div>
{/* Stats row skeleton */}
<div className="flex justify-between text-xs mb-3">
{[1, 2, 3].map(i => (
<div key={i} className="flex items-center gap-1">
<Skeleton className="w-5 h-5 rounded-md" />
<div>
<Skeleton className="h-3 w-12 mb-1" />
<Skeleton className="h-4 w-8" />
</div>
</div>
))}
</div>
{/* Chart skeleton */}
<div className="flex-1 min-h-0">
<Skeleton className="h-full w-full rounded-md" />
</div>
</div>
)
/**
* Skeleton loader for Active Users Leaderboard
* Mimics the leaderboard structure with user rows
*/
const ActiveUsersLeaderboardSkeleton: React.FC = () => (
<div className="bg-card rounded-xl border border-border p-4 h-full flex flex-col">
<div className="mb-3">
<Skeleton className="h-5 w-40 mb-1" />
<Skeleton className="h-3 w-28" />
</div>
{/* User rows skeleton */}
<div className="space-y-3 flex-1">
{[1, 2, 3, 4, 5].map(i => (
<div key={i} className="flex items-center gap-3 pb-3 border-b border-border last:border-0">
{/* Rank */}
<Skeleton className="h-6 w-6 rounded-full" />
{/* Avatar */}
<Skeleton className="h-9 w-9 rounded-full" />
{/* Username and count */}
<div className="flex-1">
<Skeleton className="h-4 w-24 mb-1" />
<Skeleton className="h-3 w-16" />
</div>
{/* Activity indicator */}
<Skeleton className="h-6 w-12 rounded-full" />
</div>
))}
</div>
</div>
)
/**
* Complete Dashboard Skeleton Loader
* Displays skeleton placeholders matching the full dashboard layout
* Used while dashboard data is being fetched
*/
export const DashboardSkeleton: React.FC = () => {
return (
<div className="h-screen py-6 sm:py-8 px-4 sm:px-6 max-w-[1400px] mx-auto">
{/* Header skeleton */}
<div className="mb-6 sm:mb-8">
<div className="flex items-center justify-between mb-2">
<Skeleton className="h-8 w-40" />
<Skeleton className="h-6 w-24 rounded-md" />
</div>
</div>
{/* Main metrics grid */}
<div className="grid grid-cols-1 gap-3 sm:gap-5 mb-6 sm:mb-8">
{/* Top 2 large metric cards with charts */}
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3 sm:gap-5">
<MetricCardSkeleton showChart={true} />
<MetricCardSkeleton showChart={true} />
</div>
{/* Bottom 4 smaller metric cards */}
<div className="grid grid-cols-1 sm:grid-cols-2 md:grid-cols-4 gap-3 sm:gap-5">
<MetricCardSkeleton showChart={false} />
<MetricCardSkeleton showChart={false} />
<MetricCardSkeleton showChart={false} />
<MetricCardSkeleton showChart={false} />
</div>
</div>
{/* Activity cards grid */}
<div className="grid grid-cols-1 md:grid-cols-2 gap-3 sm:gap-5 mb-6 sm:mb-8 h-96 md:h-[500px]">
<PullRequestActivityCardSkeleton />
<ActiveUsersLeaderboardSkeleton />
</div>
</div>
)
}
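
One way this skeleton is typically consumed is as a `Suspense` fallback around an async server component; a sketch (the `DashboardContent` component and import path are hypothetical):

import { Suspense } from "react"
import { DashboardSkeleton } from "@/components/skeletons/DashboardSkeleton" // path assumed

// Hypothetical async server component that fetches the real dashboard data
async function DashboardContent() {
  return <div>{/* real metrics, charts, and leaderboard render here */}</div>
}

export default function DashboardPage() {
  // The skeleton shows immediately while DashboardContent streams in
  return (
    <Suspense fallback={<DashboardSkeleton />}>
      <DashboardContent />
    </Suspense>
  )
}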

View file

@ -0,0 +1,131 @@
"use client"
import React, { useMemo } from "react"
import { Zap, HelpCircle } from "lucide-react"
import { Progress } from "@/components/ui/progress"
import { format } from "date-fns"
import {
formatCredits,
calculateCreditsPercentage,
getProgressBarClassName,
roundCredits,
} from "@/lib/utils"
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"
interface OptimizationUsageCardProps {
optimizationsUsed: number
optimizationsLimit: number
currentPeriodEnd?: Date | null
planType?: string
}
export const OptimizationUsageCard: React.FC<OptimizationUsageCardProps> = ({
optimizationsUsed,
optimizationsLimit,
currentPeriodEnd,
planType,
}) => {
const percentage = useMemo(() => {
return calculateCreditsPercentage(optimizationsUsed, optimizationsLimit)
}, [optimizationsUsed, optimizationsLimit])
const remaining = useMemo(() => {
const roundedUsed = roundCredits(optimizationsUsed)
const roundedLimit = roundCredits(optimizationsLimit)
return Math.max(0, roundedLimit - roundedUsed)
}, [optimizationsUsed, optimizationsLimit])
const progressBarClassName = useMemo(() => {
return getProgressBarClassName(percentage)
}, [percentage])
const formattedUsed = useMemo(() => {
return formatCredits(optimizationsUsed)
}, [optimizationsUsed])
const formattedLimit = useMemo(() => {
return formatCredits(optimizationsLimit)
}, [optimizationsLimit])
const formattedRemaining = useMemo(() => {
return new Intl.NumberFormat("en-US").format(remaining)
}, [remaining])
const periodEndText = useMemo(() => {
if (!currentPeriodEnd) return null
try {
return format(new Date(currentPeriodEnd), "MMM d, yyyy")
} catch {
return null
}
}, [currentPeriodEnd])
// Evaluated once per render; this does not react to window resizes
const isMobile = typeof window !== "undefined" && window.innerWidth < 640
return (
<div className="bg-card rounded-xl border border-border p-4 hover:shadow-md transition-all duration-300 hover:border-primary/20 group h-full">
<div className="flex items-start justify-between mb-4">
<div className="flex-1">
<div className="flex items-center gap-1.5 mb-1">
<p className="text-muted-foreground text-xs sm:text-sm font-medium">
Optimization Attempts Usage
</p>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<button
type="button"
className="inline-flex items-center justify-center text-muted-foreground hover:text-foreground transition-colors"
aria-label="Optimization attempts calculation info"
>
<HelpCircle size={14} className="shrink-0" />
</button>
</TooltipTrigger>
<TooltipContent className="max-w-xs">
<p className="text-xs">
Confirmed performance gains use 1 attempt.
<br />
Search runs with no gains use just 0.5 attempts.
<br />
<br />
Your optimization attempts reset at the start of each billing cycle.
</p>
</TooltipContent>
</Tooltip>
</TooltipProvider>
</div>
<div className="flex items-end gap-1 sm:gap-2 mt-1">
<h3 className="text-xl sm:text-3xl font-bold text-foreground">{formattedUsed}</h3>
<span className="text-muted-foreground text-sm sm:text-base mb-0.5 sm:mb-1">/</span>
<span className="text-muted-foreground text-sm sm:text-base mb-0.5 sm:mb-1">
{formattedLimit}
</span>
</div>
<p className="text-xs sm:text-sm text-muted-foreground mt-1">
{formattedRemaining} remaining
</p>
</div>
<div className="w-8 h-8 sm:w-10 sm:h-10 rounded-lg sm:rounded-xl bg-gradient-to-br from-blue-500/20 to-blue-600/20 flex items-center justify-center text-blue-500">
<Zap size={isMobile ? 16 : 20} />
</div>
</div>
<div className="mt-4 space-y-2">
<div className="flex items-center justify-between text-xs text-muted-foreground">
<span>{percentage}% used</span>
{planType && (
<span className="capitalize text-xs px-2 py-0.5 rounded-md bg-muted">
{planType} plan
</span>
)}
</div>
<div className="relative">
<Progress value={percentage} className={`h-3 ${progressBarClassName}`} />
</div>
{periodEndText && (
<p className="text-xs text-muted-foreground italic">Resets on {periodEndText}</p>
)}
</div>
</div>
)
}
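
This card imports several credit helpers from `@/lib/utils` that this diff does not show. The following is a plausible sketch of their contracts — an assumption for illustration, not the actual implementations:

// Assumed implementations only; the real helpers in @/lib/utils may differ.
export function roundCredits(value: number): number {
  return Math.round(value * 2) / 2 // assumes credits are counted in 0.5 steps
}

export function calculateCreditsPercentage(used: number, limit: number): number {
  if (limit <= 0) return 0
  return Math.min(100, Math.round((used / limit) * 100))
}

export function formatCredits(value: number): string {
  return new Intl.NumberFormat("en-US").format(roundCredits(value))
}

export function getProgressBarClassName(percentage: number): string {
  // Thresholds and classes are assumed; shown only to make the card's usage concrete
  if (percentage >= 90) return "[&>div]:bg-red-500"
  if (percentage >= 75) return "[&>div]:bg-yellow-500"
  return "[&>div]:bg-blue-500"
}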

View file

@ -22,12 +22,16 @@ import {
Check,
UserCircle,
Menu,
Zap,
} from "lucide-react"
import { UserProfile } from "@auth0/nextjs-auth0/client"
import { SignOut } from "../ui/SignOut"
import { useViewMode } from "@/app/app/ViewModeContext"
import { Breadcrumb } from "./bread-crumb"
import { SIDEBAR_ANNOUNCEMENT } from "@/config/announcements"
import { getCurrentUserSubscriptionData } from "@/app/dashboard/action"
import { Progress } from "@/components/ui/progress"
import { formatCredits, calculateCreditsPercentage, getProgressBarClassName } from "@/lib/utils"
interface SidebarProps {
className: string
@ -43,6 +47,10 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
const dropdownRef = useRef<HTMLDivElement>(null)
const { loading: loadingOrgs, orgs, switchToMode, currentOrg, mode } = useViewMode()
const [isMobileOpen, setIsMobileOpen] = useState(false)
const [subscription, setSubscription] = useState<{
optimizations_used: number
optimizations_limit: number
} | null>(null)
const onMobileClose = () => {
setIsMobileOpen(false)
@ -74,6 +82,32 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
return () => document.removeEventListener("mousedown", handleClickOutside)
}, [])
// Fetch subscription data for personal accounts
useEffect(() => {
const fetchSubscription = async () => {
if (mode === "personal") {
try {
const subscriptionData = await getCurrentUserSubscriptionData()
if (subscriptionData) {
setSubscription({
optimizations_used: subscriptionData.optimizations_used || 0,
optimizations_limit: subscriptionData.optimizations_limit || 0,
})
} else {
setSubscription(null)
}
} catch (error) {
console.error("Failed to fetch subscription data:", error)
setSubscription(null)
}
} else {
setSubscription(null)
}
}
fetchSubscription()
}, [mode])
const toggleTheme = () => {
const newMode = !isDarkMode
setIsDarkMode(newMode)
@ -334,6 +368,42 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
{isDarkMode ? "Light mode" : "Dark mode"}
</Button>
{/* Optimization Attempts Usage - Only for Personal Accounts */}
{mode === "personal" &&
subscription &&
(() => {
const percentage = calculateCreditsPercentage(
subscription.optimizations_used,
subscription.optimizations_limit,
)
const progressBarClassName = getProgressBarClassName(percentage)
return (
<div className="px-2 py-1.5 rounded-lg bg-muted/50 border border-border/50 space-y-1.5">
<div className="flex items-center justify-between gap-2">
<div className="flex items-center gap-2 min-w-0 flex-1">
<Zap size={14} className="text-primary shrink-0" />
<span className="text-xs text-muted-foreground truncate">
Optimization Attempts
</span>
</div>
<div className="flex items-center gap-1 shrink-0">
<span className="text-xs font-medium text-foreground">
{formatCredits(subscription.optimizations_used)}
</span>
<span className="text-xs text-muted-foreground">/</span>
<span className="text-xs text-muted-foreground">
{formatCredits(subscription.optimizations_limit)}
</span>
</div>
</div>
<div className="relative">
<Progress value={percentage} className={`h-1.5 ${progressBarClassName}`} />
</div>
</div>
)
})()}
{/* Profile with Organization Switcher */}
<div className="relative" ref={dropdownRef}>
{profileButton()}
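
One caveat with the subscription effect added above: if `mode` changes while `getCurrentUserSubscriptionData()` is still in flight, a stale response can overwrite newer state. A common guard is a cancellation flag; a sketch of the same effect with that guard:

useEffect(() => {
  let cancelled = false
  const fetchSubscription = async () => {
    if (mode !== "personal") {
      setSubscription(null)
      return
    }
    try {
      const data = await getCurrentUserSubscriptionData()
      if (cancelled) return // a newer effect run owns the state now
      setSubscription(
        data
          ? {
              optimizations_used: data.optimizations_used || 0,
              optimizations_limit: data.optimizations_limit || 0,
            }
          : null,
      )
    } catch (error) {
      console.error("Failed to fetch subscription data:", error)
      if (!cancelled) setSubscription(null)
    }
  }
  fetchSubscription()
  return () => {
    cancelled = true
  }
}, [mode])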

View file

@ -0,0 +1,82 @@
"use client"
import React from "react"
import { Skeleton } from "@/components/ui/skeleton"
/**
* Skeleton loader for individual member row
* Mimics the member list item structure
*/
const MemberRowSkeleton: React.FC = () => (
<div className="flex items-center justify-between p-4 bg-card rounded-xl border border-border">
<div className="flex items-center gap-3 flex-1">
{/* Avatar skeleton */}
<Skeleton className="h-10 w-10 rounded-full" />
{/* Member info skeleton */}
<div className="flex-1">
<Skeleton className="h-4 w-32 mb-1.5" />
<Skeleton className="h-3 w-24" />
</div>
</div>
<div className="flex items-center gap-3">
{/* Role badge skeleton */}
<Skeleton className="h-6 w-16 rounded-full" />
{/* Action button skeleton */}
<Skeleton className="h-8 w-8 rounded-md" />
</div>
</div>
)
/**
* Complete Members Page Skeleton Loader
* Displays skeleton placeholders for the members page or tab
* Used while member data is being fetched
*/
export const MembersSkeleton: React.FC<{ count?: number }> = ({ count = 5 }) => {
return (
<div className="space-y-4">
{/* Header skeleton */}
<div className="flex items-center justify-between mb-6">
<div>
<Skeleton className="h-7 w-32 sm:w-40 mb-2" />
<Skeleton className="h-4 w-48 sm:w-64" />
</div>
<Skeleton className="h-9 w-28 rounded-lg" />
</div>
{/* Search and filter skeleton */}
<div className="flex flex-col sm:flex-row gap-3 mb-4">
<Skeleton className="h-10 flex-1 rounded-lg" />
<Skeleton className="h-10 w-32 rounded-lg" />
</div>
{/* Member rows skeleton */}
<div className="space-y-3">
{[...Array(count)].map((_, index) => (
<MemberRowSkeleton key={index} />
))}
</div>
</div>
)
}
/**
* Compact members skeleton for smaller sections
* Useful for loading states in tabs or smaller containers
*/
export const CompactMembersSkeleton: React.FC = () => {
return (
<div className="space-y-3">
<div className="flex items-center justify-between mb-4">
<Skeleton className="h-6 w-28" />
<Skeleton className="h-8 w-24 rounded-lg" />
</div>
{[...Array(3)].map((_, index) => (
<MemberRowSkeleton key={index} />
))}
</div>
)
}
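
As with the other skeleton components, these are meant to be swapped in while data loads; a minimal wrapper sketch (the import path is assumed):

import React from "react"
import { MembersSkeleton } from "@/components/skeletons/MembersSkeleton" // path assumed

// Renders the skeleton until member data is ready, then the real content
export function MembersSection({
  isLoading,
  children,
}: {
  isLoading: boolean
  children: React.ReactNode
}) {
  return isLoading ? <MembersSkeleton count={5} /> : <>{children}</>
}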

View file

@ -0,0 +1,55 @@
"use client"
import Link from "next/link"
import { usePathname } from "next/navigation"
import { Activity, ListTree } from "lucide-react"
import { cn } from "@/lib/utils"
const navItems = [
{ href: "/observability/traces", label: "Traces", icon: ListTree },
{ href: "/observability/llm-calls", label: "LLM Calls", icon: Activity },
]
export function ObservabilityNav() {
const pathname = usePathname()
return (
<nav className="bg-white dark:bg-gray-800 border-b border-gray-200 dark:border-gray-700 sticky top-0 z-10">
<div className="container mx-auto px-6 py-3">
<div className="flex items-center justify-between">
{/* Logo/Title */}
<div className="flex items-center gap-2">
<Activity className="h-6 w-6 text-blue-600 dark:text-blue-400" />
<h1 className="text-xl font-bold text-gray-900 dark:text-white">
Codeflash Observability
</h1>
</div>
{/* Navigation Links */}
<div className="flex items-center gap-2">
{navItems.map(item => {
const Icon = item.icon
const isActive = pathname === item.href || pathname.startsWith(item.href + "/")
return (
<Link
key={item.href}
href={item.href}
className={cn(
"flex items-center gap-2 px-4 py-2 rounded-lg text-sm font-medium transition-colors",
isActive
? "bg-blue-50 dark:bg-blue-900/20 text-blue-700 dark:text-blue-300"
: "text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-700 hover:text-gray-900 dark:hover:text-white",
)}
>
<Icon className="h-4 w-4" />
{item.label}
</Link>
)
})}
</div>
</div>
</div>
</nav>
)
}
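
For clarity, the active-tab rule above matches a nav item on its exact path or any nested route under it; a small illustration (paths are examples only):

const isActiveFor = (pathname: string, href: string) =>
  pathname === href || pathname.startsWith(href + "/")

isActiveFor("/observability/traces", "/observability/traces")          // true: exact match
isActiveFor("/observability/llm-calls/42", "/observability/llm-calls") // true: nested route
isActiveFor("/observability/trace/abc", "/observability/traces")       // false: "trace" is not "traces"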

View file

@ -0,0 +1,100 @@
"use client"
import React from "react"
import { Skeleton } from "@/components/ui/skeleton"
import { Card } from "@/components/ui/card"
/**
* Skeleton loader for individual Repository Card
* Mimics the structure of the RepositoryCard component
*/
const RepositoryCardSkeleton: React.FC = () => (
<Card className="bg-card bg-muted/5 rounded-xl border border-border overflow-hidden">
<div className="p-5">
<div className="flex items-start">
{/* Avatar skeleton */}
<div className="mr-3 flex-shrink-0">
<Skeleton className="w-9 h-9 sm:w-11 sm:h-11 rounded-full" />
</div>
<div className="flex-1 min-w-0">
{/* Repository name and badge skeleton */}
<div className="flex items-center flex-wrap gap-1 mb-2">
<Skeleton className="h-5 w-40 sm:w-48" />
<Skeleton className="h-5 w-14 rounded-full" />
</div>
{/* Organization/full name skeleton */}
<Skeleton className="h-4 w-32 sm:w-40 mb-2" />
{/* Repository stats skeleton */}
<div className="flex items-center flex-wrap gap-1.5 sm:gap-2">
<Skeleton className="h-6 w-16 rounded-full" />
<Skeleton className="h-6 w-14 rounded-full" />
<Skeleton className="h-6 w-12 rounded-full" />
</div>
</div>
</div>
{/* Last optimized date skeleton */}
<div className="mt-3 sm:mt-4">
<Skeleton className="h-3 w-36" />
</div>
</div>
</Card>
)
/**
* Complete Repositories Page Skeleton Loader
* Displays skeleton placeholders for the repositories page
* Used while repository data is being fetched
*/
export const RepositoriesSkeleton: React.FC<{ message?: string }> = ({
message = "Loading repositories...",
}) => {
return (
<div className="space-y-4 sm:space-y-6">
{/* Header skeleton */}
<div className="flex items-center justify-between mb-4 sm:mb-6">
<div>
<Skeleton className="h-8 w-32 sm:w-40 mb-2" />
<Skeleton className="h-4 w-48 sm:w-64" />
</div>
<Skeleton className="h-9 w-20 sm:w-24 rounded-lg" />
</div>
{/* Optional loading message */}
{message && (
<div className="text-center mb-4">
<p className="text-sm text-muted-foreground animate-pulse">{message}</p>
</div>
)}
{/* Repository cards grid skeleton */}
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4 sm:gap-5">
{[...Array(6)].map((_, index) => (
<RepositoryCardSkeleton key={index} />
))}
</div>
</div>
)
}
/**
* Compact loading state for refreshing repositories
* Shows fewer skeleton cards with a subtle animation
*/
export const RepositoriesRefreshingSkeleton: React.FC = () => {
return (
<div className="space-y-4">
<div className="text-center py-2">
<p className="text-sm text-muted-foreground animate-pulse">Refreshing repositories...</p>
</div>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4 sm:gap-5">
{[...Array(3)].map((_, index) => (
<RepositoryCardSkeleton key={index} />
))}
</div>
</div>
)
}

View file

@ -0,0 +1,136 @@
"use client"
import React from "react"
import { Skeleton } from "@/components/ui/skeleton"
/**
* Skeleton loader for Repository Header
* Mimics the repository header structure with avatar, name, and badges
*/
const RepositoryHeaderSkeleton: React.FC = () => (
<div className="mb-6 sm:mb-8">
<div className="flex items-start">
<div className="flex items-start gap-4 w-full">
{/* Avatar skeleton */}
<div className="flex-shrink-0">
<Skeleton className="w-12 h-12 sm:w-16 sm:h-16 rounded-full" />
</div>
{/* Repository info skeleton */}
<div className="flex-1 min-w-0">
<div className="flex items-center gap-2 flex-wrap mb-2">
<Skeleton className="h-7 w-48 sm:w-64" />
<Skeleton className="h-6 w-16 rounded-full" />
<Skeleton className="h-6 w-16 rounded-full" />
</div>
<Skeleton className="h-4 w-36 sm:w-48 mb-2" />
<div className="flex items-center gap-2">
<Skeleton className="h-5 w-32" />
</div>
</div>
</div>
</div>
</div>
)
/**
* Skeleton loader for Tab Navigation
* Mimics the tab navigation buttons
*/
const TabNavigationSkeleton: React.FC = () => (
<div className="mb-6 sm:mb-8">
<div className="flex gap-2">
<Skeleton className="h-10 w-28 rounded-lg" />
<Skeleton className="h-10 w-28 rounded-lg" />
</div>
</div>
)
/**
* Skeleton loader for Metric Card
* Reusable component for metric card placeholders
*/
const MetricCardSkeleton: React.FC<{ showChart?: boolean }> = ({ showChart = true }) => (
<div className="bg-card rounded-xl border border-border p-4 h-full">
<div className="flex items-start justify-between mb-2">
<div className="flex-1">
<Skeleton className="h-4 w-32 mb-2" />
<Skeleton className="h-8 w-20" />
</div>
<Skeleton className="w-8 h-8 sm:w-10 sm:h-10 rounded-lg sm:rounded-xl" />
</div>
{showChart && <Skeleton className="h-[60px] w-full rounded-md mt-2" />}
</div>
)
/**
* Skeleton loader for Statistics Tab
* Mimics the complete statistics section with metrics and charts
*/
const StatisticsTabSkeleton: React.FC = () => (
<div className="grid grid-cols-1 gap-3 sm:gap-5">
{/* Top 2 large metric cards */}
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3 sm:gap-5">
<MetricCardSkeleton showChart={true} />
<MetricCardSkeleton showChart={true} />
</div>
{/* Activity cards */}
<div className="grid grid-cols-1 md:grid-cols-2 gap-3 sm:gap-5 h-96 md:h-[500px]">
<div className="bg-card rounded-xl border border-border p-4 h-full flex flex-col">
<div className="flex items-center justify-between mb-3">
<div>
<Skeleton className="h-5 w-40 mb-1" />
<Skeleton className="h-3 w-28" />
</div>
<Skeleton className="h-6 w-20 rounded-md" />
</div>
<div className="flex-1">
<Skeleton className="h-full w-full rounded-md" />
</div>
</div>
<div className="bg-card rounded-xl border border-border p-4 h-full flex flex-col">
<div className="mb-3">
<Skeleton className="h-5 w-40 mb-1" />
<Skeleton className="h-3 w-28" />
</div>
<div className="space-y-3 flex-1">
{[1, 2, 3, 4, 5].map(i => (
<div
key={i}
className="flex items-center gap-3 pb-3 border-b border-border last:border-0"
>
<Skeleton className="h-6 w-6 rounded-full" />
<Skeleton className="h-9 w-9 rounded-full" />
<div className="flex-1">
<Skeleton className="h-4 w-24 mb-1" />
<Skeleton className="h-3 w-16" />
</div>
<Skeleton className="h-6 w-12 rounded-full" />
</div>
))}
</div>
</div>
</div>
</div>
)
/**
* Complete Repository Detail Skeleton Loader
* Displays skeleton placeholders for the repository detail page
* Used while repository data is being fetched
*/
export const RepositoryDetailSkeleton: React.FC<{ showTabNavigation?: boolean }> = ({
showTabNavigation = true,
}) => {
return (
<div className="flex-1 bg-background">
<div className="h-screen py-6 sm:py-8 px-4 sm:px-6 max-w-[1400px] mx-auto">
<RepositoryHeaderSkeleton />
{showTabNavigation && <TabNavigationSkeleton />}
<StatisticsTabSkeleton />
</div>
</div>
)
}

View file

@ -297,216 +297,233 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
return (
<div className="flex flex-col h-screen bg-gradient-to-br from-slate-900 via-slate-800 to-slate-900 text-slate-200">
{/* Header Section - Left Aligned */}
<div className="px-6 py-4 border-b border-slate-700/50">
<div className="flex flex-col lg:flex-row justify-between items-start gap-6">
{/* Left Side - Title and Primary Info */}
<div className="flex-1">
<div className="flex items-center gap-3 mb-3">
<h1 className="text-3xl font-bold bg-gradient-to-r from-cyan-400 via-blue-500 to-purple-600 text-transparent bg-clip-text">
CodeFlash Optimization
</h1>
{metadata.pullNumber && (
<a
href={`https://github.com/${metadata.owner || repoFullName.split("/")[0]}/${metadata.repo || repoFullName.split("/")[1]}/pull/${metadata.pullNumber}`}
target="_blank"
rel="noopener noreferrer"
className="flex items-center gap-1 text-cyan-400 hover:text-cyan-300 transition-colors bg-slate-800/50 px-2 py-1 rounded-md"
>
<GitPullRequest className="h-4 w-4" />
<span>PR #{metadata.pullNumber}</span>
<ExternalLink className="h-3 w-3" />
</a>
)}
</div>
{/* Header Section - Mobile Optimized */}
<div className="px-3 sm:px-4 md:px-6 py-2 sm:py-3 md:py-4 border-b border-slate-700/50 overflow-y-auto max-h-[40vh] md:max-h-none">
<div className="flex flex-col gap-3 sm:gap-4 md:gap-6">
{/* Top Row - Title and PR Link */}
<div className="flex items-center justify-between gap-2">
<h1 className="text-lg sm:text-xl md:text-2xl lg:text-3xl font-bold bg-gradient-to-r from-cyan-400 via-blue-500 to-purple-600 text-transparent bg-clip-text truncate flex-1">
CodeFlash Optimization
</h1>
{metadata.pullNumber && (
<a
href={`https://github.com/${metadata.owner || repoFullName.split("/")[0]}/${metadata.repo || repoFullName.split("/")[1]}/pull/${metadata.pullNumber}`}
target="_blank"
rel="noopener noreferrer"
className="flex items-center gap-1 text-cyan-400 hover:text-cyan-300 transition-colors bg-slate-800/50 px-2 py-1 rounded-md text-xs sm:text-sm flex-shrink-0"
>
<GitPullRequest className="h-3 w-3 sm:h-4 sm:w-4" />
<span className="hidden sm:inline">PR #{metadata.pullNumber}</span>
<span className="sm:hidden">#{metadata.pullNumber}</span>
<ExternalLink className="h-2.5 w-2.5 sm:h-3 sm:w-3" />
</a>
)}
</div>
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 text-sm">
<div className="flex items-center gap-2">
<FileCode className="h-4 w-4 text-cyan-400" />
<span className="text-slate-400">Repository:</span>
<span className="text-slate-200 font-medium">{repoFullName}</span>
</div>
<div className="flex items-center gap-2">
<span className="text-slate-400">Function:</span>
<code className="text-purple-400 bg-slate-800/50 px-2 py-1 rounded font-mono">
{functionName}
</code>
</div>
{/* Info Row - Repository and Function (Compact on Mobile) */}
<div className="flex flex-col sm:flex-row sm:items-center gap-2 sm:gap-4 text-xs sm:text-sm">
<div className="flex items-center gap-2 min-w-0">
<FileCode className="h-3 w-3 sm:h-4 sm:w-4 text-cyan-400 flex-shrink-0" />
<span className="text-slate-400 flex-shrink-0">Repo:</span>
<span className="text-slate-200 font-medium truncate">{repoFullName}</span>
</div>
<div className="flex items-center gap-2 min-w-0">
<span className="text-slate-400 flex-shrink-0">Function:</span>
<code className="text-purple-400 bg-slate-800/50 px-1.5 sm:px-2 py-0.5 sm:py-1 rounded font-mono text-xs sm:text-sm truncate">
{functionName}
</code>
</div>
</div>
{/* Right Side - Performance Metrics */}
<div className="flex flex-col lg:flex-row items-start lg:items-center gap-6">
{/* Edit Button */}
<div className="flex items-center gap-2">
{!isEditing ? (
<button
onClick={handleEditClick}
className="flex items-center gap-2 px-3 py-2 bg-purple-600 hover:bg-purple-700 text-white rounded-lg transition-colors text-sm font-medium"
>
<Edit3 className="h-4 w-4" />
Edit Code
</button>
) : (
<div className="flex items-center gap-2">
<button
onClick={handleSaveCode}
disabled={saveStatus === "saving"}
className="flex items-center gap-2 px-3 py-2 bg-green-600 hover:bg-green-700 disabled:bg-green-800 text-white rounded-lg transition-colors text-sm font-medium"
>
<Save className="h-4 w-4" />
{saveStatus === "saving"
? "Saving..."
: saveStatus === "saved"
? "Saved!"
: "Save"}
</button>
<button
onClick={handleCancelEdit}
className="flex items-center gap-2 px-3 py-2 bg-red-600 hover:bg-red-700 text-white rounded-lg transition-colors text-sm font-medium"
>
<X className="h-4 w-4" />
Cancel
</button>
{/* Performance Metrics Row - Compact on Mobile */}
<div className="flex flex-wrap items-center gap-2 sm:gap-4">
{/* Performance Boost - Smaller on Mobile */}
<div className="bg-slate-800/30 rounded-lg p-2 sm:p-3 md:p-4 border border-slate-700/30 flex items-center gap-1.5 sm:gap-2">
<Zap className="h-4 w-4 sm:h-5 sm:w-5 md:h-6 md:w-6 text-yellow-400 flex-shrink-0" />
<div>
<div className="text-[10px] sm:text-xs text-slate-400 uppercase tracking-wider">
Boost
</div>
)}
</div>
<div className="bg-slate-800/30 rounded-lg p-4 border border-slate-700/30">
<div className="text-xs text-slate-400 uppercase tracking-wider mb-2">
Performance Boost
</div>
<div className="flex items-center gap-2">
<Zap className="h-6 w-6 text-yellow-400" />
<span className="text-3xl font-bold bg-gradient-to-r from-green-400 to-emerald-500 text-transparent bg-clip-text">
<span className="text-lg sm:text-xl md:text-2xl lg:text-3xl font-bold bg-gradient-to-r from-green-400 to-emerald-500 text-transparent bg-clip-text block leading-tight">
{speedup}
</span>
</div>
</div>
<div className="flex gap-4">
{prCommentFields?.loop_count && (
<div className="text-center">
<div className="text-xs text-slate-400 uppercase tracking-wider mb-1">
Benchmarking loops
</div>
<div className="text-lg font-semibold text-blue-400">
{prCommentFields.loop_count.toLocaleString()}
</div>
{/* Additional Metrics - Hidden on very small screens */}
{prCommentFields?.loop_count && (
<div className="hidden sm:block text-center">
<div className="text-[10px] sm:text-xs text-slate-400 uppercase tracking-wider mb-1">
Loops
</div>
)}
{prCommentFields?.original_runtime && prCommentFields?.best_runtime && (
<div className="text-center">
<div className="text-xs text-slate-400 uppercase tracking-wider mb-1">
Runtime
</div>
<div className="text-sm">
<span className="text-red-400 line-through">
{prCommentFields.original_runtime}
</span>
<span className="mx-2 text-slate-500"></span>
<span className="text-green-400">{prCommentFields.best_runtime}</span>
</div>
<div className="text-sm sm:text-base md:text-lg font-semibold text-blue-400">
{prCommentFields.loop_count.toLocaleString()}
</div>
)}
</div>
</div>
</div>
{/* Test Results Summary */}
{testStats.totalPassed > 0 || testStats.totalFailed > 0 ? (
<div className="mt-6 bg-slate-800/30 rounded-lg p-4 border border-slate-700/30">
<div
className="flex items-center justify-between cursor-pointer"
onClick={() => setShowTestDetails(!showTestDetails)}
>
<div className="flex items-center gap-3">
<TestTube className="h-5 w-5 text-blue-400" />
<span className="font-semibold">Test Results</span>
<div className="flex items-center gap-4">
<div className="flex items-center gap-1">
<CheckCircle2 className="h-4 w-4 text-green-400" />
<span className="text-green-400 font-medium">
{testStats.totalPassed} passed
</span>
</div>
{testStats.totalFailed > 0 && (
<div className="flex items-center gap-1">
<XCircle className="h-4 w-4 text-red-400" />
<span className="text-red-400 font-medium">
{testStats.totalFailed} failed
</span>
</div>
)}
</div>
</div>
{showTestDetails ? (
<ChevronUp className="h-4 w-4" />
) : (
<ChevronDown className="h-4 w-4" />
)}
</div>
{showTestDetails && (
<div className="mt-4 grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-3">
{testStats.categories.map((category, idx) => (
<div
key={idx}
className="bg-slate-700/50 rounded-lg p-3 flex items-center justify-between"
>
<div className="flex items-center gap-2">
<span className="text-sm text-slate-300">{category.name}</span>
</div>
<div className="flex items-center gap-2 text-sm">
{category.passed > 0 && (
<span className="text-green-400">{category.passed}</span>
)}
{category.failed > 0 && (
<span className="text-red-400">{category.failed}</span>
)}
</div>
</div>
))}
</div>
)}
{prCommentFields?.original_runtime && prCommentFields?.best_runtime && (
<div className="hidden md:block text-center">
<div className="text-[10px] sm:text-xs text-slate-400 uppercase tracking-wider mb-1">
Runtime
</div>
<div className="text-xs sm:text-sm">
<span className="text-red-400 line-through">
{prCommentFields.original_runtime}
</span>
<span className="mx-1 sm:mx-2 text-slate-500"></span>
<span className="text-green-400">{prCommentFields.best_runtime}</span>
</div>
</div>
)}
{/* Edit Button - Compact on Mobile */}
<div className="ml-auto flex items-center gap-1.5 sm:gap-2">
{!isEditing ? (
<button
onClick={handleEditClick}
className="flex items-center gap-1 sm:gap-2 px-2 sm:px-3 py-1.5 sm:py-2 bg-purple-600 hover:bg-purple-700 text-white rounded-lg transition-colors text-xs sm:text-sm font-medium"
>
<Edit3 className="h-3 w-3 sm:h-4 sm:w-4" />
<span className="hidden sm:inline">Edit Code</span>
<span className="sm:hidden">Edit</span>
</button>
) : (
<div className="flex items-center gap-1.5 sm:gap-2">
<button
onClick={handleSaveCode}
disabled={saveStatus === "saving"}
className="flex items-center gap-1 sm:gap-2 px-2 sm:px-3 py-1.5 sm:py-2 bg-green-600 hover:bg-green-700 disabled:bg-green-800 text-white rounded-lg transition-colors text-xs sm:text-sm font-medium"
>
<Save className="h-3 w-3 sm:h-4 sm:w-4" />
<span className="hidden sm:inline">
{saveStatus === "saving"
? "Saving..."
: saveStatus === "saved"
? "Saved!"
: "Save"}
</span>
<span className="sm:hidden">
{saveStatus === "saving" ? "..." : saveStatus === "saved" ? "✓" : "Save"}
</span>
</button>
<button
onClick={handleCancelEdit}
className="flex items-center gap-1 sm:gap-2 px-2 sm:px-3 py-1.5 sm:py-2 bg-red-600 hover:bg-red-700 text-white rounded-lg transition-colors text-xs sm:text-sm font-medium"
>
<X className="h-3 w-3 sm:h-4 sm:w-4" />
<span className="hidden sm:inline">Cancel</span>
</button>
</div>
)}
</div>
</div>
) : null}
{/* Test Results Summary - Collapsed by default on mobile */}
{testStats.totalPassed > 0 || testStats.totalFailed > 0 ? (
<div className="bg-slate-800/30 rounded-lg p-2 sm:p-3 md:p-4 border border-slate-700/30">
<div
className="flex items-center justify-between cursor-pointer gap-2"
onClick={() => setShowTestDetails(!showTestDetails)}
>
<div className="flex items-center gap-2 sm:gap-3 min-w-0 flex-1">
<TestTube className="h-4 w-4 sm:h-5 sm:w-5 text-blue-400 flex-shrink-0" />
<span className="font-semibold text-xs sm:text-sm md:text-base truncate">
Test Results
</span>
<div className="flex items-center gap-2 sm:gap-4 flex-shrink-0">
<div className="flex items-center gap-1">
<CheckCircle2 className="h-3 w-3 sm:h-4 sm:w-4 text-green-400" />
<span className="text-green-400 font-medium text-xs sm:text-sm">
{testStats.totalPassed}
</span>
</div>
{testStats.totalFailed > 0 && (
<div className="flex items-center gap-1">
<XCircle className="h-3 w-3 sm:h-4 sm:w-4 text-red-400" />
<span className="text-red-400 font-medium text-xs sm:text-sm">
{testStats.totalFailed}
</span>
</div>
)}
</div>
</div>
{showTestDetails ? (
<ChevronUp className="h-4 w-4 flex-shrink-0" />
) : (
<ChevronDown className="h-4 w-4 flex-shrink-0" />
)}
</div>
{showTestDetails && (
<div className="mt-3 sm:mt-4 grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-2 sm:gap-3">
{testStats.categories.map((category, idx) => (
<div
key={idx}
className="bg-slate-700/50 rounded-lg p-2 sm:p-3 flex items-center justify-between gap-2"
>
<div className="flex items-center gap-2 min-w-0 flex-1">
<span className="text-xs sm:text-sm text-slate-300 truncate">
{category.name}
</span>
</div>
<div className="flex items-center gap-2 text-xs sm:text-sm flex-shrink-0">
{category.passed > 0 && (
<span className="text-green-400">{category.passed}</span>
)}
{category.failed > 0 && (
<span className="text-red-400">{category.failed}</span>
)}
</div>
</div>
))}
</div>
)}
</div>
) : null}
</div>
</div>
{/* File Path/Tabs */}
{/* File Path/Tabs - Mobile Optimized */}
{fileKeys.length === 1 ? (
// Single file - show full path with view toggle
<div className="bg-[rgba(15,15,15,0.95)] border-b border-[rgba(255,255,255,0.05)] px-5 py-3 flex items-center justify-between">
<div className="flex items-center gap-2">
<FileText size={14} className="text-sky-400" />
<span className="text-sm text-slate-300 font-mono">{fileKeys[0]}</span>
<div className="bg-[rgba(15,15,15,0.95)] border-b border-[rgba(255,255,255,0.05)] px-3 sm:px-4 md:px-5 py-2 sm:py-2.5 md:py-3 flex items-center justify-between gap-2">
<div className="flex items-center gap-1.5 sm:gap-2 min-w-0 flex-1">
<FileText size={12} className="text-sky-400 flex-shrink-0 sm:w-3.5 sm:h-3.5" />
<span className="text-xs sm:text-sm text-slate-300 font-mono truncate">
{fileKeys[0]}
</span>
</div>
{/* View Toggle for mobile compatibility */}
<div className="flex items-center gap-2">
<div className="flex items-center gap-1.5 sm:gap-2 flex-shrink-0">
<button
onClick={() => setUseInlineView(!useInlineView)}
className={`flex items-center gap-2 px-3 py-1 rounded-md text-xs transition-colors ${
className={`flex items-center gap-1 sm:gap-1.5 px-2 sm:px-3 py-1 rounded-md text-[10px] sm:text-xs transition-colors ${
useInlineView
? "bg-blue-600/20 text-blue-400 border border-blue-600/30"
: "bg-slate-700/50 text-slate-400 hover:text-slate-300"
}`}
title={useInlineView ? "Switch to side-by-side view" : "Switch to inline view"}
>
{useInlineView ? <Smartphone size={12} /> : <Monitor size={12} />}
{useInlineView ? "Inline" : "Side-by-side"}
{useInlineView ? (
<Smartphone size={10} className="sm:w-3 sm:h-3" />
) : (
<Monitor size={10} className="sm:w-3 sm:h-3" />
)}
<span className="hidden sm:inline">{useInlineView ? "Inline" : "Side-by-side"}</span>
</button>
</div>
</div>
) : (
// Multiple files - show tabs with full path on hover
<div className="bg-[rgba(15,15,15,0.95)] border-b border-[rgba(255,255,255,0.05)]">
<div className="flex items-center justify-between">
<div className="flex overflow-x-auto whitespace-nowrap scrollbar-thin scrollbar-thumb-slate-700 scrollbar-track-slate-800">
<div className="flex items-center justify-between gap-2">
<div className="flex overflow-x-auto whitespace-nowrap scrollbar-thin scrollbar-thumb-slate-700 scrollbar-track-slate-800 flex-1 min-w-0">
{fileKeys.map(fileKey => (
<button
key={fileKey}
onClick={() => setActiveFileKey(fileKey)}
title={fileKey}
className={`px-5 py-3 text-sm transition-all duration-200 ease-in-out focus:outline-none flex items-center gap-2
className={`px-3 sm:px-4 md:px-5 py-2 sm:py-2.5 md:py-3 text-xs sm:text-sm transition-all duration-200 ease-in-out focus:outline-none flex items-center gap-1.5 sm:gap-2 flex-shrink-0
${
activeFileKey === fileKey
? "text-white bg-[rgba(255,255,255,0.05)] border-b-2 border-sky-400"
@ -514,26 +531,34 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
}`}
>
<FileText
size={14}
className={activeFileKey === fileKey ? "text-sky-400" : "text-slate-500"}
size={12}
className={`${activeFileKey === fileKey ? "text-sky-400" : "text-slate-500"} flex-shrink-0 sm:w-3.5 sm:h-3.5`}
/>
{fileKey.split("/").pop()} {/* Show only filename */}
<span className="truncate max-w-[120px] sm:max-w-none">
{fileKey.split("/").pop()}
</span>
</button>
))}
</div>
{/* View Toggle for mobile compatibility */}
<div className="flex items-center gap-2 px-5">
<div className="flex items-center gap-1.5 sm:gap-2 px-2 sm:px-3 md:px-5 flex-shrink-0">
<button
onClick={() => setUseInlineView(!useInlineView)}
className={`flex items-center gap-2 px-3 py-1 rounded-md text-xs transition-colors ${
className={`flex items-center gap-1 sm:gap-1.5 px-2 sm:px-3 py-1 rounded-md text-[10px] sm:text-xs transition-colors ${
useInlineView
? "bg-blue-600/20 text-blue-400 border border-blue-600/30"
: "bg-slate-700/50 text-slate-400 hover:text-slate-300"
}`}
title={useInlineView ? "Switch to side-by-side view" : "Switch to inline view"}
>
{useInlineView ? <Smartphone size={12} /> : <Monitor size={12} />}
{useInlineView ? "Inline" : "Side-by-side"}
{useInlineView ? (
<Smartphone size={10} className="sm:w-3 sm:h-3" />
) : (
<Monitor size={10} className="sm:w-3 sm:h-3" />
)}
<span className="hidden sm:inline">
{useInlineView ? "Inline" : "Side-by-side"}
</span>
</button>
</div>
</div>
@ -643,27 +668,27 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
</div>
)}
</div>
{/* Bottom Section with Explanation and Generated Tests */}
<div className="bg-gradient-to-t from-slate-900 to-slate-800/50 border-t border-slate-700/50">
{/* Bottom Section with Explanation and Generated Tests - Mobile Optimized */}
<div className="bg-gradient-to-t from-slate-900 to-slate-800/50 border-t border-slate-700/50 max-h-[30vh] sm:max-h-[35vh] md:max-h-none overflow-y-auto">
{/* Optimization review details */}
{review_quality && (
<div className="p-4 border-b border-slate-700/30">
<div className="p-2 sm:p-3 md:p-4 border-b border-slate-700/30">
<div
className="flex items-center justify-between cursor-pointer"
className="flex items-center justify-between cursor-pointer gap-2"
onClick={() => setShowOptimizationQuality(!showOptimizationQuality)}
>
<h3 className="text-sm font-semibold text-cyan-400 flex items-center gap-2">
<Zap className="h-4 w-4" />
🎯Optimization Quality: {review_quality}
<h3 className="text-xs sm:text-sm font-semibold text-cyan-400 flex items-center gap-1.5 sm:gap-2 min-w-0 flex-1">
<Zap className="h-3 w-3 sm:h-4 sm:w-4 flex-shrink-0" />
<span className="truncate">🎯 Quality: {review_quality}</span>
</h3>
{showOptimizationQuality ? (
<ChevronUp className="h-4 w-4 text-slate-400" />
<ChevronUp className="h-3 w-3 sm:h-4 sm:w-4 text-slate-400 flex-shrink-0" />
) : (
<ChevronDown className="h-4 w-4 text-slate-400" />
<ChevronDown className="h-3 w-3 sm:h-4 sm:w-4 text-slate-400 flex-shrink-0" />
)}
</div>
{showOptimizationQuality && (
<div className="mt-2 text-sm text-slate-300 whitespace-pre-wrap bg-slate-800/50 rounded-lg p-3 max-h-32 overflow-y-auto scrollbar-thin scrollbar-thumb-slate-600 scrollbar-track-slate-800">
<div className="mt-2 text-xs sm:text-sm text-slate-300 whitespace-pre-wrap bg-slate-800/50 rounded-lg p-2 sm:p-3 max-h-24 sm:max-h-32 overflow-y-auto scrollbar-thin scrollbar-thumb-slate-600 scrollbar-track-slate-800">
{review_explanation}
</div>
)}
@ -671,23 +696,24 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
)}
{/* Optimization Explanation */}
{explanation && (
<div className="p-4 border-b border-slate-700/30">
<div className="p-2 sm:p-3 md:p-4 border-b border-slate-700/30">
<div
className="flex items-center justify-between cursor-pointer"
className="flex items-center justify-between cursor-pointer gap-2"
onClick={() => setShowOptimizationExplanation(!showOptimizationExplanation)}
>
<h3 className="text-sm font-semibold text-cyan-400 flex items-center gap-2">
<Zap className="h-4 w-4" />
Optimization Explanation
<h3 className="text-xs sm:text-sm font-semibold text-cyan-400 flex items-center gap-1.5 sm:gap-2">
<Zap className="h-3 w-3 sm:h-4 sm:w-4 flex-shrink-0" />
<span className="hidden sm:inline">Optimization Explanation</span>
<span className="sm:hidden">Explanation</span>
</h3>
{showOptimizationExplanation ? (
<ChevronUp className="h-4 w-4 text-slate-400" />
<ChevronUp className="h-3 w-3 sm:h-4 sm:w-4 text-slate-400 flex-shrink-0" />
) : (
<ChevronDown className="h-4 w-4 text-slate-400" />
<ChevronDown className="h-3 w-3 sm:h-4 sm:w-4 text-slate-400 flex-shrink-0" />
)}
</div>
{showOptimizationExplanation && (
<div className="mt-2 text-sm text-slate-300 whitespace-pre-wrap bg-slate-800/50 rounded-lg p-3 max-h-32 overflow-y-auto scrollbar-thin scrollbar-thumb-slate-600 scrollbar-track-slate-800">
<div className="mt-2 text-xs sm:text-sm text-slate-300 whitespace-pre-wrap bg-slate-800/50 rounded-lg p-2 sm:p-3 max-h-24 sm:max-h-32 overflow-y-auto scrollbar-thin scrollbar-thumb-slate-600 scrollbar-track-slate-800">
{explanation}
</div>
)}
@ -696,21 +722,22 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
{/* Generated Tests Toggle */}
{metadata.generatedTests && (
<div className="p-4">
<div className="p-2 sm:p-3 md:p-4">
<button
onClick={() => setShowGeneratedTests(!showGeneratedTests)}
className="flex items-center gap-2 text-sm font-semibold text-purple-400 hover:text-purple-300 transition-colors"
className="flex items-center gap-1.5 sm:gap-2 text-xs sm:text-sm font-semibold text-purple-400 hover:text-purple-300 transition-colors w-full"
>
<TestTube className="h-4 w-4" />
Generated Tests
<TestTube className="h-3 w-3 sm:h-4 sm:w-4 flex-shrink-0" />
<span className="hidden sm:inline">Generated Tests</span>
<span className="sm:hidden">Tests</span>
{showGeneratedTests ? (
<ChevronUp className="h-4 w-4" />
<ChevronUp className="h-3 w-3 sm:h-4 sm:w-4 ml-auto flex-shrink-0" />
) : (
<ChevronDown className="h-4 w-4" />
<ChevronDown className="h-3 w-3 sm:h-4 sm:w-4 ml-auto flex-shrink-0" />
)}
</button>
{showGeneratedTests && (
<div className="mt-3 bg-slate-800/50 rounded-lg p-3 max-h-64 overflow-y-auto scrollbar-thin scrollbar-thumb-slate-600 scrollbar-track-slate-800">
<div className="mt-2 sm:mt-3 bg-slate-800/50 rounded-lg p-2 sm:p-3 max-h-48 sm:max-h-64 overflow-y-auto scrollbar-thin scrollbar-thumb-slate-600 scrollbar-track-slate-800">
<ReactMarkdown
className="prose prose-sm prose-invert max-w-none"
remarkPlugins={[remarkGfm]}
@ -739,13 +766,13 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
style={vscDarkPlus}
language={language}
PreTag="div"
className="!bg-slate-900/50 !text-xs rounded-md"
className="!bg-slate-900/50 !text-[10px] sm:!text-xs rounded-md"
customStyle={{
margin: 0,
padding: "0.75rem",
padding: "0.5rem",
backgroundColor: "rgb(15 23 42 / 0.5)",
fontSize: "0.75rem",
lineHeight: "1.5",
fontSize: "0.625rem",
lineHeight: "1.4",
}}
{...restProps}
>
@ -753,35 +780,45 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
</SyntaxHighlighter>
) : (
<code
className="bg-slate-700/50 px-1 py-0.5 rounded text-xs text-cyan-300 font-mono"
className="bg-slate-700/50 px-1 py-0.5 rounded text-[10px] sm:text-xs text-cyan-300 font-mono"
{...restProps}
>
{children}
</code>
)
},
p: ({ children }) => <p className="text-sm text-slate-300 mb-3">{children}</p>,
p: ({ children }) => (
<p className="text-xs sm:text-sm text-slate-300 mb-2 sm:mb-3">{children}</p>
),
h1: ({ children }) => (
<h1 className="text-lg font-bold text-white mb-2">{children}</h1>
<h1 className="text-base sm:text-lg font-bold text-white mb-1.5 sm:mb-2">
{children}
</h1>
),
h2: ({ children }) => (
<h2 className="text-base font-semibold text-white mb-2">{children}</h2>
<h2 className="text-sm sm:text-base font-semibold text-white mb-1.5 sm:mb-2">
{children}
</h2>
),
h3: ({ children }) => (
<h3 className="text-sm font-semibold text-white mb-1">{children}</h3>
<h3 className="text-xs sm:text-sm font-semibold text-white mb-1">
{children}
</h3>
),
ul: ({ children }) => (
<ul className="list-disc list-inside text-sm text-slate-300 mb-3">
<ul className="list-disc list-inside text-xs sm:text-sm text-slate-300 mb-2 sm:mb-3">
{children}
</ul>
),
ol: ({ children }) => (
<ol className="list-decimal list-inside text-sm text-slate-300 mb-3">
<ol className="list-decimal list-inside text-xs sm:text-sm text-slate-300 mb-2 sm:mb-3">
{children}
</ol>
),
li: ({ children }) => (
<li className="text-sm text-slate-300 mb-1">{children}</li>
<li className="text-xs sm:text-sm text-slate-300 mb-0.5 sm:mb-1">
{children}
</li>
),
}}
>
@ -805,15 +842,15 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
)}
</div>
{/* Secret Prompt Modal */}
{/* Secret Prompt Modal - Mobile Optimized */}
{showSecretPrompt && (
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
<div className="bg-slate-800 rounded-lg p-6 max-w-md w-full mx-4">
<div className="flex items-center gap-3 mb-4">
<Lock className="h-5 w-5 text-amber-400" />
<h3 className="text-lg font-semibold text-white">Enter Edit Secret</h3>
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50 p-4">
<div className="bg-slate-800 rounded-lg p-4 sm:p-6 max-w-md w-full mx-4">
<div className="flex items-center gap-2 sm:gap-3 mb-3 sm:mb-4">
<Lock className="h-4 w-4 sm:h-5 sm:w-5 text-amber-400 flex-shrink-0" />
<h3 className="text-base sm:text-lg font-semibold text-white">Enter Edit Secret</h3>
</div>
<p className="text-slate-300 text-sm mb-4">
<p className="text-slate-300 text-xs sm:text-sm mb-3 sm:mb-4">
Please enter the secret key to enable code editing.
</p>
<input
@ -822,22 +859,22 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
value={editSecret}
onChange={e => setEditSecret(e.target.value)}
onKeyDown={e => e.key === "Enter" && handleSecretSubmit()}
className="w-full px-3 py-2 bg-slate-700 border border-slate-600 rounded-lg text-white placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-purple-500 focus:border-transparent mb-4"
className="w-full px-3 py-2 bg-slate-700 border border-slate-600 rounded-lg text-white placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-purple-500 focus:border-transparent mb-3 sm:mb-4 text-sm sm:text-base"
autoFocus
/>
<div className="flex gap-3 justify-end">
<div className="flex gap-2 sm:gap-3 justify-end">
<button
onClick={() => {
setShowSecretPrompt(false)
setEditSecret("")
}}
className="px-4 py-2 text-slate-300 hover:text-white transition-colors"
className="px-3 sm:px-4 py-1.5 sm:py-2 text-slate-300 hover:text-white transition-colors text-sm sm:text-base"
>
Cancel
</button>
<button
onClick={handleSecretSubmit}
className="px-4 py-2 bg-purple-600 hover:bg-purple-700 text-white rounded-lg transition-colors"
className="px-3 sm:px-4 py-1.5 sm:py-2 bg-purple-600 hover:bg-purple-700 text-white rounded-lg transition-colors text-sm sm:text-base"
>
Unlock
</button>

View file

@ -0,0 +1,36 @@
import * as React from "react"
import { cn } from "@/lib/utils"
/**
* Reusable Skeleton component for loading states
* Provides a pulse animation effect for placeholder content
*/
const Skeleton = React.forwardRef<HTMLDivElement, React.HTMLAttributes<HTMLDivElement>>(
({ className, ...props }, ref) => (
<div ref={ref} className={cn("animate-pulse rounded-md bg-muted/50", className)} {...props} />
),
)
Skeleton.displayName = "Skeleton"
/**
* Shimmer effect skeleton with gradient animation
* Provides a more dynamic loading appearance
*/
const SkeletonShimmer = React.forwardRef<HTMLDivElement, React.HTMLAttributes<HTMLDivElement>>(
({ className, ...props }, ref) => (
<div
ref={ref}
className={cn(
"relative overflow-hidden rounded-md bg-muted/50",
"before:absolute before:inset-0",
"before:-translate-x-full before:animate-shimmer",
"before:bg-gradient-to-r before:from-transparent before:via-white/10 before:to-transparent",
className,
)}
{...props}
/>
),
)
SkeletonShimmer.displayName = "SkeletonShimmer"
export { Skeleton, SkeletonShimmer }
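For reference, a minimal usage sketch of the two variants. The import path and the sizing utilities are assumptions, not taken from this diff; the shimmer variant relies on the `shimmer` keyframe added to tailwind.config further down.

import { Skeleton, SkeletonShimmer } from "@/components/ui/skeleton"

// Hypothetical loading placeholder for a card; dimensions are arbitrary.
export function CardSkeleton() {
  return (
    <div className="space-y-2 p-4">
      {/* pulse variant for a title line */}
      <Skeleton className="h-4 w-1/3" />
      {/* gradient shimmer variant for body text */}
      <SkeletonShimmer className="h-3 w-full" />
      <SkeletonShimmer className="h-3 w-5/6" />
    </div>
  )
}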

View file

@ -0,0 +1,30 @@
"use client"
import * as React from "react"
import * as TooltipPrimitive from "@radix-ui/react-tooltip"
import { cn } from "@/lib/utils"
const TooltipProvider = TooltipPrimitive.Provider
const Tooltip = TooltipPrimitive.Root
const TooltipTrigger = TooltipPrimitive.Trigger
const TooltipContent = React.forwardRef<
React.ElementRef<typeof TooltipPrimitive.Content>,
React.ComponentPropsWithoutRef<typeof TooltipPrimitive.Content>
>(({ className, sideOffset = 4, ...props }, ref) => (
<TooltipPrimitive.Content
ref={ref}
sideOffset={sideOffset}
className={cn(
"z-50 overflow-hidden rounded-md border bg-popover px-3 py-1.5 text-sm text-popover-foreground shadow-md animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",
className,
)}
{...props}
/>
))
TooltipContent.displayName = TooltipPrimitive.Content.displayName
export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }
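A minimal composition sketch for these Radix wrappers; asChild and side are standard Radix props, while the button markup and copy are placeholders.

import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"

export function HintButton() {
  return (
    <TooltipProvider>
      <Tooltip>
        {/* asChild renders the button itself as the trigger element */}
        <TooltipTrigger asChild>
          <button className="rounded-md bg-slate-700 px-3 py-1.5 text-sm">Save</button>
        </TooltipTrigger>
        <TooltipContent side="top">Saves the current changes</TooltipContent>
      </Tooltip>
    </TooltipProvider>
  )
}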

View file

@ -4,3 +4,49 @@ import { twMerge } from "tailwind-merge"
export function cn(...inputs: ClassValue[]): string {
return twMerge(clsx(inputs))
}
/**
* Round optimization attempts to the nearest 0.5 by dividing by 100 and rounding to half increments
* (e.g., 4000 -> 40, 450 -> 4.5, 550 -> 5.5)
*/
export function roundCredits(credits: number): number {
const value = credits / 100
return Math.round(value * 2) / 2
}
/**
* Format optimization attempts for display (rounds to 0.5 increments and formats with commas)
*/
export function formatCredits(credits: number): string {
const rounded = roundCredits(credits)
// Format with up to 1 decimal place (for 0.5 increments)
return new Intl.NumberFormat("en-US", {
minimumFractionDigits: rounded % 1 === 0 ? 0 : 1,
maximumFractionDigits: 1,
}).format(rounded)
}
/**
* Calculate usage percentage based on rounded optimization attempts
* Ensures percentage is calculated correctly after rounding to 0.5 increments
*/
export function calculateCreditsPercentage(used: number, limit: number): number {
if (limit === 0) return 0
const roundedUsed = roundCredits(used)
const roundedLimit = roundCredits(limit)
if (roundedLimit === 0) return 0
return Math.min(100, Math.round((roundedUsed / roundedLimit) * 100))
}
/**
* Get progress bar className based on percentage
*/
export function getProgressBarClassName(percentage: number): string {
if (percentage < 80) {
return "[&>div]:bg-gradient-to-r [&>div]:from-emerald-500 [&>div]:to-primary"
}
if (percentage < 95) {
return "[&>div]:bg-primary"
}
return "[&>div]:bg-gradient-to-r [&>div]:from-orange-500 [&>div]:to-red-500"
}
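Expected behavior of these helpers, as a quick sanity check; the values are chosen to exercise the half-increment rounding, and the import path mirrors the "@/lib/utils" alias used elsewhere in this diff.

import { roundCredits, formatCredits, calculateCreditsPercentage } from "@/lib/utils"

roundCredits(4000)                    // 40      (4000 / 100 lands exactly on an integer)
roundCredits(449)                     // 4.5     (4.49 rounds to the nearest half increment)
formatCredits(450)                    // "4.5"   (one decimal place for half increments)
formatCredits(400000)                 // "4,000" (thousands separator via Intl.NumberFormat)
calculateCreditsPercentage(450, 1000) // 45      (4.5 of 10, capped at 100)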

View file

@ -16,6 +16,14 @@ const config: Config = {
fontFamily: {
sans: ["var(--font-sans)", ...fontFamily.sans],
},
keyframes: {
shimmer: {
"100%": { transform: "translateX(100%)" },
},
},
animation: {
shimmer: "shimmer 2s infinite",
},
},
},
plugins: [],

View file

@ -1,12 +1,12 @@
{
"name": "@codeflash-ai/common",
"version": "1.0.22",
"version": "1.0.23",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@codeflash-ai/common",
"version": "1.0.22",
"version": "1.0.23",
"dependencies": {
"@azure/identity": "^4.2.0",
"@azure/keyvault-secrets": "^4.8.0",

View file

@ -1,6 +1,6 @@
{
"name": "@codeflash-ai/common",
"version": "1.0.22",
"version": "1.0.23",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
"repository": {

View file

@ -0,0 +1,91 @@
-- CreateTable
CREATE TABLE "public"."llm_calls" (
"id" UUID NOT NULL,
"trace_id" CHAR(36) NOT NULL,
"call_type" VARCHAR(50) NOT NULL,
"model_name" VARCHAR(100) NOT NULL,
"system_prompt" TEXT NOT NULL,
"user_prompt" TEXT NOT NULL,
"messages" JSONB NOT NULL,
"temperature" DOUBLE PRECISION,
"n_candidates" INTEGER,
"max_tokens" INTEGER,
"raw_response" TEXT,
"parsed_response" JSONB,
"prompt_tokens" INTEGER,
"completion_tokens" INTEGER,
"total_tokens" INTEGER,
"llm_cost" DOUBLE PRECISION,
"latency_ms" INTEGER,
"status" VARCHAR(20) NOT NULL,
"retry_count" INTEGER NOT NULL DEFAULT 0,
"error_type" VARCHAR(50),
"error_message" TEXT,
"parsing_status" VARCHAR(20),
"candidates_generated" INTEGER DEFAULT 0,
"candidates_valid" INTEGER DEFAULT 0,
"parsing_errors" JSONB,
"user_id" TEXT,
"python_version" VARCHAR(20),
"is_async" BOOLEAN NOT NULL DEFAULT false,
"context" JSONB,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "llm_calls_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "public"."optimization_errors" (
"id" TEXT NOT NULL,
"trace_id" CHAR(36) NOT NULL,
"llm_call_id" UUID,
"error_type" VARCHAR(50) NOT NULL,
"error_category" VARCHAR(50) NOT NULL,
"severity" VARCHAR(20) NOT NULL,
"error_message" TEXT NOT NULL,
"error_code" VARCHAR(50),
"stack_trace" TEXT,
"context" JSONB,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "optimization_errors_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "llm_calls_trace_id_idx" ON "public"."llm_calls"("trace_id");
-- CreateIndex
CREATE INDEX "llm_calls_call_type_idx" ON "public"."llm_calls"("call_type");
-- CreateIndex
CREATE INDEX "llm_calls_model_name_idx" ON "public"."llm_calls"("model_name");
-- CreateIndex
CREATE INDEX "llm_calls_status_idx" ON "public"."llm_calls"("status");
-- CreateIndex
CREATE INDEX "llm_calls_created_at_idx" ON "public"."llm_calls"("created_at" DESC);
-- CreateIndex
CREATE INDEX "llm_calls_call_type_status_idx" ON "public"."llm_calls"("call_type", "status");
-- CreateIndex
CREATE INDEX "llm_calls_parsing_status_idx" ON "public"."llm_calls"("parsing_status");
-- CreateIndex
CREATE INDEX "optimization_errors_trace_id_idx" ON "public"."optimization_errors"("trace_id");
-- CreateIndex
CREATE INDEX "optimization_errors_error_type_idx" ON "public"."optimization_errors"("error_type");
-- CreateIndex
CREATE INDEX "optimization_errors_error_category_idx" ON "public"."optimization_errors"("error_category");
-- CreateIndex
CREATE INDEX "optimization_errors_severity_idx" ON "public"."optimization_errors"("severity");
-- CreateIndex
CREATE INDEX "optimization_errors_llm_call_id_idx" ON "public"."optimization_errors"("llm_call_id");
-- AddForeignKey
ALTER TABLE "public"."optimization_errors" ADD CONSTRAINT "optimization_errors_llm_call_id_fkey" FOREIGN KEY ("llm_call_id") REFERENCES "public"."llm_calls"("id") ON DELETE SET NULL ON UPDATE CASCADE;

View file

@ -24,15 +24,15 @@ model cf_api_keys {
}
model users {
user_id String @id
github_username String @unique
user_id String @id
github_username String @unique
email String?
name String?
onboarding_completed Boolean @default(false)
created_at DateTime @default(now()) @db.Timestamptz(6)
referral_source String? @db.VarChar(255)
additional_comments String? @db.Text
onboarding_completed Boolean @default(false)
created_at DateTime @default(now()) @db.Timestamptz(6)
referral_source String? @db.VarChar(255)
additional_comments String? @db.Text
api_keys cf_api_keys[]
subscriptions subscriptions?
optimization_events optimization_events[]
@ -129,33 +129,33 @@ model pr_code_context_hash_cache {
}
model cf_app_installations {
id String @id @default(uuid())
installation_id Int @unique
id String @id @default(uuid())
installation_id Int @unique
account_id Int
account_login String
account_type String // 'User' or 'Organization'
created_at DateTime @default(now()) @db.Timestamptz(6)
updated_at DateTime @updatedAt
is_active Boolean @default(true)
created_at DateTime @default(now()) @db.Timestamptz(6)
updated_at DateTime @updatedAt
is_active Boolean @default(true)
repositories repositories[]
}
model repositories {
id String @id @default(uuid())
github_repo_id String @unique
id String @id @default(uuid())
github_repo_id String @unique
installation_id Int
name String
full_name String @unique
full_name String @unique
is_private Boolean
is_active Boolean @default(true)
has_github_action Boolean @default(false)
created_at DateTime @default(now()) @db.Timestamptz(6)
is_active Boolean @default(true)
has_github_action Boolean @default(false)
created_at DateTime @default(now()) @db.Timestamptz(6)
last_optimized DateTime?
organization_id String?
optimizations_limit Int? // Custom limit for this repository
optimizations_used Int @default(0) // Track usage
added_by String?
optimizations_used Int @default(0) // Track usage
added_by String?
installation_info cf_app_installations @relation(fields: [installation_id], references: [installation_id], onDelete: Cascade)
organization organizations? @relation(fields: [organization_id], references: [id], onDelete: SetNull)
@ -167,13 +167,13 @@ model repositories {
}
model repository_members {
id String @id @default(uuid())
id String @id @default(uuid())
repository_id String
user_id String
role String // 'owner', 'admin', 'member'
added_at DateTime @default(now()) @db.Timestamptz(6)
added_by String?
added_at DateTime @default(now()) @db.Timestamptz(6)
added_by String?
repository repositories @relation(fields: [repository_id], references: [id], onDelete: Cascade)
user users @relation(fields: [user_id], references: [user_id], onDelete: Cascade)
@ -188,8 +188,8 @@ model optimization_events {
user_id String? // Can be null for system events
repository_id String?
baseBranch String?
trace_id String @unique
pr_id String? @unique
trace_id String @unique
pr_id String? @unique
pr_url String?
api_key_id Int?
metadata Json?
@ -201,6 +201,7 @@ model optimization_events {
file_path String?
speedup_x Float?
speedup_pct Float?
llm_cost Float?
created_at DateTime @default(now()) @db.Timestamptz(6)
// Relations
@ -208,7 +209,6 @@ model optimization_events {
repository repositories? @relation(fields: [repository_id], references: [id], onDelete: SetNull)
api_key cf_api_keys? @relation(fields: [api_key_id], references: [id], onDelete: SetNull)
comments comments[]
llm_cost Float?
@@index([event_type, created_at])
@@index([repository_id, event_type])
@ -236,15 +236,15 @@ model comments {
}
model organizations {
id String @id @default(uuid())
name String @unique
github_org_id String? @unique
id String @id @default(uuid())
name String @unique
github_org_id String? @unique
description String?
website String?
created_at DateTime @default(now()) @db.Timestamptz(6)
updated_at DateTime @updatedAt
auto_add_github_members Boolean @default(true)
added_by String?
created_at DateTime @default(now()) @db.Timestamptz(6)
updated_at DateTime @updatedAt
auto_add_github_members Boolean @default(true)
added_by String?
repositories repositories[]
organization_members organization_members[]
@ -257,9 +257,9 @@ model organization_members {
id String @id @default(uuid())
organization_id String
user_id String
role String
role String
added_at DateTime @default(now()) @db.Timestamptz(6)
added_by String?
added_by String?
organization organizations @relation(fields: [organization_id], references: [id], onDelete: Cascade)
user users @relation(fields: [user_id], references: [user_id], onDelete: Cascade)
@ -267,4 +267,95 @@ model organization_members {
@@unique([organization_id, user_id])
@@index([user_id])
@@index([organization_id])
}
}
// ==================== Observability Tables ====================
model llm_calls {
id String @id @default(uuid()) @db.Uuid
trace_id String @db.Char(36)
call_type String @db.VarChar(50) // 'optimization', 'test_generation', 'ranking', 'refinement'
model_name String @db.VarChar(100) // 'gpt-4', 'gpt-3.5-turbo', etc.
// Prompt capture (full content for prompt engineering analysis)
system_prompt String @db.Text
user_prompt String @db.Text
messages Json // Full messages array sent to LLM
// Request configuration
temperature Float?
n_candidates Int? // Number of completions requested
max_tokens Int?
// Response capture
raw_response String? @db.Text // Full LLM response before parsing
parsed_response Json? // Structured parsed response
// Token usage and cost
prompt_tokens Int?
completion_tokens Int?
total_tokens Int?
llm_cost Float?
// Timing
latency_ms Int?
// Status and errors
status String @db.VarChar(20) // 'success', 'failed', 'partial_success'
retry_count Int @default(0)
error_type String? @db.VarChar(50)
error_message String? @db.Text
// Parsing results
parsing_status String? @db.VarChar(20) // 'success', 'failed', 'partial'
candidates_generated Int? @default(0)
candidates_valid Int? @default(0) // Passed syntax validation
parsing_errors Json? // Details of parsing failures
// Context
user_id String?
python_version String? @db.VarChar(20)
is_async Boolean @default(false)
context Json? // Additional metadata (optimization_id, speedup, etc.)
created_at DateTime @default(now()) @db.Timestamptz(6)
// Relations
optimization_errors optimization_errors[]
@@index([trace_id])
@@index([call_type])
@@index([model_name])
@@index([status])
@@index([created_at(sort: Desc)])
@@index([call_type, status])
@@index([parsing_status])
}
model optimization_errors {
id String @id @default(uuid())
trace_id String @db.Char(36)
llm_call_id String? @db.Uuid
// Error classification
error_type String @db.VarChar(50) // 'validation', 'llm_api', 'llm_parsing', 'test_failure', 'compilation'
error_category String @db.VarChar(50) // 'user_error', 'system_error', 'llm_error', 'infrastructure'
severity String @db.VarChar(20) // 'critical', 'error', 'warning', 'info'
// Error details
error_message String @db.Text
error_code String? @db.VarChar(50)
stack_trace String? @db.Text
context Json? // Additional error context including test failure details
created_at DateTime @default(now()) @db.Timestamptz(6)
// Relations
llm_call llm_calls? @relation(fields: [llm_call_id], references: [id], onDelete: SetNull)
@@index([trace_id])
@@index([error_type])
@@index([error_category])
@@index([severity])
@@index([llm_call_id])
}
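A minimal read-path sketch against these models with the generated Prisma client; traceId is a hypothetical 36-character identifier, and the query shape simply follows the relation and indexes declared above.

import { PrismaClient } from "@prisma/client"

const prisma = new PrismaClient()

// Fetch failed LLM calls for one trace, newest first, with their linked errors.
async function failedCallsForTrace(traceId: string) {
  return prisma.llm_calls.findMany({
    where: { trace_id: traceId, status: "failed" }, // served by the trace_id and status indexes
    orderBy: { created_at: "desc" },                // matches the created_at DESC index
    include: { optimization_errors: true },         // optional FK, ON DELETE SET NULL
  })
}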