add universe optimize orchestrator with project management,

This commit is contained in:
misrasaurabh1 2026-04-16 16:34:36 -07:00
parent a52d80b0aa
commit 2a101a11f2
13 changed files with 2363 additions and 7 deletions

View file

@ -0,0 +1,6 @@
results/
universe_optimize.db
__pycache__/
*.pyc
.venv/
.tmp_bootstrap.sh

View file

@ -0,0 +1 @@
3.13

View file

@ -0,0 +1,75 @@
#!/bin/bash
# Bootstrap script for Universe Optimize VMs.
# Executed by the orchestrator via SSH on a fresh Ubuntu 24.04 VM.
#
# Placeholders replaced by the orchestrator before uploading:
# __REPO_NAME__ - e.g. "flask"
# __GITHUB_PAT__ - GitHub PAT for cloning/pushing
# __CODEFLASH_API_KEY__ - Codeflash API key
# __AWS_BEARER_TOKEN_BEDROCK__ - AWS Bedrock bearer token for Claude
# Fail fast: abort on any command error, unset variable, or pipeline failure.
set -euo pipefail
echo "=== [bootstrap] Starting VM bootstrap ==="
# ── System dependencies ──────────────────────────────────────────────
sudo apt-get update -qq
sudo apt-get install -y -qq git curl build-essential python3-dev python3-venv jq
# ── Python: install uv ──────────────────────────────────────────────
curl -LsSf https://astral.sh/uv/install.sh | sh
export PATH="$HOME/.local/bin:$PATH"
echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
# ── Node.js (needed for Claude Code CLI) ────────────────────────────
curl -fsSL https://deb.nodesource.com/setup_22.x | sudo -E bash -
sudo apt-get install -y -qq nodejs
# ── Install Claude Code CLI ─────────────────────────────────────────
npm install -g @anthropic-ai/claude-code
# ── Install GitHub CLI ──────────────────────────────────────────────
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
sudo apt-get update -qq && sudo apt-get install -y -qq gh
# ── Environment variables (Bedrock auth for Claude Code) ────────────
# Write to a dedicated file, then source from both .bashrc and .profile
# so it works in both interactive and non-interactive SSH sessions.
# NOTE(review): LC_ALL/LANG assume the en_US.UTF-8 locale is pre-generated
# on the Ubuntu image — confirm, or add `locale-gen en_US.UTF-8` above.
cat > ~/.env_universe << 'ENVEOF'
export CLAUDE_CODE_USE_BEDROCK=1
export AWS_REGION=us-east-1
export AWS_BEARER_TOKEN_BEDROCK="__AWS_BEARER_TOKEN_BEDROCK__"
export CODEFLASH_API_KEY="__CODEFLASH_API_KEY__"
export LC_ALL=en_US.UTF-8
export LANG=en_US.UTF-8
ENVEOF
echo 'source ~/.env_universe' >> ~/.bashrc
echo 'source ~/.env_universe' >> ~/.profile
source ~/.env_universe
# ── GitHub auth ─────────────────────────────────────────────────────
# NOTE: the credential-store helper keeps the PAT in plaintext at
# ~/.git-credentials; acceptable only because the VM is short-lived
# and destroyed after collection.
git config --global credential.helper store
echo "https://codeflash-ai:__GITHUB_PAT__@github.com" > ~/.git-credentials
git config --global user.name "codeflash-ai"
git config --global user.email "bot@codeflash.ai"
# Authenticate gh CLI
echo "__GITHUB_PAT__" | gh auth login --with-token 2>/dev/null || true
# ── Create results directory ────────────────────────────────────────
mkdir -p ~/results
# ── Clone the forked repo ───────────────────────────────────────────
git clone "https://github.com/codeflash-ai/__REPO_NAME__.git" ~/project
# ── Copy optimization.md as CLAUDE.md ───────────────────────────────
# (orchestrator SCPs optimization.md to ~/optimization.md before running this)
cp ~/optimization.md ~/project/CLAUDE.md
# ── codeflash-agent plugin ──────────────────────────────────────────
# (orchestrator SCPs the dist/ directory to ~/codeflash-agent/dist before running this)
echo "=== [bootstrap] Bootstrap complete ==="
echo " Project cloned to ~/project"
echo " CLAUDE.md injected"
echo " Ready to launch Claude Code"

View file

@ -0,0 +1,21 @@
Subject: I created {num_optimizations} PRs that speed up {repo_name}
Hi {first_name},
I'm Saurabh, ex-CMU and Meta, and CEO and Founder of Codeflash.
We work with companies like Unstructured.io and HuggingFace who all face a growing challenge: as AI coding tools generate more of the codebase, performance regressions slip in faster than teams can catch them.
Codeflash is the performance layer that sits on top of your AI coding workflow. It finds provably faster implementations for your existing code and ensures every new PR ships optimized.
I ran Codeflash on a fork of {repo} and created {num_optimizations} merge-ready commits that significantly speed up several crucial components -- {best_function} now runs {best_speedup}! {optimizations_summary}
Unstructured.io used Codeflash across their entire infrastructure and cut compute costs by 50%.
I'd love to walk you through the results and show you how much more free performance is hiding across your full codebase. Would sometime this week work?
{calendly_link}
Thanks,
Saurabh
Founder, Codeflash.ai

View file

@ -0,0 +1,15 @@
Subject: Re: I created {num_optimizations} PRs that speed up {repo_name}
Hi {first_name},
Wanted to follow up -- did your team get a chance to look at the optimizations?
You can see all {num_optimizations} commits here:
{branch_url}
If those didn't hit the right area of your codebase, I have an open offer: share any performance benchmark your team cares about, and I'll run Codeflash against it and send you the results. No commitment, just proof.
{calendly_link}
Saurabh
Founder, Codeflash.ai

View file

@ -0,0 +1,14 @@
Subject: Re: I created {num_optimizations} PRs that speed up {repo_name}
Hi {first_name},
With AI coding tools writing more of the code, performance regressions are showing up faster and quieter than before. By the time they surface, it's production issues and fire drills.
Unstructured.io plugged Codeflash into their workflow and cut compute costs by 50% -- and now every PR is automatically checked before it merges.
Happy to show you what that looks like for {company_name}.
{calendly_link}
Saurabh
Founder, Codeflash.ai

View file

@ -0,0 +1,87 @@
# Codeflash Optimization Run
You are running an automated optimization session on this open-source project.
Your goal is to find provably-faster code implementations and create individual
PRs with benchmark results for each optimization.
## Your Mission
### Phase 1: Setup
1. Understand the project: read the README, project structure, and key source files.
2. Set up the project: install dependencies, verify tests pass.
3. Install codeflash: `pip install codeflash` (or `uv add --dev codeflash` if using uv).
### Phase 2: Optimize and Create PRs
Launch the optimization agent directly using the Agent tool (do NOT use `/codeflash-optimize` or AskUserQuestion — you are running non-interactively):
```
Agent(subagent_type: "codeflash-deep", name: "optimizer", run_in_background: true, prompt: "AUTONOMOUS MODE: The user has already been asked for context (included below). Do NOT ask the user any questions — work fully autonomously. Make all decisions yourself: generate a run tag from today's date, identify benchmark tiers from available tests, choose optimization targets from profiler output. If something is ambiguous, pick the reasonable default and document your choice in HANDOFF.md.\n\nOptimize this project for performance.\n\nUser context: go — no special constraints, optimize everything you can find.\n\nIMPORTANT — After each KEEP, immediately do the following before moving to the next target:\n1. Write a pytest-benchmark test for the optimized function with realistic inputs\n2. Run `codeflash compare <base-commit> <head-commit> --inject <benchmark-test-path>` to generate benchmark tables. If codeflash compare is not available, run the benchmark manually on base and head commits and format the results as a markdown table.\n3. Create a PR branch: `git checkout main && git checkout -b codeflash/opt-<function-name> && git cherry-pick <commit-sha> && git add <benchmark-test> && git commit --amend --no-edit`\n4. Push the branch: `git push origin codeflash/opt-<function-name>`\n5. Open a PR with `gh pr create` using the benchmark results in the body (see PR body format below)\n6. Switch back to the optimize branch: `git checkout codeflash/optimize`\n7. Record the PR URL in results.tsv and HANDOFF.md\n\nDo NOT batch PR creation for later. Each optimization gets its PR immediately after the experiment proves it works.\n\nPR Body Format:\n## Summary\n- <One-line description of what was optimized and how>\n\n## Benchmark\n### <Machine specs e.g. Azure Standard_D4s_v5, 4 vCPU, 16 GiB RAM, Python 3.12>\n\n| Function | base | head | Delta | Speedup |\n|:---|---:|---:|:---|---:|\n| `<function_name>` | ... | ... | ... 
| X.Xx |\n\n---\n*Generated by codeflash agent*\n\n<details>\n<summary><b>Benchmark test source</b></summary>\n\n```python\n<benchmark test code>\n```\n</details>\n\n## Test plan\n- [x] All existing tests pass\n- [x] Benchmark shows measurable improvement")
```
Wait for the optimizer agent to complete.
### Phase 3: Collect Results
Create the results directory and write summary:
```bash
mkdir -p ~/results
```
Write `~/results/summary.json` with this structure:
```json
{
"repo": "<org/repo>",
"language": "<python|javascript|java>",
"branch": "codeflash/optimize",
"total_experiments": 0,
"total_keeps": 0,
"total_discards": 0,
"optimizations": [
{
"commit": "<sha>",
"function": "<function_name>",
"file": "<file_path>",
"description": "<what was optimized>",
"cpu_speedup": "<e.g. 2.3x faster>",
"memory_reduction": "<e.g. -50 MiB>",
"technique": "<e.g. replaced list with set, eliminated deepcopy>",
"pr_number": "<PR number if created>",
"pr_url": "<PR URL if created>",
"benchmark_test": "<path to benchmark test file>"
}
],
"headline_stats": {
"best_single_speedup": "<e.g. 5x faster>",
"best_function": "<function_name>",
"total_cpu_improvement_pct": 0,
"total_memory_saved_mb": 0
},
"pr_ready_commits": 0,
"prs_created": [],
"status": "completed|plateaued|failed",
"error": "<if failed, why>"
}
```
Also copy `.codeflash/results.tsv` and `.codeflash/HANDOFF.md` to `~/results/` (if they exist).
Then push all branches to the remote fork:
```bash
git push origin --all
```
## Important
- Work fully autonomously. Do not ask questions -- make reasonable decisions.
- If tests fail during setup, note the pre-existing failures and work around them.
- If the project cannot be set up (missing deps, private packages), write summary.json with status "failed" and an error message, then stop.
- Time limit: aim to complete within 8 hours. If still running after 8 hours, wrap up, write summary.json with whatever results you have, and stop.
- Always push all branches before finishing, even if results are partial.
- Each optimization MUST have its own PR with benchmark results created immediately after the optimization is verified. Do NOT batch PR creation for later.
- CRITICAL: Do NOT use AskUserQuestion at any point. You are running in fully autonomous non-interactive mode. Make all decisions yourself.

View file

@ -0,0 +1,988 @@
#!/usr/bin/env python3
"""
Universe Optimize Orchestrator
Manages the full pipeline: provision Azure VMs, fork repos, run Codeflash
optimizations via Claude Code, collect results, and draft outreach emails.
Usage:
python orchestrator.py provision <project-id> Provision VM, fork repo, bootstrap
python orchestrator.py run <project-id> Launch Claude Code on the VM
python orchestrator.py status [project-id] Dashboard or single project status
python orchestrator.py monitor [--interval=600] Poll all running VMs
python orchestrator.py collect <project-id> SCP results, verify push, destroy VM
python orchestrator.py results <project-id> Show detailed optimization results
python orchestrator.py email <project-id> Build context + render emails
python orchestrator.py email --rerender-all Re-render all emails after template edit
python orchestrator.py email <project-id> --show Preview rendered emails
python orchestrator.py email <project-id> --show-context Show raw context.json
python orchestrator.py mark-sent <project-id> Mark emails as sent
python orchestrator.py destroy <project-id> Destroy VM without collecting
"""
from __future__ import annotations
import argparse
import json
import os
import shlex
import sqlite3
import subprocess
import sys
import time
from datetime import datetime, timezone
from glob import glob
from pathlib import Path
from textwrap import dedent
# ---------------------------------------------------------------------------
# Paths
# ---------------------------------------------------------------------------
BASE_DIR = Path(__file__).resolve().parent
PROJECTS_FILE = BASE_DIR / "projects.json"      # project roster: repo, language, target contact
OPTIMIZATION_MD = BASE_DIR / "optimization.md"  # uploaded to the VM and injected as CLAUDE.md
BOOTSTRAP_SH = BASE_DIR / "bootstrap.sh"        # template with __PLACEHOLDER__ tokens
EMAIL_TEMPLATES_DIR = BASE_DIR / "email_templates"
RESULTS_DIR = BASE_DIR / "results"              # per-project collected artifacts
DB_PATH = BASE_DIR / "universe_optimize.db"     # sqlite project-state database
# NOTE(review): machine-specific Dropbox path — this breaks on any other
# machine; consider an UO_* env override like the Azure settings below.
CODEFLASH_AGENT_DIST = Path.home() / "Library/CloudStorage/Dropbox/hacks/codeflash-agent/dist"
# ---------------------------------------------------------------------------
# Config
# ---------------------------------------------------------------------------
AZURE_RESOURCE_GROUP = os.environ.get("UO_AZURE_RESOURCE_GROUP", "universe-optimize-rg")
AZURE_LOCATION = os.environ.get("UO_AZURE_LOCATION", "eastus")
AZURE_VM_SIZE = os.environ.get("UO_AZURE_VM_SIZE", "Standard_D4s_v5")
AZURE_VM_IMAGE = "Canonical:ubuntu-24_04-lts:server:latest"
AZURE_ADMIN_USER = "azureuser"
SSH_KEY_PATH = Path.home() / ".ssh" / "id_rsa"
# SECURITY: credentials must come from the environment only. A previous
# revision shipped a live GitHub PAT, AWS Bedrock bearer token, and
# Codeflash API key as in-source fallbacks; any secret that was committed
# is compromised and must be rotated.
GITHUB_PAT = os.environ.get("UO_GITHUB_PAT", "")
GITHUB_ORG = "codeflash-ai"
# Bedrock auth for Claude Code on VMs
AWS_BEARER_TOKEN_BEDROCK = os.environ.get("AWS_BEARER_TOKEN_BEDROCK", "")
CODEFLASH_API_KEY = os.environ.get("CODEFLASH_API_KEY", "")
if not all((GITHUB_PAT, AWS_BEARER_TOKEN_BEDROCK, CODEFLASH_API_KEY)):
    # Warn early rather than failing deep inside provisioning/bootstrap.
    print(
        "WARNING: UO_GITHUB_PAT, AWS_BEARER_TOKEN_BEDROCK and/or CODEFLASH_API_KEY "
        "are not set; forking, VM bootstrap, and Claude auth will fail.",
        file=sys.stderr,
    )
CALENDLY_LINK = "https://calendly.com/codeflash-saurabh/30min"
# Max time (seconds) before we consider a VM stuck
VM_TIMEOUT_SECONDS = 5 * 3600  # 5 hours
# ===========================================================================
# Database
# ===========================================================================
def get_db() -> sqlite3.Connection:
    """Open the project-state database, creating the schema on first use."""
    conn = sqlite3.connect(str(DB_PATH))
    # Row objects allow name-based column access throughout the CLI.
    conn.row_factory = sqlite3.Row
    conn.execute("""
    CREATE TABLE IF NOT EXISTS projects (
    id TEXT PRIMARY KEY,
    repo TEXT NOT NULL,
    language TEXT NOT NULL,
    fork_url TEXT,
    company TEXT,
    contact_name TEXT,
    contact_email TEXT,
    contact_title TEXT,
    status TEXT DEFAULT 'pending',
    vm_name TEXT,
    vm_ip TEXT,
    created_at TEXT,
    started_at TEXT,
    completed_at TEXT,
    optimization_branch TEXT,
    num_optimizations INTEGER DEFAULT 0,
    best_speedup TEXT,
    summary_json TEXT,
    email_draft_path TEXT
    )
    """)
    conn.commit()
    return conn
def load_projects() -> list[dict]:
    """Read the full project roster from projects.json."""
    return json.loads(Path(PROJECTS_FILE).read_text())
def load_project(project_id: str) -> dict:
    """Look up a single project entry by id; raise if the roster lacks it."""
    found = next((entry for entry in load_projects() if entry["id"] == project_id), None)
    if found is None:
        raise ValueError(f"Project {project_id} not found in projects.json")
    return found
def upsert_project(db: sqlite3.Connection, project: dict) -> None:
    """Insert a project row, or refresh its descriptive fields if present.

    On conflict only metadata (repo, language, company, contact) is
    updated; status and created_at from the original insert are kept.
    """
    contact = project.get("target_contact", {})
    params = (
        project["id"],
        project["repo"],
        project["language"],
        project.get("company", ""),
        contact.get("name", ""),
        contact.get("email", ""),
        contact.get("title", ""),
        datetime.now(timezone.utc).isoformat(),
    )
    db.execute("""
    INSERT INTO projects (id, repo, language, company, contact_name, contact_email, contact_title, status, created_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, 'pending', ?)
    ON CONFLICT(id) DO UPDATE SET
    repo=excluded.repo, language=excluded.language, company=excluded.company,
    contact_name=excluded.contact_name, contact_email=excluded.contact_email,
    contact_title=excluded.contact_title
    """, params)
    db.commit()
def update_status(db: sqlite3.Connection, project_id: str, status: str, **kwargs) -> None:
    """Set a project's status plus any extra columns given as keywords.

    Column names are interpolated into the SQL, which is safe only
    because they come from call sites in this file, never user input.
    """
    columns = ["status", *kwargs]
    values = [status, *kwargs.values(), project_id]
    assignments = ", ".join(f"{col} = ?" for col in columns)
    db.execute(f"UPDATE projects SET {assignments} WHERE id = ?", values)
    db.commit()
def get_project_row(db: sqlite3.Connection, project_id: str) -> sqlite3.Row | None:
return db.execute("SELECT * FROM projects WHERE id = ?", (project_id,)).fetchone()
# ===========================================================================
# SSH helpers
# ===========================================================================
def ssh_exec(ip: str, command: str, timeout: int = 120) -> str:
    """Execute a command on the VM via SSH. Returns stdout.

    stderr is echoed locally for visibility but never returned, and the
    remote exit status is NOT checked — callers that need success/failure
    embed sentinel strings in the command (e.g. NOT_PUSHED, running/done).
    """
    import paramiko  # lazy: keeps CLI commands that never touch a VM paramiko-free
    client = paramiko.SSHClient()
    # Fresh VMs have unknown host keys; auto-accept on first contact.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(ip, username=AZURE_ADMIN_USER, key_filename=str(SSH_KEY_PATH), timeout=30)
    try:
        _, stdout, stderr = client.exec_command(command, timeout=timeout)
        out = stdout.read().decode()
        err = stderr.read().decode()
        if err:
            print(f" [ssh stderr] {err.strip()}", file=sys.stderr)
        return out
    finally:
        client.close()
def scp_upload(ip: str, local_path: str | Path, remote_path: str) -> None:
    """Upload a file or directory to the VM.

    Directories are walked recursively over SFTP; missing remote parent
    directories are created (mkdir -p semantics) by _sftp_put_dir.
    """
    import paramiko  # lazy import; see ssh_exec
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(ip, username=AZURE_ADMIN_USER, key_filename=str(SSH_KEY_PATH), timeout=30)
    try:
        sftp = client.open_sftp()
        local_path = Path(local_path)
        if local_path.is_dir():
            _sftp_put_dir(sftp, local_path, remote_path)
        else:
            sftp.put(str(local_path), remote_path)
        sftp.close()
    finally:
        client.close()
def _sftp_mkdir_p(sftp, remote_dir: str) -> None:
"""Recursively create remote directories (like mkdir -p)."""
dirs_to_create = []
current = remote_dir
while current and current != "/":
try:
sftp.stat(current)
break # exists
except FileNotFoundError:
dirs_to_create.append(current)
current = os.path.dirname(current)
for d in reversed(dirs_to_create):
sftp.mkdir(d)
def _sftp_put_dir(sftp, local_dir: Path, remote_dir: str) -> None:
    """Mirror local_dir onto the VM at remote_dir, recursing into subdirs."""
    _sftp_mkdir_p(sftp, remote_dir)
    for entry in sorted(local_dir.iterdir()):
        destination = f"{remote_dir}/{entry.name}"
        if entry.is_dir():
            _sftp_put_dir(sftp, entry, destination)
        else:
            sftp.put(str(entry), destination)
def scp_download(ip: str, remote_path: str, local_path: str | Path) -> bool:
    """Download a file from the VM. Returns True if successful.

    A missing remote file is an expected case (optional artifacts like
    results.tsv) and yields False rather than an exception.
    """
    import paramiko  # lazy import; see ssh_exec
    local_path = Path(local_path)
    local_path.parent.mkdir(parents=True, exist_ok=True)
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(ip, username=AZURE_ADMIN_USER, key_filename=str(SSH_KEY_PATH), timeout=30)
    try:
        sftp = client.open_sftp()
        sftp.get(remote_path, str(local_path))
        sftp.close()
        return True
    except FileNotFoundError:
        print(f" [scp] Remote file not found: {remote_path}")
        return False
    finally:
        client.close()
# ===========================================================================
# Azure VM management (via az CLI)
# ===========================================================================
def az(cmd: str, parse_json: bool = True) -> dict | list | str:
    """Run an az CLI command. Returns parsed JSON or raw stdout.

    Appends `--output json` unless the caller already passed an
    `--output` flag. Raises RuntimeError with stderr on failure.
    """
    full_cmd = f"az {cmd}"
    # Token-level check: the old substring test (`"--output" not in cmd`)
    # would be fooled by "--output" appearing inside a quoted argument
    # value such as a tag or description.
    if parse_json and "--output" not in shlex.split(cmd):
        full_cmd += " --output json"
    # shell=True is required: callers build commands with shlex.quote()
    # and rely on shell word-splitting.
    result = subprocess.run(
        full_cmd, shell=True, capture_output=True, text=True, timeout=600,
    )
    if result.returncode != 0:
        raise RuntimeError(f"az command failed:\n cmd: {full_cmd}\n stderr: {result.stderr.strip()}")
    if parse_json:
        # Some commands (e.g. deletes) legitimately print nothing.
        return json.loads(result.stdout) if result.stdout.strip() else {}
    return result.stdout
def provision_vm(project_id: str, repo_name: str) -> tuple[str, str]:
    """Provision an Azure VM using az CLI. Returns (vm_name, public_ip).

    `az group create` is idempotent; `az vm create` is not — re-running
    for an existing VM name will raise via az().
    """
    vm_name = f"uo-{project_id}"
    # Ensure resource group exists
    print(f" Ensuring resource group '{AZURE_RESOURCE_GROUP}'...")
    az(f"group create --name {AZURE_RESOURCE_GROUP} --location {AZURE_LOCATION}")
    # Create VM (az vm create handles vnet, subnet, nsg, nic, ip, disk automatically)
    ssh_pub_key_path = SSH_KEY_PATH.with_suffix(".pub")
    print(f" Creating VM '{vm_name}' ({AZURE_VM_SIZE})...")
    vm_info = az(
        f"vm create"
        f" --resource-group {AZURE_RESOURCE_GROUP}"
        f" --name {vm_name}"
        f" --image {AZURE_VM_IMAGE}"
        f" --size {AZURE_VM_SIZE}"
        f" --admin-username {AZURE_ADMIN_USER}"
        f" --ssh-key-values {shlex.quote(str(ssh_pub_key_path))}"
        f" --os-disk-size-gb 128"
        f" --storage-sku Premium_LRS"
        f" --public-ip-sku Standard"
        # Tags make stray universe-optimize resources discoverable later.
        f" --tags project=universe-optimize project_id={project_id} repo={repo_name}"
    )
    public_ip = vm_info.get("publicIpAddress", "")
    if not public_ip:
        # Fallback: query the IP explicitly (create output sometimes omits it)
        ip_info = az(
            f"vm list-ip-addresses"
            f" --resource-group {AZURE_RESOURCE_GROUP}"
            f" --name {vm_name}"
        )
        public_ip = ip_info[0]["virtualMachine"]["network"]["publicIpAddresses"][0]["ipAddress"]
    print(f" VM '{vm_name}' provisioned at {public_ip}")
    return vm_name, public_ip
def destroy_vm(project_id: str) -> None:
    """Destroy the VM and all associated resources.

    Best-effort: failures are printed, not raised, so collection can
    proceed even when Azure cleanup is flaky.
    """
    vm_name = f"uo-{project_id}"
    print(f" Deleting VM '{vm_name}' and all associated resources...")
    try:
        # --force-deletion skips a graceful shutdown. `vm delete` alone does
        # not reliably remove the NIC, public IP, NSG, or VNet — hence the
        # explicit cleanup loop below.
        az(
            f"vm delete"
            f" --resource-group {AZURE_RESOURCE_GROUP}"
            f" --name {vm_name}"
            f" --yes"
            f" --force-deletion true",
            parse_json=False,
        )
    except RuntimeError as e:
        print(f" Warning: VM delete may have partially failed: {e}")
    # Clean up any remaining resources that az vm delete may leave behind
    # (VNet and NSG are sometimes shared/retained). Names follow the
    # defaults `az vm create` derives from the VM name.
    for cmd in [
        f"network nsg delete --resource-group {AZURE_RESOURCE_GROUP} --name {vm_name}NSG --yes",
        f"network vnet delete --resource-group {AZURE_RESOURCE_GROUP} --name {vm_name}VNET --yes",
        f"network nic delete --resource-group {AZURE_RESOURCE_GROUP} --name {vm_name}VMNic",
        f"network public-ip delete --resource-group {AZURE_RESOURCE_GROUP} --name {vm_name}PublicIP",
    ]:
        try:
            az(cmd, parse_json=False)
        except RuntimeError:
            pass  # Resource may not exist or was already cleaned up
    print(f" VM '{vm_name}' destroyed.")
# ===========================================================================
# GitHub
# ===========================================================================
def fork_repo(repo: str) -> str:
    """Fork a repo to the codeflash-ai org. Returns the fork URL.

    Uses gh CLI which respects the logged-in user's auth and org permissions.
    Falls back to PyGithub if gh is not available.
    """
    repo_name = repo.split("/")[1]
    fork_url = f"https://github.com/{GITHUB_ORG}/{repo_name}"
    # Check if fork already exists (makes this function idempotent)
    result = subprocess.run(
        f"gh repo view {GITHUB_ORG}/{repo_name} --json url -q .url",
        shell=True, capture_output=True, text=True,
    )
    if result.returncode == 0 and result.stdout.strip():
        print(f" Fork already exists: {result.stdout.strip()}")
        return result.stdout.strip()
    # Create the fork via gh CLI
    result = subprocess.run(
        f"gh repo fork {repo} --org {GITHUB_ORG} --clone=false",
        shell=True, capture_output=True, text=True,
    )
    if result.returncode == 0:
        print(f" Forked {repo} -> {fork_url}")
        time.sleep(5)  # GitHub creates forks asynchronously; give it a moment
        return fork_url
    # Fallback to PyGithub (lazy import: only needed on this path)
    print(f" gh fork failed ({result.stderr.strip()}), trying PyGithub...")
    from github import Auth, Github
    g = Github(auth=Auth.Token(GITHUB_PAT))
    source = g.get_repo(repo)
    org = g.get_organization(GITHUB_ORG)
    fork = org.create_fork(source)
    print(f" Forked {repo} -> {fork.html_url}")
    time.sleep(5)
    return fork.html_url
# ===========================================================================
# Bootstrap
# ===========================================================================
def bootstrap_vm(ip: str, project: dict) -> None:
    """Upload files and run bootstrap script on the VM.

    Substitutes the __PLACEHOLDER__ tokens in bootstrap.sh with the repo
    name and live credentials, uploads it, and executes it remotely.
    """
    repo_name = project["repo"].split("/")[1]
    print(" Uploading optimization.md...")
    scp_upload(ip, OPTIMIZATION_MD, "/home/azureuser/optimization.md")
    print(" Uploading codeflash-agent plugin...")
    scp_upload(ip, CODEFLASH_AGENT_DIST, "/home/azureuser/codeflash-agent/dist")
    print(" Uploading bootstrap.sh...")
    # Read bootstrap, substitute placeholders
    bootstrap_content = BOOTSTRAP_SH.read_text()
    bootstrap_content = bootstrap_content.replace("__REPO_NAME__", repo_name)
    bootstrap_content = bootstrap_content.replace("__GITHUB_PAT__", GITHUB_PAT)
    bootstrap_content = bootstrap_content.replace("__AWS_BEARER_TOKEN_BEDROCK__", AWS_BEARER_TOKEN_BEDROCK)
    bootstrap_content = bootstrap_content.replace("__CODEFLASH_API_KEY__", CODEFLASH_API_KEY)
    # Write temp file, upload, execute. The temp file contains live
    # credentials, so it MUST be removed even when the upload fails —
    # previously a failed scp_upload left the secrets on disk.
    tmp_bootstrap = BASE_DIR / ".tmp_bootstrap.sh"
    try:
        tmp_bootstrap.write_text(bootstrap_content)
        scp_upload(ip, tmp_bootstrap, "/home/azureuser/bootstrap.sh")
    finally:
        tmp_bootstrap.unlink(missing_ok=True)
    print(" Running bootstrap (this takes a few minutes)...")
    output = ssh_exec(ip, "chmod +x ~/bootstrap.sh && ~/bootstrap.sh", timeout=600)
    print(output)
# ===========================================================================
# Run Claude Code
# ===========================================================================
def launch_claude(ip: str) -> None:
    """Launch Claude Code on the VM in the background.

    Runs under nohup inside a detached shell so the process survives the
    SSH session; all output is teed to ~/results/claude_output.log for
    later collection. check_vm_status() later looks for a 'claude'
    process to decide whether the run is still in progress.
    """
    home = f"/home/{AZURE_ADMIN_USER}"
    # Note the escaping: the outer f-string's `\\` become single literal
    # backslashes (shell line continuations) in the remote command.
    cmd = dedent(f"""\
    nohup bash -c '
    source {home}/.env_universe 2>/dev/null
    cd {home}/project && claude \\
    --dangerously-skip-permissions \\
    --plugin-dir {home}/codeflash-agent/dist \\
    --model opus \\
    --max-turns 400 \\
    --print \\
    -p "Read the CLAUDE.md file and follow its instructions exactly." \\
    < /dev/null \\
    2>&1 | tee {home}/results/claude_output.log
    ' > /dev/null 2>&1 &
    """)
    ssh_exec(ip, cmd, timeout=30)
    print(" Claude Code launched in background.")
def check_vm_status(ip: str) -> dict:
    """Check if Claude is still running and if results are ready.

    Returns a dict with reachable/is_running/has_results flags; any SSH
    failure is reported as unreachable rather than raised.
    """
    try:
        # The '[c]laude' bracket trick stops pgrep -f from matching the
        # very shell that is executing this check (its own command line
        # contains the string 'claude'), which previously made the VM
        # always report 'running'.
        is_running = ssh_exec(ip, "pgrep -f '[c]laude' > /dev/null 2>&1 && echo running || echo done", timeout=15)
        has_results = ssh_exec(ip, "test -f ~/results/summary.json && echo yes || echo no", timeout=15)
        return {
            "reachable": True,
            "is_running": is_running.strip() == "running",
            "has_results": has_results.strip() == "yes",
        }
    except Exception as e:
        return {"reachable": False, "is_running": False, "has_results": False, "error": str(e)}
# ===========================================================================
# Results collection
# ===========================================================================
def collect_results(project_id: str, ip: str) -> dict | None:
    """Download results from the VM. Returns parsed summary or None.

    Missing remote files are tolerated (scp_download prints and returns
    False); summary.json must exist and be non-empty for a parse.
    """
    home = f"/home/{AZURE_ADMIN_USER}"
    result_dir = RESULTS_DIR / project_id
    result_dir.mkdir(parents=True, exist_ok=True)
    # Try both ~/results/ (written by optimization.md) and ~/project/.codeflash/ (written by agent)
    # NOTE(review): the .codeflash entries target the same local filenames as
    # the ~/results entries, so when both exist the agent-written copies
    # overwrite the earlier downloads — presumably intentional; confirm.
    files = [
        (f"{home}/results/summary.json", result_dir / "summary.json"),
        (f"{home}/results/results.tsv", result_dir / "results.tsv"),
        (f"{home}/results/HANDOFF.md", result_dir / "HANDOFF.md"),
        (f"{home}/results/claude_output.log", result_dir / "claude_output.log"),
        (f"{home}/project/.codeflash/results.tsv", result_dir / "results.tsv"),
        (f"{home}/project/.codeflash/HANDOFF.md", result_dir / "HANDOFF.md"),
    ]
    for remote, local in files:
        scp_download(ip, remote, local)
    summary_path = result_dir / "summary.json"
    if summary_path.exists() and summary_path.stat().st_size > 0:
        with open(summary_path) as f:
            return json.load(f)
    return None
def verify_and_push(ip: str) -> bool:
    """Verify the optimization branch was pushed; push if not.

    Returns True only once origin/codeflash/optimize is confirmed to
    exist on the remote.
    """
    home = f"/home/{AZURE_ADMIN_USER}"
    probe = f"cd {home}/project && git log --oneline origin/codeflash/optimize -1 2>/dev/null || echo NOT_PUSHED"
    check = ssh_exec(ip, probe, timeout=30)
    if "NOT_PUSHED" not in check:
        print(f" Branch already pushed: {check.strip()}")
        return True
    print(" Branch not pushed yet, pushing now...")
    result = ssh_exec(ip, f"cd {home}/project && git push origin codeflash/optimize 2>&1", timeout=60)
    print(f" {result.strip()}")
    # Re-run the probe: `git push` output never contains the NOT_PUSHED
    # sentinel, so inspecting it (as the previous version did) reported
    # success even when the push failed. A successful push updates the
    # local remote-tracking ref, which the probe then sees.
    recheck = ssh_exec(ip, probe, timeout=30)
    return "NOT_PUSHED" not in recheck
# ===========================================================================
# Email system
# ===========================================================================
def build_summary_sentence(optimizations: list[dict]) -> str:
    """Build a human-readable summary of optimizations.

    Describes up to three entries (preferring speedup, then technique),
    appending ", and N more" when there are more than three.
    """
    if not optimizations:
        return ""
    descriptions = []
    for opt in optimizations[:3]:
        name = opt.get("function", "unknown")
        speedup = opt.get("cpu_speedup", "")
        technique = opt.get("technique", "")
        if speedup:
            descriptions.append(f"{speedup} {name}")
        elif technique:
            descriptions.append(f"{name} ({technique})")
        else:
            descriptions.append(name)
    total = len(optimizations)
    sentence = f"{total} merge-ready commits including " + ", ".join(descriptions)
    if total > 3:
        sentence += f", and {total - 3} more"
    return sentence
def build_context(project_id: str) -> dict | None:
    """Build context.json from summary.json + projects.json.

    Returns the context dict, or None when there is nothing worth
    emailing about (no summary, failed run, or zero kept optimizations).
    The dict is also persisted to results/<id>/context.json for the
    render/show commands.
    """
    project = load_project(project_id)
    summary_path = RESULTS_DIR / project_id / "summary.json"
    if not summary_path.exists():
        print(f" No summary.json found for {project_id}")
        return None
    with open(summary_path) as f:
        summary = json.load(f)
    # Only draft outreach when the run produced at least one kept optimization.
    if summary.get("status") == "failed" or summary.get("total_keeps", 0) == 0:
        print(f" Skipping {project_id}: status={summary.get('status')}, keeps={summary.get('total_keeps', 0)}")
        return None
    best = summary.get("headline_stats", {})
    opts = summary.get("optimizations", [])
    repo_name = project["repo"].split("/")[1]
    context = {
        # NOTE(review): split()[0] raises IndexError on an empty contact
        # name — assumes the roster always has one; confirm.
        "first_name": project["target_contact"]["name"].split()[0],
        "full_name": project["target_contact"]["name"],
        "title": project["target_contact"]["title"],
        "company_name": project.get("company", ""),
        "repo": project["repo"],
        "repo_name": repo_name,
        "fork_url": f"https://github.com/{GITHUB_ORG}/{repo_name}",
        "branch_url": f"https://github.com/{GITHUB_ORG}/{repo_name}/tree/codeflash/optimize",
        "num_optimizations": summary.get("total_keeps", 0),
        "best_function": best.get("best_function", ""),
        "best_speedup": best.get("best_single_speedup", ""),
        "best_description": opts[0]["description"] if opts else "",
        "second_best_function": opts[1]["function"] if len(opts) > 1 else "",
        "second_best_technique": opts[1].get("technique", "") if len(opts) > 1 else "",
        "total_cpu_improvement_pct": best.get("total_cpu_improvement_pct", 0),
        "total_memory_saved_mb": best.get("total_memory_saved_mb", 0),
        "optimizations_summary": build_summary_sentence(opts),
        "calendly_link": CALENDLY_LINK,
    }
    context_path = RESULTS_DIR / project_id / "context.json"
    with open(context_path, "w") as f:
        json.dump(context, f, indent=2)
    print(f" Built context.json for {project_id}")
    return context
def render_emails(project_id: str) -> None:
    """Render every email template for a project from its context.json.

    Templates with variables absent from the context are skipped with a
    warning rather than aborting the whole batch.
    """
    context_path = RESULTS_DIR / project_id / "context.json"
    if not context_path.exists():
        print(f" No context.json for {project_id}. Run 'email {project_id}' first.")
        return
    context = json.loads(context_path.read_text())
    emails_dir = RESULTS_DIR / project_id / "emails"
    emails_dir.mkdir(parents=True, exist_ok=True)
    templates = sorted(EMAIL_TEMPLATES_DIR.glob("email_*.md"))
    if not templates:
        print(" No email templates found in email_templates/")
        return
    for template_path in templates:
        try:
            rendered = template_path.read_text().format(**context)
        except KeyError as missing:
            print(f" Warning: missing variable {missing} in template {template_path.name}")
            continue
        destination = emails_dir / template_path.name
        destination.write_text(rendered)
        print(f" Rendered {destination.name}")
def render_all_emails() -> None:
    """Re-render emails for every project directory that has a context.json."""
    if not RESULTS_DIR.exists():
        print("No results directory found.")
        return
    for project_dir in sorted(RESULTS_DIR.iterdir()):
        if project_dir.is_dir() and (project_dir / "context.json").exists():
            print(f"Re-rendering {project_dir.name}...")
            render_emails(project_dir.name)
def show_emails(project_id: str) -> None:
    """Dump every rendered email for a project to stdout, with separators."""
    emails_dir = RESULTS_DIR / project_id / "emails"
    if not emails_dir.exists():
        print(f"No rendered emails for {project_id}. Run 'email {project_id}' first.")
        return
    banner = "=" * 60
    for email_path in sorted(emails_dir.glob("email_*.md")):
        print(f"\n{banner}")
        print(f" {email_path.name}")
        print(f"{banner}")
        print(email_path.read_text())
def show_context(project_id: str) -> None:
    """Dump a project's context.json to stdout as pretty-printed JSON."""
    context_path = RESULTS_DIR / project_id / "context.json"
    if not context_path.exists():
        print(f"No context.json for {project_id}.")
        return
    with open(context_path) as f:
        data = json.load(f)
    print(json.dumps(data, indent=2))
# ===========================================================================
# Commands
# ===========================================================================
def cmd_provision(args: argparse.Namespace) -> None:
    """Fork the repo, provision an Azure VM, wait for SSH, and bootstrap it."""
    project_id = args.project_id
    project = load_project(project_id)
    db = get_db()
    upsert_project(db, project)
    print(f"\n[provision] {project_id}: {project['repo']}")

    # Step 1: fork the upstream repo into the org.
    print("\n Step 1: Forking repo...")
    fork_url = fork_repo(project["repo"])
    update_status(db, project_id, "provisioning", fork_url=fork_url)

    # Step 2: create the VM.
    print("\n Step 2: Provisioning Azure VM...")
    repo_name = project["repo"].split("/")[1]
    vm_name, vm_ip = provision_vm(project_id, repo_name)
    update_status(db, project_id, "provisioning", vm_name=vm_name, vm_ip=vm_ip)

    # Poll until sshd answers: 30 tries x 10s sleep, roughly 5 minutes.
    print("\n Waiting for SSH to become available...")
    reachable = False
    for _ in range(30):
        try:
            ssh_exec(vm_ip, "echo ready", timeout=10)
            reachable = True
            break
        except Exception:
            time.sleep(10)
    if not reachable:
        print(" ERROR: VM not reachable via SSH after 5 minutes")
        update_status(db, project_id, "failed")
        return

    # Step 3: install the toolchain and clone the fork on the VM.
    print("\n Step 3: Bootstrapping VM...")
    bootstrap_vm(vm_ip, project)
    update_status(db, project_id, "provisioned")
    print(f"\n[provision] Done. VM ready at {vm_ip}. Run: python orchestrator.py run {project_id}")
def cmd_run(args: argparse.Namespace) -> None:
    """Start the Claude Code optimization session on a provisioned VM."""
    project_id = args.project_id
    db = get_db()
    row = get_project_row(db, project_id)
    if not row:
        print(f"Project {project_id} not in DB. Run 'provision' first.")
        return
    if not row["vm_ip"]:
        print(f"No VM IP for {project_id}. Run 'provision' first.")
        return
    print(f"\n[run] Launching Claude Code on {row['vm_ip']}...")
    launch_claude(row["vm_ip"])
    started = datetime.now(timezone.utc).isoformat()
    update_status(db, project_id, "running", started_at=started)
    print(f"[run] Done. Monitor with: python orchestrator.py status {project_id}")
def cmd_status(args: argparse.Namespace) -> None:
    """Show one project's status in detail, or a dashboard of all projects."""
    db = get_db()

    if args.project_id:
        # Detailed view for a single project.
        row = get_project_row(db, args.project_id)
        if not row:
            print(f"Project {args.project_id} not found in DB.")
            return
        print(f"\nProject: {row['id']}")
        print(f"Repo: {row['repo']}")
        print(f"Status: {row['status']}")
        print(f"VM: {row['vm_name'] or '-'} ({row['vm_ip'] or '-'})")
        print(f"Fork: {row['fork_url'] or '-'}")
        print(f"Started: {row['started_at'] or '-'}")
        if row["vm_ip"] and row["status"] == "running":
            # Live-check the VM only when the project is actively running.
            vm_status = check_vm_status(row["vm_ip"])
            print(f"Claude: {'running' if vm_status['is_running'] else 'stopped'}")
            print(f"Results: {'ready' if vm_status['has_results'] else 'not yet'}")
        if row["num_optimizations"]:
            print(f"Optimizations: {row['num_optimizations']}")
            print(f"Best speedup: {row['best_speedup'] or '-'}")
        return

    # Dashboard view across every project in the DB.
    rows = db.execute("SELECT * FROM projects ORDER BY id").fetchall()
    if not rows:
        print("No projects in DB. Add to projects.json and run 'provision'.")
        return
    print(f"\n{'ID':<15} {'Repo':<30} {'Status':<14} {'Opts':>5} {'Best':<12} {'Email'}")
    print("-" * 90)
    for row in rows:
        email_dir = RESULTS_DIR / row["id"] / "emails"
        has_drafts = email_dir.exists() and bool(list(email_dir.glob("email_*.md")))
        email_status = "DRAFT READY" if has_drafts else "-"
        print(f"{row['id']:<15} {row['repo']:<30} {row['status']:<14} "
              f"{row['num_optimizations'] or 0:>5} {row['best_speedup'] or '-':<12} {email_status}")
def cmd_monitor(args: argparse.Namespace) -> None:
    """Poll running VMs until none remain in the 'running' state.

    Each pass reports per-project progress, enforces VM_TIMEOUT_SECONDS
    (killing a stuck claude process), and marks a project completed once
    results exist and the claude process has exited.
    """
    interval = args.interval
    db = get_db()
    print(f"[monitor] Polling running VMs every {interval}s. Ctrl-C to stop.\n")
    while True:
        rows = db.execute("SELECT * FROM projects WHERE status = 'running'").fetchall()
        if not rows:
            print(" No running projects.")
            break
        for row in rows:
            ip = row["vm_ip"]
            pid = row["id"]
            status = check_vm_status(ip)
            # FIX: elapsed_s was previously unbound when started_at was NULL,
            # so the timeout comparison below raised NameError. Default to 0
            # so a project with no recorded start time never times out.
            elapsed_s = 0.0
            elapsed = ""
            if row["started_at"]:
                started = datetime.fromisoformat(row["started_at"])
                elapsed_s = (datetime.now(timezone.utc) - started).total_seconds()
                elapsed = f" ({int(elapsed_s / 60)}m elapsed)"
            if elapsed_s > VM_TIMEOUT_SECONDS and not status["has_results"]:
                print(f" {pid}: TIMEOUT after {int(elapsed_s / 3600)}h. Killing claude...")
                ssh_exec(ip, "pkill -f claude || true", timeout=15)
                time.sleep(60)
                status = check_vm_status(ip)
                if not status["has_results"]:
                    # FIX: a timed-out project previously stayed in 'running'
                    # and was re-killed on every poll forever; mark it failed
                    # so the monitor loop can eventually terminate.
                    print(f" {pid}: no results after timeout. Marking failed.")
                    update_status(db, pid, "failed")
                    continue
            if status["has_results"] and not status["is_running"]:
                print(f" {pid}: COMPLETED{elapsed}")
                update_status(db, pid, "completed", completed_at=datetime.now(timezone.utc).isoformat())
            elif status["is_running"]:
                print(f" {pid}: running{elapsed}")
            elif not status["reachable"]:
                print(f" {pid}: UNREACHABLE - {status.get('error', '')}")
            else:
                print(f" {pid}: claude stopped, no results{elapsed}")
        print()
        time.sleep(interval)
def cmd_collect(args: argparse.Namespace) -> None:
    """Download results from a project's VM, record them, then tear the VM down."""
    project_id = args.project_id
    db = get_db()
    row = get_project_row(db, project_id)
    if not row or not row["vm_ip"]:
        print(f"No VM found for {project_id}.")
        return
    ip = row["vm_ip"]
    print(f"\n[collect] {project_id} from {ip}")

    # Pull summary.json and related artifacts off the VM.
    print("\n Step 1: Downloading results...")
    summary = collect_results(project_id, ip)

    # Make sure the optimization branch actually reached the fork.
    print("\n Step 2: Verifying branch push...")
    verify_and_push(ip)

    # Persist the outcome in the local DB.
    completed_at = datetime.now(timezone.utc).isoformat()
    if summary:
        update_status(
            db, project_id, "completed",
            completed_at=completed_at,
            num_optimizations=summary.get("total_keeps", 0),
            best_speedup=summary.get("headline_stats", {}).get("best_single_speedup", ""),
            optimization_branch="codeflash/optimize",
            summary_json=json.dumps(summary),
        )
        print(f"\n Results: {summary.get('total_keeps', 0)} optimizations kept")
    else:
        update_status(db, project_id, "completed", completed_at=completed_at)
        print("\n Warning: No summary.json found. Marked as completed with no results.")

    # Tear down the VM unless the operator asked to keep it.
    if args.keep_vm:
        print("\n Keeping VM alive (--keep-vm flag).")
    else:
        print("\n Step 3: Destroying VM...")
        destroy_vm(project_id)
        update_status(db, project_id, "destroyed")

    print(f"\n[collect] Done. Results in results/{project_id}/")
    print(f" Next: python orchestrator.py email {project_id}")
def cmd_results(args: argparse.Namespace) -> None:
    """Print a detailed, human-readable report of a project's optimization results.

    Reads results/<project_id>/summary.json (produced by `collect`) plus the
    project's entry in projects.json for contact/company data.
    """
    # FIX: the DB handle and project row were fetched here but never used;
    # this command reads only from summary.json and projects.json.
    project_id = args.project_id
    summary_path = RESULTS_DIR / project_id / "summary.json"
    if not summary_path.exists():
        print(f"No results for {project_id}. Run 'collect' first.")
        return
    with open(summary_path) as f:
        summary = json.load(f)
    project = load_project(project_id)
    repo_name = project["repo"].split("/")[1]
    contact = project.get("target_contact", {})

    print(f"\nProject: {project['repo']}")
    print(f"Status: {summary.get('status', 'unknown')}")
    print(f"Branch: https://github.com/{GITHUB_ORG}/{repo_name}/tree/codeflash/optimize")
    print(f"Experiments: {summary.get('total_experiments', 0)} total "
          f"({summary.get('total_keeps', 0)} kept, {summary.get('total_discards', 0)} discarded)")

    # Show up to the ten best optimizations, one per line.
    opts = summary.get("optimizations", [])
    if opts:
        print("\nTop optimizations:")
        for i, opt in enumerate(opts[:10], 1):
            speedup = opt.get("cpu_speedup", "")
            desc = opt.get("technique", opt.get("description", ""))
            print(f" {i}. {opt.get('function', '?'):<25} {speedup:<15} {desc}")

    hs = summary.get("headline_stats", {})
    cpu_pct = hs.get("total_cpu_improvement_pct", 0)
    mem_mb = hs.get("total_memory_saved_mb", 0)
    if cpu_pct or mem_mb:
        print(f"\nHeadline: {cpu_pct}% total CPU improvement, {mem_mb} MiB memory saved")
    if contact.get("name"):
        print(f"\nContact: {contact['name']} ({contact.get('title', '')}) -- {contact.get('email', '')}")
    print(f"Emails: results/{project_id}/emails/")
def cmd_email(args: argparse.Namespace) -> None:
    """Build email context and render drafts, or service the preview flags."""
    if args.rerender_all:
        print("[email] Re-rendering all project emails...")
        render_all_emails()
        return
    project_id = args.project_id
    if not project_id:
        print("Usage: email <project-id> or email --rerender-all")
        return
    if args.show_context:
        show_context(project_id)
        return
    if args.show:
        show_emails(project_id)
        return
    # Default action: rebuild context.json, then render every template.
    print(f"[email] Building context and rendering emails for {project_id}...")
    ctx = build_context(project_id)
    if not ctx:
        print("[email] Could not build context (no results or all failed).")
        return
    render_emails(project_id)
    print(f"\n[email] Done. Preview with: python orchestrator.py email {project_id} --show")
def cmd_mark_sent(args: argparse.Namespace) -> None:
    """Record that the outreach emails for a project have been sent."""
    update_status(get_db(), args.project_id, "email_sent")
    print(f"Marked {args.project_id} as email_sent.")
def cmd_destroy(args: argparse.Namespace) -> None:
    """Tear down a project's VM without collecting any results."""
    pid = args.project_id
    print(f"[destroy] Destroying VM for {pid}...")
    destroy_vm(pid)
    update_status(get_db(), pid, "destroyed")
    print("[destroy] Done.")
# ===========================================================================
# CLI
# ===========================================================================
def main() -> None:
    """Parse CLI arguments and dispatch to the matching cmd_* handler."""
    parser = argparse.ArgumentParser(description="Universe Optimize Orchestrator")
    sub = parser.add_subparsers(dest="command")

    # Each subparser binds its handler via set_defaults(func=...).
    p = sub.add_parser("provision", help="Provision VM, fork repo, bootstrap")
    p.add_argument("project_id")
    p.set_defaults(func=cmd_provision)

    p = sub.add_parser("run", help="Launch Claude Code on the VM")
    p.add_argument("project_id")
    p.set_defaults(func=cmd_run)

    p = sub.add_parser("status", help="Show status dashboard or single project")
    p.add_argument("project_id", nargs="?")
    p.set_defaults(func=cmd_status)

    p = sub.add_parser("monitor", help="Poll running VMs periodically")
    p.add_argument("--interval", type=int, default=600, help="Poll interval in seconds")
    p.set_defaults(func=cmd_monitor)

    p = sub.add_parser("collect", help="Collect results and destroy VM")
    p.add_argument("project_id")
    p.add_argument("--keep-vm", action="store_true", help="Don't destroy VM after collecting")
    p.set_defaults(func=cmd_collect)

    p = sub.add_parser("results", help="Show detailed optimization results")
    p.add_argument("project_id")
    p.set_defaults(func=cmd_results)

    p = sub.add_parser("email", help="Build context and render email drafts")
    p.add_argument("project_id", nargs="?")
    p.add_argument("--rerender-all", action="store_true", help="Re-render all project emails")
    p.add_argument("--show", action="store_true", help="Preview rendered emails")
    p.add_argument("--show-context", action="store_true", help="Show raw context.json")
    p.set_defaults(func=cmd_email)

    p = sub.add_parser("mark-sent", help="Mark project emails as sent")
    p.add_argument("project_id")
    p.set_defaults(func=cmd_mark_sent)

    p = sub.add_parser("destroy", help="Destroy VM without collecting results")
    p.add_argument("project_id")
    p.set_defaults(func=cmd_destroy)

    args = parser.parse_args()
    if not args.command:
        parser.print_help()
        return
    args.func(args)


if __name__ == "__main__":
    main()

View file

@ -1,11 +1,9 @@
Top level plan:
1. Research 1000 projects on github that are important and popular and in Python, javascript/typescript and java. They are either used by important companies or are used by many people.
2. For each of the above project, start a new virtual machine on Azure with Ubuntu and 4 CPUs and 16GB RAM (choose a similar machine). On this machine download claude and setup the API key to access claude (to fill in later).
1. Research 1000 projects on github that are important and popular and in Python, javascript/typescript and java. They are either used by important companies or are used by many people.
2. For each of the above project, start a new virtual machine on Azure with Ubuntu and 4 CPUs and 16GB RAM (choose a similar machine). On this machine download claude and setup the API key to access claude (to fill in later). When you start claude, run it in --dangerously-skip-permissions mode and with --plugin-dir=.path/to/codeflash-agent/dist. Codeflash agent should be copied to this machine from /Users/saurabh/Library/CloudStorage/Dropbox/hacks/codeflash-agent/dist. Instructions on how to use this codeflash-agent will follow
3. Fork the chosen repo on to the codeflash-ai github org and clone the fork on the new virtual machine.
4. Understand the source code of the project and try to understand what it does and how.
5. Setup the project to run on the new virtual machine. Install the dependencies and try to get the unit tests to pass.
6. Now we want to optimize the project. To do so, we will first use the understanding of the project to come with a few end to end use cases as performance benchmarks. Run these and ensure that they run and seem reasonable.
7. Use the codeflash-agent to optimize the found benchmarks. Codeflash agent will profile the code and work in an incremental fashion to optimize the benchmark piece by piece. This will open individual PRs to the github repo.
8. Try to create a branch with all the optimizations merged in for the given performance benchmark and try to measure the end to end performance optimization results wrt the original benchmark. Try to fix any bugs that might arise from merging all the branches together and try to gain confidence on the benchmarking scheme here.
9. Write an email at the end with the purpose of it being selling the results of the optimization to an engineering leader at the company of the project or the main developer of the project.
5. From there we will initiate optimization process start using `/codeflash-optimize start` command. Do read what the plugin does to know what to expect. This will setup the project on the VM, and setup benchmarks and find optimizations.
6. Codeflash agents will find optimizations and create a stacked branch with a lot of the optimizations. Collect the results of the optimizations for the next step.
7. Write an email at the end with the purpose of it being selling the results of the optimization to an engineering leader at the company of the project or the main developer of the project.

View file

@ -0,0 +1,720 @@
# Universe Optimize: Automated Optimization Outbound Program
## Overview
This program automates the full pipeline: discover high-value open-source projects, fork them, spin up Azure VMs, run Codeflash optimizations via Claude Code, collect results, and draft personalized outreach emails with real optimization proof points. The orchestrator runs locally and manages everything end-to-end.
---
## Architecture
```
Local Machine (Orchestrator)
|
|-- orchestrator.py (Python script that manages the full pipeline)
| |
| |-- Azure SDK: provisions/destroys VMs
| |-- GitHub API: forks repos to codeflash-ai org
| |-- SSH (paramiko): executes commands on VMs
| |-- Results DB: SQLite tracking all projects/VMs/results
| |-- Email drafter: generates personalized emails from results
|
+-- VM 1 (Azure Ubuntu, 4 CPU / 16GB RAM)
| |-- Claude Code (--dangerously-skip-permissions, --plugin-dir=codeflash-agent)
| |-- Forked repo cloned
| |-- optimization.md injected as CLAUDE.md instructions
| |-- Results written to .codeflash/results.tsv + summary.json
|
+-- VM 2 ...
+-- VM N ...
```
---
## Phase 0: Project Discovery (Manual for POC, Automated Later)
### POC: Hand-pick 2 projects
For the proof of concept, manually select 2 projects that are:
- Popular (>1k stars), actively maintained
- Python, JavaScript/TypeScript, or Java
- Have a test suite (pytest, jest, JUnit) so optimizations can be verified
- Backed by a company or used by companies where we can identify an engineering leader to email
- Performance-sensitive domain (data processing, web frameworks, databases, ML infra, etc.)
Store project metadata in `projects.json`:
```json
[
{
"id": "project-001",
"repo": "org/repo-name",
"language": "python",
"stars": 5200,
"description": "...",
"company": "Company Name",
"domain": "data processing",
"target_contact": {
"name": "First Last",
"title": "CTO",
"email": "...",
"linkedin": "..."
},
"why_selected": "High star count, performance-critical data pipeline, active maintenance, company has 50-200 employees"
}
]
```
### At Scale (Post-POC)
Use GitHub API to search for repos matching criteria:
- `stars:>1000 language:python` (repeat for JS/TS, Java)
- Filter by: has CI, recent commits, has test suite, identifiable company/maintainer
- Cross-reference with Sumble/Apollo APIs (as in existing outbound program) to find the right engineering contact
- Target 1000 projects total across Python, JS/TS, Java
---
## Phase 1: Infrastructure Provisioning
### What the Orchestrator Does
The orchestrator (`orchestrator.py`) is a Python script that manages the full lifecycle. It uses:
- **Azure SDK (`azure-mgmt-compute`, `azure-identity`)** to provision and destroy VMs
- **Paramiko** for SSH command execution on VMs
- **GitHub API (`PyGithub`)** for forking repos
- **SQLite** for tracking state across runs
### VM Provisioning
For each project in `projects.json`:
1. **Fork the repo** to the `codeflash-ai` GitHub org:
```python
github.get_repo("org/repo").create_fork(organization="codeflash-ai")
```
2. **Provision an Azure VM**:
- Image: Ubuntu 24.04 LTS
- Size: `Standard_D4s_v5` (4 vCPU, 16 GB RAM)
- Region: East US (or nearest to minimize latency)
- Disk: 128 GB SSD
- NSG: SSH only (port 22), locked to orchestrator's IP
- Tags: `{"project": "universe-optimize", "repo": "org/repo-name", "id": "project-001"}`
3. **Bootstrap the VM** via SSH (single setup script):
```bash
#!/bin/bash
set -euo pipefail
# System deps
sudo apt-get update && sudo apt-get install -y git curl build-essential python3-dev
# Install language runtimes as needed
# Python: install uv
curl -LsSf https://astral.sh/uv/install.sh | sh
source $HOME/.local/bin/env
# Node (for JS/TS projects)
curl -fsSL https://deb.nodesource.com/setup_22.x | sudo -E bash -
sudo apt-get install -y nodejs
# Java (for Java projects)
sudo apt-get install -y openjdk-21-jdk maven gradle
# Install Claude Code CLI
curl -fsSL https://claude.ai/install.sh | bash
# Set up these API keys in .bashrc
CODEFLASH_API_KEY=__CODEFLASH_API_KEY__
CLAUDE_CODE_USE_BEDROCK=1
AWS_REGION=us-east-1
AWS_BEARER_TOKEN_BEDROCK=__AWS_BEARER_TOKEN_BEDROCK__
LC_ALL=en_US.UTF-8
source ~/.bashrc
# Copy codeflash-agent plugin
mkdir -p ~/codeflash-agent
# (orchestrator SCPs the dist/ directory here)
# Clone the forked repo
git clone https://github.com/codeflash-ai/<repo-name>.git ~/project
cd ~/project
# Inject optimization.md as the project's CLAUDE.md
cp ~/optimization.md ~/project/CLAUDE.md
```
### State Tracking
The orchestrator maintains a SQLite DB (`universe_optimize.db`):
```sql
CREATE TABLE projects (
id TEXT PRIMARY KEY,
repo TEXT NOT NULL,
language TEXT NOT NULL,
fork_url TEXT,
company TEXT,
contact_name TEXT,
contact_email TEXT,
contact_title TEXT,
status TEXT DEFAULT 'pending', -- pending, provisioning, running, completed, failed, destroyed
vm_id TEXT,
vm_ip TEXT,
created_at TIMESTAMP,
started_at TIMESTAMP,
completed_at TIMESTAMP,
optimization_branch TEXT,
num_optimizations INTEGER DEFAULT 0,
best_speedup TEXT,
summary_json TEXT,
email_draft_path TEXT
);
```
---
## Phase 2: Running the Optimization
### optimization.md (Injected as CLAUDE.md on Each VM)
This file tells Claude Code exactly what to do when it starts. It is placed as `CLAUDE.md` in the project root before Claude is launched.
```markdown
# Codeflash Optimization Run
You are running an automated optimization session on this open-source project.
Your goal is to find as many provably-faster code implementations as possible
and stack them as commits on a single branch.
## Your Mission
1. Understand the project: read the README, project structure, and key source files.
2. Set up the project: install dependencies, verify tests pass.
3. Run `/codeflash-optimize start` and when asked for context, respond with "go".
4. Let the optimization agent work. It will:
- Profile the codebase (CPU, memory, GC)
- Identify bottleneck functions
- Implement optimizations one at a time
- Verify each with tests
- Commit each successful optimization
5. When the optimization agent completes (or plateaus), collect results.
## After Optimization Completes
Write a file `~/results/summary.json` with the following structure:
```json
{
"repo": "<org/repo>",
"language": "<python|javascript|java>",
"branch": "<optimization branch name>",
"total_experiments": <N>,
"total_keeps": <N>,
"total_discards": <N>,
"optimizations": [
{
"commit": "<sha>",
"function": "<function_name>",
"file": "<file_path>",
"description": "<what was optimized>",
"cpu_speedup": "<e.g. 2.3x faster>",
"memory_reduction": "<e.g. -50 MiB>",
"technique": "<e.g. replaced list with set, eliminated deepcopy>"
}
],
"headline_stats": {
"best_single_speedup": "<e.g. 5x faster>",
"best_function": "<function_name>",
"total_cpu_improvement_pct": <number>,
"total_memory_saved_mb": <number>
},
"pr_ready_commits": <N>,
"status": "completed|plateaued|failed",
"error": "<if failed, why>"
}
```
Also copy `.codeflash/results.tsv` and `.codeflash/HANDOFF.md` to `~/results/`.
## Important
- Work fully autonomously. Do not ask questions -- make reasonable decisions.
- If tests fail during setup, note the pre-existing failures and work around them.
- If the project cannot be set up (missing deps, private packages), write summary.json with status "failed" and an error message, then stop.
- After optimization completes, push the optimization branch to the remote fork: `git push origin codeflash/optimize`.
- Time limit: aim to complete within 8 hours. If still running after 8 hours, wrap up, write summary.json with whatever results you have, and stop.
```
### Launching Claude Code
The orchestrator SSHs into each VM and runs:
```bash
cd ~/project && claude \
--dangerously-skip-permissions \
--plugin-dir ~/codeflash-agent/dist \
--model opus \
--max-turns 400 \
--print \
"Read the CLAUDE.md file and follow its instructions exactly." \
2>&1 | tee ~/results/claude_output.log
```
Key flags:
- `--dangerously-skip-permissions`: no human approval needed
- `--plugin-dir`: loads the codeflash-agent plugin with `/codeflash-optimize` skill
- `--print`: non-interactive mode, outputs to stdout
- `--max-turns 400`: generous turn limit for thorough optimization
- Output captured to log file for debugging
### Monitoring
The orchestrator polls each VM periodically (every 10 minutes):
```python
def check_vm_status(vm_ip):
"""Check if Claude is still running and if results are ready."""
# Check if claude process is still running
is_running = ssh_exec(vm_ip, "pgrep -f 'claude' > /dev/null && echo running || echo done")
# Check if summary.json exists (optimization complete)
has_results = ssh_exec(vm_ip, "test -f ~/results/summary.json && echo yes || echo no")
# Check elapsed time
elapsed = ssh_exec(vm_ip, "stat -c %Y ~/project/CLAUDE.md | xargs -I{} expr $(date +%s) - {}")
return {
"is_running": is_running.strip() == "running",
"has_results": has_results.strip() == "yes",
"elapsed_seconds": int(elapsed.strip())
}
```
If a VM has been running past the configured timeout (`VM_TIMEOUT_SECONDS` in the orchestrator) with no results, the orchestrator:
1. SSHs in and sends SIGTERM to claude
2. Waits 60s for graceful shutdown
3. Checks if partial results exist in `.codeflash/results.tsv`
4. Marks project as `failed` or `completed` (with partial results)
---
## Phase 3: Results Collection
Once Claude completes on a VM:
1. **SCP the results** from the VM to local:
```python
scp_download(vm_ip, "~/results/summary.json", f"results/{project_id}/summary.json")
scp_download(vm_ip, "~/results/results.tsv", f"results/{project_id}/results.tsv")
scp_download(vm_ip, "~/results/HANDOFF.md", f"results/{project_id}/HANDOFF.md")
scp_download(vm_ip, "~/results/claude_output.log", f"results/{project_id}/claude_output.log")
```
2. **Verify the branch was pushed**. The VM Claude pushes the branch as part of its workflow. Verify:
```bash
ssh vm "cd ~/project && git log --oneline origin/codeflash/optimize -5"
```
If not pushed (e.g. Claude failed before that step), push from the VM before destroying it:
```bash
ssh vm "cd ~/project && git push origin codeflash/optimize"
```
3. **Update the DB** with results from summary.json.
4. **Destroy the VM** to stop burning money:
```python
azure_client.virtual_machines.begin_delete(resource_group, vm_name)
```
### Results Directory Structure
```
results/
project-001/
summary.json # Full optimization results (from VM)
results.tsv # Per-experiment log (from VM)
HANDOFF.md # Session state (from VM)
claude_output.log # Full Claude transcript (for debugging)
context.json # Template variables (built by orchestrator from summary + project data)
emails/
email_1_proof.md # Rendered email (regenerated on template edit)
email_2_followup.md
email_3_risk.md
project-002/
...
```
---
## Phase 4: Email Drafting
The email system separates **data** from **templates**. All optimization results and contact info are stored locally as structured data (`context.json`). Templates are standalone files with `{placeholder}` variables. You can edit a template and re-render all emails with one command.
### Data Layer: context.json
After results are collected, the orchestrator builds `context.json` for each project by merging `summary.json` (optimization results from the VM) with `projects.json` (contact/company info). This is the single source of truth for all email variables.
```json
{
"first_name": "Gil",
"full_name": "Gil Tene",
"title": "CTO",
"company_name": "Azul",
"repo": "azul/zulu-openjdk",
"repo_name": "zulu-openjdk",
"fork_url": "https://github.com/codeflash-ai/zulu-openjdk",
"branch_url": "https://github.com/codeflash-ai/zulu-openjdk/tree/codeflash/optimize",
"num_optimizations": 12,
"best_function": "parseClassFile",
"best_speedup": "5x faster",
"best_description": "replaced linear scan with hash lookup in class file parser",
"second_best_function": "resolveMethod",
"second_best_technique": "eliminated redundant deepcopy in method resolution",
"total_cpu_improvement_pct": 34,
"total_memory_saved_mb": 120,
"optimizations_summary": "12 merge-ready commits including 5x faster class file parsing, 3x faster method resolution, and 40% memory reduction in bytecode verification",
"calendly_link": "https://calendly.com/codeflash-saurabh/30min"
}
```
### Template Layer: Editable Email Templates
Templates live in `email_templates/` and use `{variable_name}` placeholders. Edit these any time, then re-render.
**`email_templates/email_1_proof.md`**
```
Subject: I created {num_optimizations} PRs that speed up {repo_name}
Hi {first_name},
I'm Saurabh, ex-CMU and Meta, and CEO and Founder of Codeflash.
We work with companies like Unstructured.io and HuggingFace who all face a
growing challenge: as AI coding tools generate more of the codebase, performance
regressions slip in faster than teams can catch them.
Codeflash is the performance layer that sits on top of your AI coding workflow.
It finds provably faster implementations for your existing code and ensures
every new PR ships optimized.
I ran Codeflash on a fork of {repo} and created {num_optimizations} merge-ready
commits that significantly speed up several crucial components -- {best_function}
now runs {best_speedup}! {optimizations_summary}
Unstructured.io used Codeflash across their entire infrastructure and cut
compute costs by 50%.
I'd love to walk you through the results and show you how much more free
performance is hiding across your full codebase. Would sometime this week work?
{calendly_link}
Thanks,
Saurabh
Founder, Codeflash.ai
```
**`email_templates/email_2_followup.md`**
```
Subject: Re: I created {num_optimizations} PRs that speed up {repo_name}
Hi {first_name},
Wanted to follow up -- did your team get a chance to look at the optimizations?
You can see all {num_optimizations} commits here:
{branch_url}
If those didn't hit the right area of your codebase, I have an open offer:
share any performance benchmark your team cares about, and I'll run Codeflash
against it and send you the results. No commitment, just proof.
{calendly_link}
Saurabh
Founder, codeflash.ai
```
**`email_templates/email_3_risk.md`**
```
Subject: Re: I created {num_optimizations} PRs that speed up {repo_name}
Hi {first_name},
With AI coding tools writing more of the code, performance regressions are
showing up faster and quieter than before. By the time they surface, it's
production issues and fire drills.
Unstructured.io plugged Codeflash into their workflow and cut compute costs
by 50% -- and now every PR is automatically checked before it merges.
Happy to show you what that looks like for {company_name}.
{calendly_link}
Saurabh
Founder, codeflash.ai
```
### Rendering: Template + Data = Emails
```python
def build_context(project_id):
"""Build context.json from summary.json + projects.json. Idempotent."""
project = load_project(project_id)
summary = json.load(open(f"results/{project_id}/summary.json"))
if summary["status"] == "failed" or summary["total_keeps"] == 0:
return None
best = summary["headline_stats"]
opts = summary["optimizations"]
context = {
"first_name": project["target_contact"]["name"].split()[0],
"full_name": project["target_contact"]["name"],
"title": project["target_contact"]["title"],
"company_name": project["company"],
"repo": project["repo"],
"repo_name": project["repo"].split("/")[1],
"fork_url": f"https://github.com/codeflash-ai/{project['repo'].split('/')[1]}",
"branch_url": f"https://github.com/codeflash-ai/{project['repo'].split('/')[1]}/tree/codeflash/optimize",
"num_optimizations": summary["total_keeps"],
"best_function": best["best_function"],
"best_speedup": best["best_single_speedup"],
"best_description": opts[0]["description"] if opts else "",
"second_best_function": opts[1]["function"] if len(opts) > 1 else "",
"second_best_technique": opts[1]["technique"] if len(opts) > 1 else "",
"total_cpu_improvement_pct": best.get("total_cpu_improvement_pct", 0),
"total_memory_saved_mb": best.get("total_memory_saved_mb", 0),
"optimizations_summary": build_summary_sentence(opts),
"calendly_link": "https://calendly.com/codeflash-saurabh/30min",
}
write_json(f"results/{project_id}/context.json", context)
return context
def render_emails(project_id):
"""Render all email templates for a project. Re-run after template edits."""
context = json.load(open(f"results/{project_id}/context.json"))
os.makedirs(f"results/{project_id}/emails", exist_ok=True)
for template_file in sorted(glob("email_templates/email_*.md")):
template = open(template_file).read()
rendered = template.format(**context)
out_name = os.path.basename(template_file)
write_file(f"results/{project_id}/emails/{out_name}", rendered)
def render_all_emails():
    """Re-render emails for ALL completed projects. Use after editing a template."""
    completed = get_completed_project_ids()
    for pid in completed:
        render_emails(pid)
```
### Orchestrator Email Commands
```bash
# Build context + render emails for one project (after results collected)
python orchestrator.py email <project-id>
# Re-render ALL project emails after you edit a template
python orchestrator.py email --rerender-all
# Preview rendered emails in terminal
python orchestrator.py email <project-id> --show
# Show raw context data (for debugging or manual override)
python orchestrator.py email <project-id> --show-context
```
### Email Editing Workflow
1. **Edit the template** (affects all future projects):
- Edit `email_templates/email_1_proof.md` (or email_2, email_3)
- Run `python orchestrator.py email --rerender-all`
- All project emails are regenerated
2. **Override data for one project** (e.g. fix a name):
- Edit `results/<id>/context.json` directly
- Run `python orchestrator.py email <project-id>`
- Only that project's emails are regenerated
3. **One-off edit for a single email** (e.g. add a personal note):
- Edit `results/<id>/emails/email_1_proof.md` directly
- This is the final rendered copy -- it won't be overwritten unless you explicitly re-render
---
## Phase 5: Review, Present Results & Send
This phase is intentionally manual. The orchestrator gives you all the data and rendered emails; you decide what ships.
### Results Presentation
```bash
# Dashboard: all projects at a glance
python orchestrator.py status
ID Repo Status Opts Best Email
project-001 pallets/flask completed 12 5x faster DRAFT READY
project-002 encode/httpx completed 8 3x faster DRAFT READY
project-003 tiangolo/fastapi failed 0 -- --
# Deep dive into one project
python orchestrator.py results <project-id>
Project: pallets/flask
Status: completed
Branch: https://github.com/codeflash-ai/flask/tree/codeflash/optimize
Experiments: 18 total (12 kept, 6 discarded)
Top optimizations:
1. parse_rule() 5.0x faster replaced regex with string split
2. match_request() 3.2x faster eliminated redundant dict copy
3. send_file() 2.1x faster switched to sendfile() syscall
4. url_for() 1.8x faster cached reverse route lookup
...
Headline: 34% total CPU improvement, 120 MiB memory saved
Contact: Armin Ronacher (Creator) -- armin@palletsprojects.com
Emails: results/project-001/emails/
```
### Review Checklist
For each project:
1. **Review the optimizations**: Open the fork on GitHub (link from `orchestrator.py results <id>`). Scan the commits. Are they legit?
2. **Review the emails**: `orchestrator.py email <id> --show`. Check numbers, names, tone.
3. **Edit if needed** (see Email Editing Workflow above).
4. **Send**: Copy from `results/<id>/emails/` into your email client or Apollo. Mark as sent:
```bash
python orchestrator.py mark-sent <project-id>
```
---
## POC Plan: 2 Projects
### Step 1: Select Projects
Pick 2 projects. Suggested criteria for POC:
- One Python project, one Java or JS/TS project (to prove multi-language)
- Both should have >2k stars and active test suites
- Companies behind them should be in the 20-500 employee range
- The engineering contact should be identifiable via LinkedIn/Apollo
### Step 2: Build the Orchestrator (MVP)
For the POC, the orchestrator can be simplified:
```
experiments/universe-optimize/
orchestrator.py # Main script
optimization.md # Template injected as CLAUDE.md on VMs
projects.json # The 2 POC projects
bootstrap.sh # VM setup script
email_templates/
email_1_proof.md
email_2_followup.md
email_3_risk.md
results/ # Collected results (gitignored)
```
MVP orchestrator commands:
- `python orchestrator.py provision <project-id>` -- Create VM, fork repo, bootstrap
- `python orchestrator.py run <project-id>` -- SSH in and launch Claude Code
- `python orchestrator.py status [project-id]` -- Check status of all or one project
- `python orchestrator.py collect <project-id>` -- SCP results, push branch, destroy VM
- `python orchestrator.py email <project-id>` -- Generate email draft from results
- `python orchestrator.py status-all` -- Dashboard view
### Step 3: Run & Iterate
1. Provision both VMs in parallel
2. Launch Claude Code on both
3. Monitor (check every 10 min)
4. Collect results when done
5. Review optimizations on GitHub
6. Review and edit email drafts
7. Send emails manually
8. Destroy VMs
### Step 4: Evaluate POC
After the 2 projects complete, assess:
- **Quality**: Were the optimizations real and meaningful? Would they impress an engineering leader?
- **Reliability**: Did Claude complete successfully, or did it get stuck/fail?
- **Cost**: What was the Azure + Anthropic API cost per project?
- **Time**: How long did each optimization run take?
- **Email quality**: Did the auto-generated emails need heavy editing?
Use these findings to decide whether to scale to 10, then 100, then 1000 projects.
---
## Scaling Considerations (Post-POC)
### Parallelism
- POC: 2 VMs at a time
- Scale: batch in groups of 10-20 VMs to manage cost and API rate limits
- Use Azure VMSS (Virtual Machine Scale Sets) for easier provisioning at scale
### Cost Control
- Auto-destroy VMs after 24 hours regardless of status
- Estimated per-project cost: ~$2-5 Azure compute + $5-20 Anthropic API = $7-25/project
- At 1000 projects: $7k-25k total
### Project Discovery Automation
- GitHub API search + filtering pipeline
- Sumble API for company enrichment (as in existing outbound program)
- Apollo API for contact discovery (as in existing outbound program)
- Auto-populate projects.json with enriched data
### Results Quality Gate
- Auto-skip email generation if <3 optimizations found
- Auto-skip if no optimization exceeds 1.5x speedup (not impressive enough)
- Flag projects where Claude failed for manual investigation
### Email Pipeline Integration
- Instead of manual send, integrate with Apollo sequences
- Auto-create drafts in Apollo, linked to the contact list
- Still require manual approval before sending
---
## File Inventory
| File | Purpose |
|------|---------|
| `program.md` | This document -- the full program specification |
| `plan.md` | Original high-level plan (kept for reference) |
| `orchestrator.py` | Main orchestrator script |
| `optimization.md` | Template injected as CLAUDE.md on each VM |
| `bootstrap.sh` | VM setup script (copied to VM and executed) |
| `projects.json` | Project list with metadata and contacts |
| `universe_optimize.db` | SQLite state database |
| `email_templates/*.md` | Email templates with placeholders |
| `results/<id>/` | Per-project results directory |
---
## Open Questions
1. **API key management**: How to securely inject the Anthropic API key onto VMs? Options: Azure Key Vault, environment variable via SSH, or baked into VM image. -> Env vars via SSH
2. **GitHub auth on VMs**: Need a GitHub PAT or deploy key on each VM for cloning and pushing. Use a dedicated bot account? -> PAT
3. **Claude Code model**: Use Opus for maximum quality, or Sonnet for cost savings? POC should use Opus; scale run could use Sonnet with Opus fallback for stuck sessions.
4. **Java/JS optimization**: The codeflash-agent plugin is Python-focused today. For Java/JS projects, Claude will need to optimize without the plugin -- just using its own profiling/optimization knowledge. This may produce lower quality results. Should we restrict POC to Python only? -> Let's restrict the POC to Python.
5. **Rate limits**: Anthropic API rate limits at scale. May need multiple API keys or request a rate limit increase.

View file

@ -0,0 +1,34 @@
[
{
"id": "unstructured",
"repo": "Unstructured-IO/unstructured",
"language": "python",
"stars": 14400,
"description": "Open-source ETL for transforming complex documents (PDFs, HTML, Word) into structured data for LLMs",
"company": "Unstructured.io",
"domain": "data processing / ML infrastructure",
"target_contact": {
"name": "Matt Robinson",
"title": "Co-founder",
"email": "",
"linkedin": "https://linkedin.com/in/maborobinson"
},
"why_selected": "Existing customer relationship, 14k+ stars, performance-critical document processing pipelines, Python-heavy codebase with good test coverage"
},
{
"id": "pillow",
"repo": "python-pillow/Pillow",
"language": "python",
"stars": 13500,
"description": "The friendly PIL fork -- Python Imaging Library with extensive format support and image manipulation",
"company": "python-pillow (community)",
"domain": "image processing",
"target_contact": {
"name": "Andrew Murray",
"title": "Lead Maintainer",
"email": "",
"linkedin": ""
},
"why_selected": "13k+ stars, performance-critical image processing library used everywhere, Python with C extensions, active test suite, widely depended upon"
}
]

View file

@ -0,0 +1,9 @@
[project]
name = "universe-optimize"
version = "0.1.0"
description = "Orchestrator for the Universe Optimize outbound program: provisions VMs, runs optimization sessions, collects results, and renders outreach emails"
requires-python = ">=3.13"
dependencies = [
"paramiko>=4.0.0",
"pygithub>=2.9.0",
]

View file

@ -0,0 +1,388 @@
version = 1
revision = 2
requires-python = ">=3.13"
[[package]]
name = "bcrypt"
version = "5.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" },
{ url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" },
{ url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" },
{ url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" },
{ url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" },
{ url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" },
{ url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" },
{ url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" },
{ url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" },
{ url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" },
{ url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" },
{ url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" },
{ url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" },
{ url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" },
{ url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" },
{ url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" },
{ url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" },
{ url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" },
{ url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" },
{ url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" },
{ url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" },
{ url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" },
{ url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" },
{ url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" },
{ url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" },
{ url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" },
{ url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" },
{ url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" },
{ url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" },
{ url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" },
{ url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" },
{ url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" },
{ url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" },
{ url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" },
{ url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" },
{ url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" },
{ url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" },
{ url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" },
{ url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" },
{ url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" },
{ url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" },
{ url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" },
{ url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" },
{ url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" },
{ url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" },
{ url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" },
{ url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" },
{ url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" },
{ url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" },
{ url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" },
{ url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" },
{ url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" },
{ url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" },
{ url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" },
{ url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" },
{ url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" },
{ url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" },
{ url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" },
]
[[package]]
name = "certifi"
version = "2026.2.25"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" },
]
[[package]]
name = "cffi"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pycparser", marker = "implementation_name != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
{ url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
{ url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
{ url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
{ url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
{ url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
{ url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
{ url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
{ url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
{ url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
{ url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
{ url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
{ url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
{ url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
{ url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
{ url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
{ url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
{ url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
{ url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
{ url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
{ url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
{ url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
{ url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
{ url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
{ url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
{ url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
{ url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
{ url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
{ url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
{ url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
{ url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
{ url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
{ url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
{ url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
]
[[package]]
name = "charset-normalizer"
version = "3.4.7"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" },
{ url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" },
{ url = "https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" },
{ url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" },
{ url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" },
{ url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" },
{ url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" },
{ url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" },
{ url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" },
{ url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" },
{ url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" },
{ url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" },
{ url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" },
{ url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", size = 148085, upload-time = "2026-04-02T09:27:03.192Z" },
{ url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" },
{ url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" },
{ url = "https://files.pythonhosted.org/packages/97/c8/c67cb8c70e19ef1960b97b22ed2a1567711de46c4ddf19799923adc836c2/charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0", size = 309234, upload-time = "2026-04-02T09:27:07.194Z" },
{ url = "https://files.pythonhosted.org/packages/99/85/c091fdee33f20de70d6c8b522743b6f831a2f1cd3ff86de4c6a827c48a76/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a", size = 208042, upload-time = "2026-04-02T09:27:08.749Z" },
{ url = "https://files.pythonhosted.org/packages/87/1c/ab2ce611b984d2fd5d86a5a8a19c1ae26acac6bad967da4967562c75114d/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b", size = 228706, upload-time = "2026-04-02T09:27:09.951Z" },
{ url = "https://files.pythonhosted.org/packages/a8/29/2b1d2cb00bf085f59d29eb773ce58ec2d325430f8c216804a0a5cd83cbca/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41", size = 224727, upload-time = "2026-04-02T09:27:11.175Z" },
{ url = "https://files.pythonhosted.org/packages/47/5c/032c2d5a07fe4d4855fea851209cca2b6f03ebeb6d4e3afdb3358386a684/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e", size = 215882, upload-time = "2026-04-02T09:27:12.446Z" },
{ url = "https://files.pythonhosted.org/packages/2c/c2/356065d5a8b78ed04499cae5f339f091946a6a74f91e03476c33f0ab7100/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae", size = 200860, upload-time = "2026-04-02T09:27:13.721Z" },
{ url = "https://files.pythonhosted.org/packages/0c/cd/a32a84217ced5039f53b29f460962abb2d4420def55afabe45b1c3c7483d/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18", size = 211564, upload-time = "2026-04-02T09:27:15.272Z" },
{ url = "https://files.pythonhosted.org/packages/44/86/58e6f13ce26cc3b8f4a36b94a0f22ae2f00a72534520f4ae6857c4b81f89/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b", size = 211276, upload-time = "2026-04-02T09:27:16.834Z" },
{ url = "https://files.pythonhosted.org/packages/8f/fe/d17c32dc72e17e155e06883efa84514ca375f8a528ba2546bee73fc4df81/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356", size = 201238, upload-time = "2026-04-02T09:27:18.229Z" },
{ url = "https://files.pythonhosted.org/packages/6a/29/f33daa50b06525a237451cdb6c69da366c381a3dadcd833fa5676bc468b3/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab", size = 230189, upload-time = "2026-04-02T09:27:19.445Z" },
{ url = "https://files.pythonhosted.org/packages/b6/6e/52c84015394a6a0bdcd435210a7e944c5f94ea1055f5cc5d56c5fe368e7b/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46", size = 211352, upload-time = "2026-04-02T09:27:20.79Z" },
{ url = "https://files.pythonhosted.org/packages/8c/d7/4353be581b373033fb9198bf1da3cf8f09c1082561e8e922aa7b39bf9fe8/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44", size = 227024, upload-time = "2026-04-02T09:27:22.063Z" },
{ url = "https://files.pythonhosted.org/packages/30/45/99d18aa925bd1740098ccd3060e238e21115fffbfdcb8f3ece837d0ace6c/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72", size = 217869, upload-time = "2026-04-02T09:27:23.486Z" },
{ url = "https://files.pythonhosted.org/packages/5c/05/5ee478aa53f4bb7996482153d4bfe1b89e0f087f0ab6b294fcf92d595873/charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10", size = 148541, upload-time = "2026-04-02T09:27:25.146Z" },
{ url = "https://files.pythonhosted.org/packages/48/77/72dcb0921b2ce86420b2d79d454c7022bf5be40202a2a07906b9f2a35c97/charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f", size = 159634, upload-time = "2026-04-02T09:27:26.642Z" },
{ url = "https://files.pythonhosted.org/packages/c6/a3/c2369911cd72f02386e4e340770f6e158c7980267da16af8f668217abaa0/charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246", size = 148384, upload-time = "2026-04-02T09:27:28.271Z" },
{ url = "https://files.pythonhosted.org/packages/94/09/7e8a7f73d24dba1f0035fbbf014d2c36828fc1bf9c88f84093e57d315935/charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24", size = 330133, upload-time = "2026-04-02T09:27:29.474Z" },
{ url = "https://files.pythonhosted.org/packages/8d/da/96975ddb11f8e977f706f45cddd8540fd8242f71ecdb5d18a80723dcf62c/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79", size = 216257, upload-time = "2026-04-02T09:27:30.793Z" },
{ url = "https://files.pythonhosted.org/packages/e5/e8/1d63bf8ef2d388e95c64b2098f45f84758f6d102a087552da1485912637b/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960", size = 234851, upload-time = "2026-04-02T09:27:32.44Z" },
{ url = "https://files.pythonhosted.org/packages/9b/40/e5ff04233e70da2681fa43969ad6f66ca5611d7e669be0246c4c7aaf6dc8/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4", size = 233393, upload-time = "2026-04-02T09:27:34.03Z" },
{ url = "https://files.pythonhosted.org/packages/be/c1/06c6c49d5a5450f76899992f1ee40b41d076aee9279b49cf9974d2f313d5/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e", size = 223251, upload-time = "2026-04-02T09:27:35.369Z" },
{ url = "https://files.pythonhosted.org/packages/2b/9f/f2ff16fb050946169e3e1f82134d107e5d4ae72647ec8a1b1446c148480f/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1", size = 206609, upload-time = "2026-04-02T09:27:36.661Z" },
{ url = "https://files.pythonhosted.org/packages/69/d5/a527c0cd8d64d2eab7459784fb4169a0ac76e5a6fc5237337982fd61347e/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44", size = 220014, upload-time = "2026-04-02T09:27:38.019Z" },
{ url = "https://files.pythonhosted.org/packages/7e/80/8a7b8104a3e203074dc9aa2c613d4b726c0e136bad1cc734594b02867972/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e", size = 218979, upload-time = "2026-04-02T09:27:39.37Z" },
{ url = "https://files.pythonhosted.org/packages/02/9a/b759b503d507f375b2b5c153e4d2ee0a75aa215b7f2489cf314f4541f2c0/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3", size = 209238, upload-time = "2026-04-02T09:27:40.722Z" },
{ url = "https://files.pythonhosted.org/packages/c2/4e/0f3f5d47b86bdb79256e7290b26ac847a2832d9a4033f7eb2cd4bcf4bb5b/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0", size = 236110, upload-time = "2026-04-02T09:27:42.33Z" },
{ url = "https://files.pythonhosted.org/packages/96/23/bce28734eb3ed2c91dcf93abeb8a5cf393a7b2749725030bb630e554fdd8/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e", size = 219824, upload-time = "2026-04-02T09:27:43.924Z" },
{ url = "https://files.pythonhosted.org/packages/2c/6f/6e897c6984cc4d41af319b077f2f600fc8214eb2fe2d6bcb79141b882400/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb", size = 233103, upload-time = "2026-04-02T09:27:45.348Z" },
{ url = "https://files.pythonhosted.org/packages/76/22/ef7bd0fe480a0ae9b656189ec00744b60933f68b4f42a7bb06589f6f576a/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe", size = 225194, upload-time = "2026-04-02T09:27:46.706Z" },
{ url = "https://files.pythonhosted.org/packages/c5/a7/0e0ab3e0b5bc1219bd80a6a0d4d72ca74d9250cb2382b7c699c147e06017/charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = "sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0", size = 159827, upload-time = "2026-04-02T09:27:48.053Z" },
{ url = "https://files.pythonhosted.org/packages/7a/1d/29d32e0fb40864b1f878c7f5a0b343ae676c6e2b271a2d55cc3a152391da/charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c", size = 174168, upload-time = "2026-04-02T09:27:49.795Z" },
{ url = "https://files.pythonhosted.org/packages/de/32/d92444ad05c7a6e41fb2036749777c163baf7a0301a040cb672d6b2b1ae9/charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d", size = 153018, upload-time = "2026-04-02T09:27:51.116Z" },
{ url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = "2026-04-02T09:28:37.794Z" },
]
[[package]]
name = "cryptography"
version = "46.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" },
{ url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" },
{ url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" },
{ url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" },
{ url = "https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" },
{ url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" },
{ url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" },
{ url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" },
{ url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" },
{ url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" },
{ url = "https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" },
{ url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" },
{ url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" },
{ url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" },
{ url = "https://files.pythonhosted.org/packages/01/41/3a578f7fd5c70611c0aacba52cd13cb364a5dee895a5c1d467208a9380b0/cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275", size = 7117147, upload-time = "2026-03-25T23:33:48.249Z" },
{ url = "https://files.pythonhosted.org/packages/fa/87/887f35a6fca9dde90cad08e0de0c89263a8e59b2d2ff904fd9fcd8025b6f/cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4", size = 4266221, upload-time = "2026-03-25T23:33:49.874Z" },
{ url = "https://files.pythonhosted.org/packages/aa/a8/0a90c4f0b0871e0e3d1ed126aed101328a8a57fd9fd17f00fb67e82a51ca/cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b", size = 4408952, upload-time = "2026-03-25T23:33:52.128Z" },
{ url = "https://files.pythonhosted.org/packages/16/0b/b239701eb946523e4e9f329336e4ff32b1247e109cbab32d1a7b61da8ed7/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707", size = 4270141, upload-time = "2026-03-25T23:33:54.11Z" },
{ url = "https://files.pythonhosted.org/packages/0f/a8/976acdd4f0f30df7b25605f4b9d3d89295351665c2091d18224f7ad5cdbf/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361", size = 4904178, upload-time = "2026-03-25T23:33:55.725Z" },
{ url = "https://files.pythonhosted.org/packages/b1/1b/bf0e01a88efd0e59679b69f42d4afd5bced8700bb5e80617b2d63a3741af/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b", size = 4441812, upload-time = "2026-03-25T23:33:57.364Z" },
{ url = "https://files.pythonhosted.org/packages/bb/8b/11df86de2ea389c65aa1806f331cae145f2ed18011f30234cc10ca253de8/cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca", size = 3963923, upload-time = "2026-03-25T23:33:59.361Z" },
{ url = "https://files.pythonhosted.org/packages/91/e0/207fb177c3a9ef6a8108f234208c3e9e76a6aa8cf20d51932916bd43bda0/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013", size = 4269695, upload-time = "2026-03-25T23:34:00.909Z" },
{ url = "https://files.pythonhosted.org/packages/21/5e/19f3260ed1e95bced52ace7501fabcd266df67077eeb382b79c81729d2d3/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4", size = 4869785, upload-time = "2026-03-25T23:34:02.796Z" },
{ url = "https://files.pythonhosted.org/packages/10/38/cd7864d79aa1d92ef6f1a584281433419b955ad5a5ba8d1eb6c872165bcb/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a", size = 4441404, upload-time = "2026-03-25T23:34:04.35Z" },
{ url = "https://files.pythonhosted.org/packages/09/0a/4fe7a8d25fed74419f91835cf5829ade6408fd1963c9eae9c4bce390ecbb/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d", size = 4397549, upload-time = "2026-03-25T23:34:06.342Z" },
{ url = "https://files.pythonhosted.org/packages/5f/a0/7d738944eac6513cd60a8da98b65951f4a3b279b93479a7e8926d9cd730b/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736", size = 4651874, upload-time = "2026-03-25T23:34:07.916Z" },
{ url = "https://files.pythonhosted.org/packages/cb/f1/c2326781ca05208845efca38bf714f76939ae446cd492d7613808badedf1/cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed", size = 3001511, upload-time = "2026-03-25T23:34:09.892Z" },
{ url = "https://files.pythonhosted.org/packages/c9/57/fe4a23eb549ac9d903bd4698ffda13383808ef0876cc912bcb2838799ece/cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4", size = 3471692, upload-time = "2026-03-25T23:34:11.613Z" },
{ url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" },
{ url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" },
{ url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" },
{ url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" },
{ url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" },
{ url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" },
{ url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" },
{ url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" },
{ url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" },
{ url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" },
{ url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" },
{ url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" },
{ url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" },
{ url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" },
]
[[package]]
name = "idna"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
[[package]]
name = "invoke"
version = "2.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/de/bd/b461d3424a24c80490313fd77feeb666ca4f6a28c7e72713e3d9095719b4/invoke-2.2.1.tar.gz", hash = "sha256:515bf49b4a48932b79b024590348da22f39c4942dff991ad1fb8b8baea1be707", size = 304762, upload-time = "2025-10-11T00:36:35.172Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/4b/b99e37f88336009971405cbb7630610322ed6fbfa31e1d7ab3fbf3049a2d/invoke-2.2.1-py3-none-any.whl", hash = "sha256:2413bc441b376e5cd3f55bb5d364f973ad8bdd7bf87e53c79de3c11bf3feecc8", size = 160287, upload-time = "2025-10-11T00:36:33.703Z" },
]
[[package]]
name = "paramiko"
version = "4.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "bcrypt" },
{ name = "cryptography" },
{ name = "invoke" },
{ name = "pynacl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1f/e7/81fdcbc7f190cdb058cffc9431587eb289833bdd633e2002455ca9bb13d4/paramiko-4.0.0.tar.gz", hash = "sha256:6a25f07b380cc9c9a88d2b920ad37167ac4667f8d9886ccebd8f90f654b5d69f", size = 1630743, upload-time = "2025-08-04T01:02:03.711Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/90/a744336f5af32c433bd09af7854599682a383b37cfd78f7de263de6ad6cb/paramiko-4.0.0-py3-none-any.whl", hash = "sha256:0e20e00ac666503bf0b4eda3b6d833465a2b7aff2e2b3d79a8bba5ef144ee3b9", size = 223932, upload-time = "2025-08-04T01:02:02.029Z" },
]
[[package]]
name = "pycparser"
version = "3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" },
]
[[package]]
name = "pygithub"
version = "2.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyjwt", extra = ["crypto"] },
{ name = "pynacl" },
{ name = "requests" },
{ name = "typing-extensions" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a6/9a/44f918e9be12e49cb8b053f09d5d0733b74df52bf4dabc570da1c3ecd9f6/pygithub-2.9.0.tar.gz", hash = "sha256:a26abda1222febba31238682634cad11d8b966137ed6cc3c5e445b29a11cb0a4", size = 2592289, upload-time = "2026-03-22T21:14:39.053Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/de/72e02bc7674e161b155a4b5a03b2347129d0626115bc97ba5bad5070cac9/pygithub-2.9.0-py3-none-any.whl", hash = "sha256:5e2b260ce327bffce9b00f447b65953ef7078ffe93e5a5425624a3075483927c", size = 449653, upload-time = "2026-03-22T21:14:37.726Z" },
]
[[package]]
name = "pyjwt"
version = "2.12.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c2/27/a3b6e5bf6ff856d2509292e95c8f57f0df7017cf5394921fc4e4ef40308a/pyjwt-2.12.1.tar.gz", hash = "sha256:c74a7a2adf861c04d002db713dd85f84beb242228e671280bf709d765b03672b", size = 102564, upload-time = "2026-03-13T19:27:37.25Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/7a/8dd906bd22e79e47397a61742927f6747fe93242ef86645ee9092e610244/pyjwt-2.12.1-py3-none-any.whl", hash = "sha256:28ca37c070cad8ba8cd9790cd940535d40274d22f80ab87f3ac6a713e6e8454c", size = 29726, upload-time = "2026-03-13T19:27:35.677Z" },
]
[package.optional-dependencies]
crypto = [
{ name = "cryptography" },
]
[[package]]
name = "pynacl"
version = "1.6.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d9/9a/4019b524b03a13438637b11538c82781a5eda427394380381af8f04f467a/pynacl-1.6.2.tar.gz", hash = "sha256:018494d6d696ae03c7e656e5e74cdfd8ea1326962cc401bcf018f1ed8436811c", size = 3511692, upload-time = "2026-01-01T17:48:10.851Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4b/79/0e3c34dc3c4671f67d251c07aa8eb100916f250ee470df230b0ab89551b4/pynacl-1.6.2-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594", size = 390064, upload-time = "2026-01-01T17:31:57.264Z" },
{ url = "https://files.pythonhosted.org/packages/eb/1c/23a26e931736e13b16483795c8a6b2f641bf6a3d5238c22b070a5112722c/pynacl-1.6.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0", size = 809370, upload-time = "2026-01-01T17:31:59.198Z" },
{ url = "https://files.pythonhosted.org/packages/87/74/8d4b718f8a22aea9e8dcc8b95deb76d4aae380e2f5b570cc70b5fd0a852d/pynacl-1.6.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9", size = 1408304, upload-time = "2026-01-01T17:32:01.162Z" },
{ url = "https://files.pythonhosted.org/packages/fd/73/be4fdd3a6a87fe8a4553380c2b47fbd1f7f58292eb820902f5c8ac7de7b0/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574", size = 844871, upload-time = "2026-01-01T17:32:02.824Z" },
{ url = "https://files.pythonhosted.org/packages/55/ad/6efc57ab75ee4422e96b5f2697d51bbcf6cdcc091e66310df91fbdc144a8/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634", size = 1446356, upload-time = "2026-01-01T17:32:04.452Z" },
{ url = "https://files.pythonhosted.org/packages/78/b7/928ee9c4779caa0a915844311ab9fb5f99585621c5d6e4574538a17dca07/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88", size = 826814, upload-time = "2026-01-01T17:32:06.078Z" },
{ url = "https://files.pythonhosted.org/packages/f7/a9/1bdba746a2be20f8809fee75c10e3159d75864ef69c6b0dd168fc60e485d/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14", size = 1411742, upload-time = "2026-01-01T17:32:07.651Z" },
{ url = "https://files.pythonhosted.org/packages/f3/2f/5e7ea8d85f9f3ea5b6b87db1d8388daa3587eed181bdeb0306816fdbbe79/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444", size = 801714, upload-time = "2026-01-01T17:32:09.558Z" },
{ url = "https://files.pythonhosted.org/packages/06/ea/43fe2f7eab5f200e40fb10d305bf6f87ea31b3bbc83443eac37cd34a9e1e/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b", size = 1372257, upload-time = "2026-01-01T17:32:11.026Z" },
{ url = "https://files.pythonhosted.org/packages/4d/54/c9ea116412788629b1347e415f72195c25eb2f3809b2d3e7b25f5c79f13a/pynacl-1.6.2-cp314-cp314t-win32.whl", hash = "sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145", size = 231319, upload-time = "2026-01-01T17:32:12.46Z" },
{ url = "https://files.pythonhosted.org/packages/ce/04/64e9d76646abac2dccf904fccba352a86e7d172647557f35b9fe2a5ee4a1/pynacl-1.6.2-cp314-cp314t-win_amd64.whl", hash = "sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590", size = 244044, upload-time = "2026-01-01T17:32:13.781Z" },
{ url = "https://files.pythonhosted.org/packages/33/33/7873dc161c6a06f43cda13dec67b6fe152cb2f982581151956fa5e5cdb47/pynacl-1.6.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2", size = 188740, upload-time = "2026-01-01T17:32:15.083Z" },
{ url = "https://files.pythonhosted.org/packages/be/7b/4845bbf88e94586ec47a432da4e9107e3fc3ce37eb412b1398630a37f7dd/pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465", size = 388458, upload-time = "2026-01-01T17:32:16.829Z" },
{ url = "https://files.pythonhosted.org/packages/1e/b4/e927e0653ba63b02a4ca5b4d852a8d1d678afbf69b3dbf9c4d0785ac905c/pynacl-1.6.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0", size = 800020, upload-time = "2026-01-01T17:32:18.34Z" },
{ url = "https://files.pythonhosted.org/packages/7f/81/d60984052df5c97b1d24365bc1e30024379b42c4edcd79d2436b1b9806f2/pynacl-1.6.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4", size = 1399174, upload-time = "2026-01-01T17:32:20.239Z" },
{ url = "https://files.pythonhosted.org/packages/68/f7/322f2f9915c4ef27d140101dd0ed26b479f7e6f5f183590fd32dfc48c4d3/pynacl-1.6.2-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87", size = 835085, upload-time = "2026-01-01T17:32:22.24Z" },
{ url = "https://files.pythonhosted.org/packages/3e/d0/f301f83ac8dbe53442c5a43f6a39016f94f754d7a9815a875b65e218a307/pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c", size = 1437614, upload-time = "2026-01-01T17:32:23.766Z" },
{ url = "https://files.pythonhosted.org/packages/c4/58/fc6e649762b029315325ace1a8c6be66125e42f67416d3dbd47b69563d61/pynacl-1.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130", size = 818251, upload-time = "2026-01-01T17:32:25.69Z" },
{ url = "https://files.pythonhosted.org/packages/c9/a8/b917096b1accc9acd878819a49d3d84875731a41eb665f6ebc826b1af99e/pynacl-1.6.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6", size = 1402859, upload-time = "2026-01-01T17:32:27.215Z" },
{ url = "https://files.pythonhosted.org/packages/85/42/fe60b5f4473e12c72f977548e4028156f4d340b884c635ec6b063fe7e9a5/pynacl-1.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e", size = 791926, upload-time = "2026-01-01T17:32:29.314Z" },
{ url = "https://files.pythonhosted.org/packages/fa/f9/e40e318c604259301cc091a2a63f237d9e7b424c4851cafaea4ea7c4834e/pynacl-1.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577", size = 1363101, upload-time = "2026-01-01T17:32:31.263Z" },
{ url = "https://files.pythonhosted.org/packages/48/47/e761c254f410c023a469284a9bc210933e18588ca87706ae93002c05114c/pynacl-1.6.2-cp38-abi3-win32.whl", hash = "sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa", size = 227421, upload-time = "2026-01-01T17:32:33.076Z" },
{ url = "https://files.pythonhosted.org/packages/41/ad/334600e8cacc7d86587fe5f565480fde569dfb487389c8e1be56ac21d8ac/pynacl-1.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0", size = 239754, upload-time = "2026-01-01T17:32:34.557Z" },
{ url = "https://files.pythonhosted.org/packages/29/7d/5945b5af29534641820d3bd7b00962abbbdfee84ec7e19f0d5b3175f9a31/pynacl-1.6.2-cp38-abi3-win_arm64.whl", hash = "sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c", size = 184801, upload-time = "2026-01-01T17:32:36.309Z" },
]
[[package]]
name = "requests"
version = "2.33.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "charset-normalizer" },
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" },
]
[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]
[[package]]
name = "universe-optimize"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "paramiko" },
{ name = "pygithub" },
]
[package.metadata]
requires-dist = [
{ name = "paramiko", specifier = ">=4.0.0" },
{ name = "pygithub", specifier = ">=2.9.0" },
]
[[package]]
name = "urllib3"
version = "2.6.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
]