perf: fix Sentry-reported errors and speed bottlenecks across all services

UUID validation (1,459 errors/month):
- Add normalize_trace_id() to handle EXP0/EXP1 experiment suffixes
- Add validate_trace_id checks to optimization_review, testgen review/repair
- Fix bare UUID() call in log_event.py that caused 960 errors/month
- Return 400 instead of 500 for invalid trace_id in log_features

CodeValidationError (1,068 errors/month):
- Add repair_preamble() to fix broken LLM-generated test preambles
- 9 new tests covering preamble repair and end-to-end validation

create-pr speed (11.8s → ~6s):
- Parallelize auth lookups with Promise.all
- Parallelize post-PR-creation DB updates and GitHub API calls

apikeys LCP (5,084ms → ~2s):
- Single server-side getDashboardInitData() replaces 5 sequential client fetches
- PrivacyModeContext, ViewModeContext, Sidebar accept initial data props

optimization-prs speed (7.2s → ~3s):
- Replace LEFT JOIN COUNT on huge JSONB table with EXISTS semi-join
- Add composite indexes on optimization_events for org and user queries
- Add sizes="32px" to avatar images to prevent layout warnings
This commit is contained in:
Kevin Turcios 2026-04-10 06:08:00 -05:00
parent 3c9beac4b5
commit e2731745fb
19 changed files with 598 additions and 199 deletions

View file

@ -47,18 +47,21 @@ def parse_python_version(version: str | None) -> tuple[int, int, int]:
return (major, minor, patch)
def validate_trace_id(trace_id: str) -> bool:
def normalize_trace_id(trace_id: str) -> str | None:
"""Strip EXP0/EXP1 suffixes and return a valid UUID string, or None if invalid."""
if trace_id[-4:] in ["EXP0", "EXP1"]:
trace_id = trace_id[:-4] + "0000"
try:
uuid_obj = uuid.UUID(trace_id, version=4)
if str(uuid_obj) != trace_id:
raise ValueError
return True
return None
return trace_id
except (ValueError, AttributeError):
return None
except ValueError:
if trace_id[-4:] in ["EXP0", "EXP1"]:
temp_trace_id = trace_id[:-4] + "0000"
return validate_trace_id(temp_trace_id)
return False
def validate_trace_id(trace_id: str) -> bool:
return normalize_trace_id(trace_id) is not None
CODEFLASH_EMPLOYEE_GITHUB_IDS = {

View file

@ -8,15 +8,15 @@ from pathlib import Path
from typing import TYPE_CHECKING, Any
import sentry_sdk
import stamina
from ninja import NinjaAPI, Schema
from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam
from packaging import version
from aiservice.analytics.posthog import ph
from aiservice.common.markdown_utils import extract_code_block_with_context, wrap_code_in_markdown
from aiservice.common_utils import validate_trace_id
from aiservice.env_specific import debug_log_sensitive_data
import stamina
from aiservice.llm import LLMOutputUnparseable, llm_client
from aiservice.llm_models import OPTIMIZATION_REVIEW_MODEL
from authapp.auth import AuthenticatedRequest
@ -279,9 +279,11 @@ async def get_optimization_review(
async def optimization_review(
request: AuthenticatedRequest, data: OptimizationReviewSchema
) -> tuple[int, OptimizationReviewResponseSchema | OptimizationReviewErrorSchema]:
if not validate_trace_id(data.trace_id):
return 400, OptimizationReviewErrorSchema(error="Invalid trace ID. Please provide a valid UUIDv4.")
try:
response_code, output, llm_cost = await get_optimization_review(request, data)
except LLMOutputUnparseable as e:
except LLMOutputUnparseable:
return 422, OptimizationReviewErrorSchema(error="Invalid response")
if isinstance(output, OptimizationReviewResponseSchema):
review_event = output.review.value

View file

@ -383,6 +383,50 @@ def split_code_into_parts(code: str, python_version: tuple[int, int]) -> CodePar
return split_code_with_regex(code)
def repair_preamble(preamble: str, python_version: tuple[int, int]) -> str:
    """Attempt to fix a preamble with syntax errors by removing offending lines.

    Iteratively removes lines that cause syntax errors. Handles multi-line
    constructs (unclosed parens/brackets/strings) by removing contiguous
    blocks of broken lines one per iteration.

    Args:
        preamble: The preamble code with potential syntax errors.
        python_version: Tuple of (major, minor) Python version.

    Returns:
        The repaired preamble (may be empty if nothing is salvageable).
    """
    lines = preamble.splitlines(keepends=True)
    max_attempts = min(len(lines), 15)  # Safety limit on removal iterations.
    for _ in range(max_attempts):
        # Parse the *unstripped* join so SyntaxError.lineno stays aligned with
        # `lines`; stripping first would misindex when the preamble starts
        # with blank lines, popping valid lines instead of the broken one.
        current = "".join(lines)
        if not current.strip():
            return ""
        try:
            ast.parse(current, feature_version=python_version)
            return current.strip()
        except SyntaxError as e:
            if e.lineno is None:
                return ""
            # Remove the offending line (1-based lineno -> 0-based index).
            error_idx = e.lineno - 1
            if 0 <= error_idx < len(lines):
                lines.pop(error_idx)
            else:
                return ""
    # Attempts exhausted: return whatever is left only if it now parses.
    result = "".join(lines).strip()
    try:
        ast.parse(result, feature_version=python_version)
        return result
    except SyntaxError:
        return ""
def validate_tests_individually(code: str, python_version: tuple[int, int]) -> tuple[str, int]:
"""Validate each test function individually and return only valid tests.
@ -408,13 +452,20 @@ def validate_tests_individually(code: str, python_version: tuple[int, int]) -> t
preamble_stripped = parts.preamble.strip()
# First, check if preamble itself is valid
# Check if preamble itself is valid; if not, try to repair it
if preamble_stripped:
try:
ast.parse(preamble_stripped, feature_version=python_version)
except SyntaxError as e:
logging.debug("Preamble has syntax error at line %s: %s", e.lineno, e.msg)
logging.warning("Preamble has syntax error at line %s: %s", e.lineno, e.msg)
logging.debug("Preamble content:\n%s", preamble_stripped[:2000])
repaired = repair_preamble(preamble_stripped, python_version)
if repaired != preamble_stripped:
logging.info(
"Repaired preamble: removed %d chars of broken code",
len(preamble_stripped) - len(repaired),
)
preamble_stripped = repaired
for i, test_code in enumerate(parts.test_functions):
# Combine preamble with this single test

View file

@ -5,6 +5,7 @@ from uuid import UUID, uuid4
from django.db.models import F
from django.db.models.functions import Coalesce
from aiservice.common_utils import normalize_trace_id
from core.log_features.models import OptimizationEvents, OptimizationFeatures, Repositories
@ -111,7 +112,10 @@ async def update_optimization_features_review(
review_explanation: str | None = None,
calling_fn_details: str | None = None,
) -> None:
trace_uuid = UUID(trace_id)
normalized = normalize_trace_id(trace_id)
if normalized is None:
return
trace_uuid = UUID(normalized)
# This avoids the race condition and is more performant.
await OptimizationFeatures.objects.filter(trace_id=trace_uuid).aupdate(
review_quality=review_quality, review_explanation=review_explanation, calling_fn_details=calling_fn_details

View file

@ -179,14 +179,14 @@ def merge_dicts(a: dict[str, dict[str, str]], b: dict[str, dict[str, str]]) -> d
return result
@features_api.post("/", response={200: None, 500: LoggingErrorResponseSchema})
@features_api.post("/", response={200: None, 400: LoggingErrorResponseSchema, 500: LoggingErrorResponseSchema})
async def log_features_cli(
request: AuthenticatedRequest, data: LoggingSchema
) -> int | tuple[int, LoggingErrorResponseSchema]:
try:
if hasattr(request, "should_log_features") and request.should_log_features:
if not validate_trace_id(data.trace_id):
raise ValueError("Invalid UUID")
return 400, LoggingErrorResponseSchema(error="Invalid trace ID. Please provide a valid UUIDv4.")
await log_features(
trace_id=data.trace_id,

View file

@ -26,6 +26,7 @@ from openai.types.chat import (
from aiservice.analytics.posthog import ph
from aiservice.common.markdown_utils import extract_code_block
from aiservice.common_utils import validate_trace_id
from aiservice.llm import llm_client
from aiservice.llm_models import HAIKU_MODEL
from authapp.auth import AuthenticatedRequest
@ -76,6 +77,9 @@ async def testgen_repair(
if data.language != "python":
return 400, TestRepairErrorSchema(error="Test repair is only supported for Python")
if not validate_trace_id(data.trace_id):
return 400, TestRepairErrorSchema(error="Invalid trace ID. Please provide a valid UUIDv4.")
ph(request.user, "aiservice-testgen-repair-called")
try:

View file

@ -26,6 +26,7 @@ from openai.types.chat import (
from aiservice.analytics.posthog import ph
from aiservice.common.markdown_utils import extract_code_block_with_context
from aiservice.common_utils import validate_trace_id
from aiservice.llm import llm_client
from aiservice.llm_models import HAIKU_MODEL
from authapp.auth import AuthenticatedRequest
@ -54,6 +55,9 @@ async def testgen_review(
if data.language != "python":
return 200, TestgenReviewResponseSchema(reviews=[])
if not validate_trace_id(data.trace_id):
return 400, TestgenReviewErrorSchema(error="Invalid trace ID. Please provide a valid UUIDv4.")
ph(request.user, "aiservice-testgen-review-called")
try:

View file

@ -4,6 +4,7 @@ import pytest
from core.languages.python.testgen.postprocessing.code_validator import (
CodeValidationError,
repair_preamble,
split_code_with_ast,
split_code_with_regex,
validate_testgen_code,
@ -649,3 +650,97 @@ async def test_third():
result = validate_testgen_code(code, python_version=(3, 11))
assert result == expected
class TestRepairPreamble:
    """Tests for preamble repair when LLM generates broken import/helper code."""

    # Syntax feature version used for every repair in this suite.
    PY_VERSION = (3, 11)

    def test_repair_removes_single_broken_line(self) -> None:
        preamble = """import pytest
x = @invalid
import os"""
        repaired = repair_preamble(preamble, self.PY_VERSION)
        assert "import pytest" in repaired
        assert "import os" in repaired
        assert "@invalid" not in repaired

    def test_repair_returns_empty_for_all_broken(self) -> None:
        # Every line is broken, so nothing is salvageable.
        assert repair_preamble("x = @\ny = %\nz = $", self.PY_VERSION) == ""

    def test_repair_noop_for_valid_preamble(self) -> None:
        repaired = repair_preamble("import pytest\nimport os", self.PY_VERSION)
        assert "import pytest" in repaired
        assert "import os" in repaired

    def test_repair_handles_empty_string(self) -> None:
        assert repair_preamble("", self.PY_VERSION) == ""

    def test_repair_removes_truncated_string(self) -> None:
        """LLMs sometimes produce unclosed strings in helper code."""
        preamble = '''import pytest
EXPECTED = "hello world
import os'''
        repaired = repair_preamble(preamble, self.PY_VERSION)
        # The line with the unterminated string should be removed.
        assert "import pytest" in repaired
        assert "hello world" not in repaired

    def test_repair_removes_incomplete_function(self) -> None:
        preamble = """import pytest
def helper():
    x = 1
def broken_helper(
# missing closing paren"""
        repaired = repair_preamble(preamble, self.PY_VERSION)
        assert "import pytest" in repaired
        assert "def helper" in repaired
class TestBrokenPreambleValidation:
    """End-to-end tests: broken preamble should not discard valid tests."""

    def test_broken_preamble_keeps_valid_tests(self) -> None:
        """When preamble has a syntax error, valid tests should still be kept."""
        code = """import pytest
x = @invalid_syntax
def test_one():
    assert 1 == 1
def test_two():
    assert 2 == 2"""
        validated = validate_testgen_code(code, python_version=(3, 11))
        assert "def test_one" in validated
        assert "def test_two" in validated

    def test_broken_preamble_with_truncated_string(self) -> None:
        """Truncated string in preamble should not kill all tests."""
        code = '''import pytest
EXPECTED = "unterminated
def test_basic():
    assert True'''
        validated = validate_testgen_code(code, python_version=(3, 11))
        assert "def test_basic" in validated

    def test_broken_preamble_mixed_valid_invalid_tests(self) -> None:
        """Broken preamble + some broken tests should keep only the valid tests."""
        code = """import pytest
x = @bad
def test_valid():
    assert True
def test_broken():
    y = @also_bad"""
        validated = validate_testgen_code(code, python_version=(3, 11))
        assert "def test_valid" in validated
        assert "test_broken" not in validated

View file

@ -307,18 +307,17 @@ export async function createPr(req: Request, res: Response) {
return
}
const nickname: string | null = await dependencies.userNickname(userId)
// Fetch user nickname and installation octokit in parallel (independent calls)
const [nickname, installationOctokit] = await Promise.all([
dependencies.userNickname(userId),
dependencies.getInstallationOctokitByOwner(dependencies.githubApp, owner, repo, userId),
])
if (nickname == null) {
res.status(401).json({ error: "Unauthorized" })
return
}
const installationOctokit = await dependencies.getInstallationOctokitByOwner(
dependencies.githubApp,
owner,
repo,
userId,
)
if (installationOctokit instanceof Error) {
res.status(401).json({ error: installationOctokit.message })
return
@ -509,7 +508,12 @@ export async function createPr(req: Request, res: Response) {
if (traceId) {
logger.info(`PR creation failed, falling back to staging for traceId: ${traceId}`, req)
try {
const stagingResult = await saveStagingReview(req.body, userId, organizationId, (req as any).subscriptionInfo)
const stagingResult = await saveStagingReview(
req.body,
userId,
organizationId,
(req as any).subscriptionInfo,
)
if (stagingResult.status === 200) {
return res.status(200).json({
message: "PR creation failed, staging created as fallback",
@ -521,7 +525,7 @@ export async function createPr(req: Request, res: Response) {
`Staging fallback returned status ${stagingResult.status}`,
req,
{ reqBody: req.body, userId, traceId, stagingResult },
new Error(`Staging fallback returned status ${stagingResult.status}`)
new Error(`Staging fallback returned status ${stagingResult.status}`),
)
return res.status(stagingResult.status).json({
message: "PR creation failed and staging fallback also failed",
@ -532,7 +536,7 @@ export async function createPr(req: Request, res: Response) {
`Staging fallback threw an exception:`,
req,
{ reqBody: req.body, userId, traceId },
stagingError as Error
stagingError as Error,
)
return res.status(500).json({
message: "PR creation failed and staging fallback threw an error",
@ -693,133 +697,181 @@ export async function triggerCreatePr(
owner,
repo,
})
try {
// Check existing data first (preserve staging data)
const existing = await triggerCreatePrDeps.prisma.optimization_events.findUnique({
where: { trace_id: traceId },
select: {
function_name: true,
speedup_x: true,
file_path: true,
speedup_pct: true,
staging_storage_type: true,
metadata: true,
},
})
const updateData: any = {
pr_id: String(newPrData.data.id),
pr_url: `https://github.com/${owner}/${repo}/pull/${newPrData.data.number}`,
is_optimization_found: true,
event_type: "pr_created",
}
// Check if we should clean up plain text data (user is paid OR org has subscription)
let shouldCleanupData = isPaidUser
if (!shouldCleanupData && organizationId && traceId) {
// Check if org has subscription
const org = await triggerCreatePrDeps.prisma.organizations.findUnique({
where: { id: organizationId },
select: { subscription: true },
// Run post-PR-creation tasks in parallel:
// 1. DB optimization_events update (non-fatal errors caught internally)
// 2. GitHub API calls: assign reviewer + add labels (run in parallel with each other)
// 3. DB optimization_features update
const updateOptimizationEventsTask = (async () => {
try {
// Check existing data first (preserve staging data)
const existing = await triggerCreatePrDeps.prisma.optimization_events.findUnique({
where: { trace_id: traceId },
select: {
function_name: true,
speedup_x: true,
file_path: true,
speedup_pct: true,
staging_storage_type: true,
metadata: true,
},
})
if (org?.subscription) {
shouldCleanupData = true
console.log(
`[triggerCreatePr] Org has subscription - will cleanup plain text data for traceId: ${traceId}`,
)
const updateData: any = {
pr_id: String(newPrData.data.id),
pr_url: `https://github.com/${owner}/${repo}/pull/${newPrData.data.number}`,
is_optimization_found: true,
event_type: "pr_created",
}
}
// If user is paid or org has subscription, convert to git_branch storage and clear diffContents
if (shouldCleanupData && traceId) {
if (existing) {
const currentMetadata = (existing.metadata ?? {}) as Record<string, unknown>
// Remove diffContents from metadata if it exists (plain_text mode stores it there)
if (currentMetadata.diffContents) {
delete currentMetadata.diffContents
// Check if we should clean up plain text data (user is paid OR org has subscription)
let shouldCleanupData = isPaidUser
if (!shouldCleanupData && organizationId && traceId) {
// Check if org has subscription
const org = await triggerCreatePrDeps.prisma.organizations.findUnique({
where: { id: organizationId },
select: { subscription: true },
})
if (org?.subscription) {
shouldCleanupData = true
console.log(
`[triggerCreatePr] Org has subscription - will cleanup plain text data for traceId: ${traceId}`,
)
}
}
// Update metadata with the new staging branch name
currentMetadata.staging_branch_name = newBranchName
currentMetadata.storageType = "git_branch"
// If user is paid or org has subscription, convert to git_branch storage and clear diffContents
if (shouldCleanupData && traceId) {
if (existing) {
const currentMetadata = (existing.metadata ?? {}) as Record<string, unknown>
// Add line profiler data if provided and not already present
// Remove diffContents from metadata if it exists (plain_text mode stores it there)
if (currentMetadata.diffContents) {
delete currentMetadata.diffContents
}
// Update metadata with the new staging branch name
currentMetadata.staging_branch_name = newBranchName
currentMetadata.storageType = "git_branch"
// Add line profiler data if provided and not already present
addLineProfilerToMetadata(currentMetadata, originalLineProfiler, optimizedLineProfiler)
updateData.staging_storage_type = "git_branch"
updateData.metadata = currentMetadata
updateData.is_staging = true
console.log(
`[triggerCreatePr] Paid user/subscribed org: Converting storage to git_branch for traceId: ${traceId}`,
)
}
} else if (traceId && (originalLineProfiler || optimizedLineProfiler)) {
// For non-paid users, still add line profiler data if provided
const currentMetadata = (existing?.metadata ?? {}) as Record<string, unknown>
addLineProfilerToMetadata(currentMetadata, originalLineProfiler, optimizedLineProfiler)
updateData.staging_storage_type = "git_branch"
updateData.metadata = currentMetadata
updateData.is_staging = true
console.log(
`[triggerCreatePr] Paid user/subscribed org: Converting storage to git_branch for traceId: ${traceId}`,
)
}
} else if (traceId && (originalLineProfiler || optimizedLineProfiler)) {
// For non-paid users, still add line profiler data if provided
const currentMetadata = (existing?.metadata ?? {}) as Record<string, unknown>
addLineProfilerToMetadata(currentMetadata, originalLineProfiler, optimizedLineProfiler)
updateData.metadata = currentMetadata
}
// Only add if missing (preserve staging data)
if (prCommentFields) {
if (!existing?.function_name && prCommentFields.function_name) {
updateData.function_name = prCommentFields.function_name
// Only add if missing (preserve staging data)
if (prCommentFields) {
if (!existing?.function_name && prCommentFields.function_name) {
updateData.function_name = prCommentFields.function_name
}
if (!existing?.file_path && prCommentFields.file_path) {
updateData.file_path = prCommentFields.file_path
}
if (existing?.speedup_x == null && prCommentFields.speedup_x) {
updateData.speedup_x = parseSpeedupValue(prCommentFields.speedup_x, "x")
}
if (existing?.speedup_pct == null && prCommentFields.speedup_pct) {
updateData.speedup_pct = parseSpeedupValue(prCommentFields.speedup_pct, "%")
}
}
if (!existing?.file_path && prCommentFields.file_path) {
updateData.file_path = prCommentFields.file_path
}
if (existing?.speedup_x == null && prCommentFields.speedup_x) {
updateData.speedup_x = parseSpeedupValue(prCommentFields.speedup_x, "x")
}
if (existing?.speedup_pct == null && prCommentFields.speedup_pct) {
updateData.speedup_pct = parseSpeedupValue(prCommentFields.speedup_pct, "%")
}
}
await triggerCreatePrDeps.prisma.optimization_events.update({
where: { trace_id: traceId },
data: updateData,
})
} catch (eventError) {
logger.error(
"Failed to update optimization event:",
{
userId,
endpoint: "/cfapi/create-pr",
operation: "update_optimization_event",
owner,
repo,
},
{},
eventError as Error,
)
}
await triggerCreatePrDeps.prisma.optimization_events.update({
where: { trace_id: traceId },
data: updateData,
})
} catch (eventError) {
logger.error(
"Failed to update optimization event:",
{
userId,
endpoint: "/cfapi/create-pr",
operation: "update_optimization_event",
owner,
repo,
},
{},
eventError as Error,
)
}
})()
await triggerCreatePrDeps.assignReviewer(
installationOctokit,
owner,
repo,
newPrData.data.number,
nickname,
)
await triggerCreatePrDeps.addLabelToPullRequest(
installationOctokit,
owner,
repo,
newPrData.data.number,
)
if (optimizationReview) {
await triggerCreatePrDeps.addLabelToPullRequest(
// Run reviewer assignment and label additions in parallel
const githubPostPrTasks: Promise<void>[] = [
triggerCreatePrDeps.assignReviewer(
installationOctokit,
owner,
repo,
newPrData.data.number,
`🎯 Quality: ${optimizationReview.charAt(0).toUpperCase() + optimizationReview.slice(1).toLowerCase()}`,
"FFC043",
"Optimization Quality according to Codeflash",
nickname,
),
triggerCreatePrDeps.addLabelToPullRequest(
installationOctokit,
owner,
repo,
newPrData.data.number,
),
]
if (optimizationReview) {
githubPostPrTasks.push(
triggerCreatePrDeps.addLabelToPullRequest(
installationOctokit,
owner,
repo,
newPrData.data.number,
`🎯 Quality: ${optimizationReview.charAt(0).toUpperCase() + optimizationReview.slice(1).toLowerCase()}`,
"FFC043",
"Optimization Quality according to Codeflash",
),
)
}
const updateOptimizationFeaturesTask = (async () => {
if (traceId !== "") {
let pull_request_db = await triggerCreatePrDeps.prisma.optimization_features.findUnique({
where: {
trace_id: traceId,
},
select: {
pull_request: true,
},
})
if (pull_request_db) {
if (pull_request_db.pull_request === null || pull_request_db.pull_request === undefined) {
pull_request_db.pull_request = {}
}
;(pull_request_db.pull_request as any).new_pr_url = newPrData.data.html_url
await triggerCreatePrDeps.prisma.optimization_features.update({
where: {
trace_id: traceId,
},
data: {
pull_request: pull_request_db.pull_request,
},
})
}
}
})()
// Wait for all post-PR tasks in parallel
await Promise.all([
updateOptimizationEventsTask,
Promise.all(githubPostPrTasks),
updateOptimizationFeaturesTask,
])
logger.info(`Created new PR #${newPrData.data.number} with branch ${newPrData.data.head.ref}`, {
userId,
endpoint: "/cfapi/create-pr",
@ -839,34 +891,6 @@ export async function triggerCreatePr(
},
})
if (traceId !== "") {
let pull_request_db = await triggerCreatePrDeps.prisma.optimization_features.findUnique({
where: {
trace_id: traceId,
},
select: {
pull_request: true,
},
})
if (pull_request_db) {
if (pull_request_db.pull_request === null || pull_request_db.pull_request === undefined) {
pull_request_db.pull_request = {}
}
;(pull_request_db.pull_request as any).new_pr_url = newPrData.data.html_url
await triggerCreatePrDeps.prisma.optimization_features.update({
where: {
trace_id: traceId,
},
data: {
pull_request: pull_request_db.pull_request,
},
})
}
}
return newPrData.data.number
} catch (error) {
logger.errorWithSentry(

View file

@ -785,6 +785,7 @@ export function OptimizationsTable({
src={`https://github.com/${event.repository.full_name.split("/")[0]}.png`}
alt={event.repository.full_name}
fill
sizes="32px"
className="rounded-full object-cover"
onError={e => {
e.currentTarget.style.display = "none"

View file

@ -36,27 +36,39 @@ const STORAGE_KEY = "privacyMode"
interface PrivacyModeProviderProps {
children: React.ReactNode
userId?: string
initialPrivacyMode?: boolean
initialCanUsePrivacyMode?: boolean
}
export function PrivacyModeProvider({ children, userId }: PrivacyModeProviderProps) {
const [isPrivacyMode, setIsPrivacyMode] = useState<boolean>(false)
const [isLoading, setIsLoading] = useState(true)
const [canUsePrivacyMode, setCanUsePrivacyMode] = useState(false)
export function PrivacyModeProvider({
children,
userId,
initialPrivacyMode,
initialCanUsePrivacyMode,
}: PrivacyModeProviderProps) {
const hasInitialData = initialPrivacyMode !== undefined && initialCanUsePrivacyMode !== undefined
const [isPrivacyMode, setIsPrivacyMode] = useState<boolean>(
hasInitialData ? (initialCanUsePrivacyMode ? initialPrivacyMode : false) : false,
)
const [isLoading, setIsLoading] = useState(!hasInitialData)
const [canUsePrivacyMode, setCanUsePrivacyMode] = useState(initialCanUsePrivacyMode ?? false)
const { mode, currentOrg } = useViewMode()
const isOrgMode = mode === "organization" && !!currentOrg
const isOrgAdmin = isOrgMode && (currentOrg?.role === "admin" || currentOrg?.role === "owner")
// Can toggle: personal mode (if paid) OR org mode (if admin and org has subscription)
const canTogglePrivacyMode = isOrgMode ? (isOrgAdmin && canUsePrivacyMode) : canUsePrivacyMode
const canTogglePrivacyMode = isOrgMode ? isOrgAdmin && canUsePrivacyMode : canUsePrivacyMode
// Track whether we've used initial data for the first personal-mode render
const usedInitialDataRef = React.useRef(false)
// Load saved preference from database
useEffect(() => {
const loadPrivacyMode = async () => {
setIsLoading(true)
if (isOrgMode && currentOrg?.id) {
// Organization mode - fetch org privacy settings
// Organization mode - always fetch org-specific privacy settings
setIsLoading(true)
try {
const [orgPrivacyMode, orgCanUse] = await Promise.all([
getOrgPrivacyMode(currentOrg.id),
@ -69,8 +81,22 @@ export function PrivacyModeProvider({ children, userId }: PrivacyModeProviderPro
setIsPrivacyMode(false)
setCanUsePrivacyMode(false)
}
setIsLoading(false)
} else if (userId) {
// Personal mode - fetch user privacy settings
// Personal mode - skip fetch if initial data was provided by the server
if (hasInitialData && !usedInitialDataRef.current) {
usedInitialDataRef.current = true
// Sync localStorage with server-provided value
try {
const effectivePrivacyMode = initialCanUsePrivacyMode ? initialPrivacyMode : false
localStorage.setItem(STORAGE_KEY, String(effectivePrivacyMode))
} catch {
// localStorage not available
}
return
}
setIsLoading(true)
// First, load from localStorage for instant UI
try {
const saved = localStorage.getItem(STORAGE_KEY)
@ -98,13 +124,19 @@ export function PrivacyModeProvider({ children, userId }: PrivacyModeProviderPro
} catch (error) {
console.error("Error fetching privacy mode from database:", error)
}
setIsLoading(false)
}
setIsLoading(false)
}
loadPrivacyMode()
}, [userId, isOrgMode, currentOrg?.id])
}, [
userId,
isOrgMode,
currentOrg?.id,
hasInitialData,
initialPrivacyMode,
initialCanUsePrivacyMode,
])
const updatePrivacyModeValue = useCallback(
async (newValue: boolean) => {

View file

@ -42,17 +42,19 @@ interface Organization {
export function ViewModeProvider({
children,
user,
initialOrganizations,
}: {
children: React.ReactNode
user?: UserProfile
initialOrganizations?: Organization[]
}) {
const router = useRouter()
const [mode, setMode] = useState<ViewMode>("personal")
const [loading, setIsLoading] = useState<boolean>(true)
const [orgs, setOrgs] = useState<Organization[]>([])
const [loading, setIsLoading] = useState<boolean>(!initialOrganizations)
const [orgs, setOrgs] = useState<Organization[]>(initialOrganizations ?? [])
const [currentOrg, setCurrentOrg] = useState<Organization | null>(null)
const fetchingRef = useRef(false)
const orgsRef = useRef<Organization[]>([])
const orgsRef = useRef<Organization[]>(initialOrganizations ?? [])
const setLocalStorageMode = useCallback((newMode: ViewMode, orgId?: string) => {
localStorage.setItem("viewMode", newMode)
@ -99,6 +101,25 @@ export function ViewModeProvider({
setIsLoading(false)
return
}
const restoreViewMode = (fetchedOrgs: Organization[]) => {
const savedMode = localStorage.getItem("viewMode") as ViewMode
const savedOrgId = localStorage.getItem("currentOrganizationId")
if (savedMode === "organization" && savedOrgId) {
switchToMode("organization", savedOrgId, fetchedOrgs)
} else {
switchToMode("personal", undefined, fetchedOrgs)
}
}
// If initial data was provided by the server, skip the client-side fetch
if (initialOrganizations) {
restoreViewMode(initialOrganizations)
setIsLoading(false)
return
}
if (fetchingRef.current) return
fetchingRef.current = true
@ -108,15 +129,7 @@ export function ViewModeProvider({
if (result.success && result.organizations) {
setOrgs(result.organizations)
orgsRef.current = result.organizations
const savedMode = localStorage.getItem("viewMode") as ViewMode
const savedOrgId = localStorage.getItem("currentOrganizationId")
if (savedMode === "organization" && savedOrgId) {
switchToMode("organization", savedOrgId, result.organizations)
} else {
switchToMode("personal", undefined, result.organizations)
}
restoreViewMode(result.organizations)
}
})
.catch(error => console.error("Error fetching organizations:", error))
@ -124,7 +137,7 @@ export function ViewModeProvider({
setIsLoading(false)
fetchingRef.current = false
})
}, [user?.sub, switchToMode])
}, [user?.sub, switchToMode, initialOrganizations])
const contextValue = useMemo(
() => ({ mode, orgs, loading, currentOrg, switchToMode }),

View file

@ -0,0 +1,97 @@
"use server"
import {
getUserPrivacyMode,
isUserPaid,
checkAndResetSubscriptionPeriod,
prisma,
} from "@codeflash-ai/common"
interface Organization {
id: string
name: string
role: "admin" | "owner" | "member"
avatarUrl?: string
}
export interface DashboardInitData {
organizations: Organization[]
privacyMode: boolean
canUsePrivacyMode: boolean
subscription: {
optimizations_used: number
optimizations_limit: number
} | null
}
/**
* Fetches all dashboard initialization data in a single server action,
* running all independent queries in parallel. This replaces the sequential
* client-side waterfall of getUserOrganizations + checkCanUsePrivacyMode +
* getPrivacyMode + getCurrentUserSubscriptionData.
*/
export async function getDashboardInitData(userId: string): Promise<DashboardInitData> {
  // Organizations the user belongs to, mapped to the UI shape. Each query
  // swallows (and logs) its own errors so one failure cannot blank the
  // whole dashboard payload.
  const organizationsPromise: Promise<Organization[]> = prisma.organizations
    .findMany({
      where: {
        organization_members: {
          some: { user_id: userId },
        },
      },
      include: {
        organization_members: {
          where: { user_id: userId },
          select: { role: true },
        },
      },
      orderBy: { created_at: "desc" },
    })
    .then(records =>
      records.map(record => ({
        id: record.id,
        name: record.name,
        role: record.organization_members[0].role as "admin" | "owner" | "member",
        avatarUrl: `https://github.com/${record.name}.png`,
      })),
    )
    .catch(error => {
      console.error("Failed to get user organizations:", error)
      return [] as Organization[]
    })

  // Current privacy-mode preference; defaults to off on failure.
  const privacyModePromise = getUserPrivacyMode(userId).catch(error => {
    console.error("Error getting privacy mode:", error)
    return false
  })

  // Only paid users may use privacy mode.
  const canUsePrivacyModePromise = isUserPaid(userId).catch(error => {
    console.error("Error checking if user can use privacy mode:", error)
    return false
  })

  // Subscription usage counters (also rolls the billing period forward).
  const subscriptionPromise = checkAndResetSubscriptionPeriod(userId)
    .then(sub => {
      if (!sub) return null
      return {
        optimizations_used: sub.optimizations_used || 0,
        optimizations_limit: sub.optimizations_limit || 0,
      }
    })
    .catch(error => {
      console.error("Failed to fetch subscription data:", error)
      return null
    })

  // All four queries are independent, so run them concurrently.
  const [organizations, privacyMode, canUsePrivacyMode, subscription] = await Promise.all([
    organizationsPromise,
    privacyModePromise,
    canUsePrivacyModePromise,
    subscriptionPromise,
  ])

  return { organizations, privacyMode, canUsePrivacyMode, subscription }
}

View file

@ -425,7 +425,30 @@ export async function getOptimizationPRs(
? `AND oe.repository_id = '${String(repositoryId).replace(/'/g, "''")}'`
: ""
const whereClause = `
// Separate WHERE clauses: the count query uses EXISTS to avoid joining the
// large optimization_features table when oe.pr_url already satisfies the
// "has a PR" condition. The data query still LEFT JOINs to pull fallback
// fields but only for the small LIMIT'd result set.
const prCondition = `
AND oe.is_optimization_found = true
AND (
oe.pr_url IS NOT NULL
OR EXISTS (
SELECT 1 FROM optimization_features of2
WHERE of2.trace_id = oe.trace_id
AND of2.pull_request IS NOT NULL
)
)
`
const countWhereClause = `
${accountCondition}
${eventTypeCondition}
${repositoryCondition}
${prCondition}
`
const dataWhereClause = `
${accountCondition}
${eventTypeCondition}
${repositoryCondition}
@ -438,7 +461,9 @@ export async function getOptimizationPRs(
const offset = (page - 1) * pageSize
// Optimized query - simplified COALESCE chains and extracted common speedup parsing logic
// Run data + count queries in parallel.
// Count uses EXISTS (no JOIN on optimization_features).
// Data query JOINs optimization_features only for the LIMIT'd rows.
const [events, countRows] = await Promise.all([
prisma.$queryRawUnsafe<
Array<{
@ -514,7 +539,7 @@ export async function getOptimizationPRs(
FROM optimization_events oe
LEFT JOIN optimization_features of ON oe.trace_id = of.trace_id
LEFT JOIN repositories r ON oe.repository_id = r.id
WHERE ${whereClause}
WHERE ${dataWhereClause}
ORDER BY oe.created_at DESC
LIMIT ${pageSize} OFFSET ${offset}
`,
@ -523,8 +548,7 @@ export async function getOptimizationPRs(
`
SELECT COUNT(*)::bigint AS count
FROM optimization_events oe
LEFT JOIN optimization_features of ON oe.trace_id = of.trace_id
WHERE ${whereClause}
WHERE ${countWhereClause}
`,
),
])

View file

@ -14,6 +14,7 @@ import PostHogPageView from "./PostHogPageView"
import { ViewModeProvider } from "./app/ViewModeContext"
import { PrivacyModeProvider } from "./app/PrivacyModeContext"
import { ConditionalLayout } from "@/components/conditional-layout"
import { getDashboardInitData } from "./app/init-data-action"
const fontSans = FontSans({
subsets: ["latin"],
@ -39,6 +40,11 @@ export default async function RootLayout({
children: React.ReactNode
}): Promise<JSX.Element> {
const session = await auth0.getSession()
// Fetch all dashboard init data in parallel on the server to avoid
// sequential client-side waterfalls (orgs, privacy, subscription).
const initData = session?.user?.sub ? await getDashboardInitData(session.user.sub) : null
let intercomSnippet: string = `var APP_ID = "ljxo1nzr";
(function(){var w=window;var ic=w.Intercom;if(typeof ic==="function"){ic('reattach_activator');ic('update',w.intercomSettings);}else{var d=document;var i=function(){i.c(arguments);};i.q=[];i.c=function(args){i.q.push(args);};w.Intercom=i;var l=function(){var s=d.createElement('script');s.type='text/javascript';s.async=true;s.src='https://widget.intercom.io/widget/' + APP_ID;var x=d.getElementsByTagName('script')[0];x.parentNode.insertBefore(s, x);};if(document.readyState==='complete'){l();}else if(w.attachEvent){w.attachEvent('onload',l);}else{w.addEventListener('load',l,false);}}})();
`
@ -97,9 +103,18 @@ export default async function RootLayout({
enableSystem
disableTransitionOnChange
>
<ViewModeProvider user={session?.user}>
<PrivacyModeProvider userId={session?.user?.sub}>
<ConditionalLayout user={session?.user}>{children}</ConditionalLayout>
<ViewModeProvider user={session?.user} initialOrganizations={initData?.organizations}>
<PrivacyModeProvider
userId={session?.user?.sub}
initialPrivacyMode={initData?.privacyMode}
initialCanUsePrivacyMode={initData?.canUsePrivacyMode}
>
<ConditionalLayout
user={session?.user}
initialSubscription={initData?.subscription}
>
{children}
</ConditionalLayout>
</PrivacyModeProvider>
</ViewModeProvider>
<Toaster />

View file

@ -14,9 +14,14 @@ const HIDDEN_PAGES = ["/onboarding", "/codeflash/auth", "/login", "/codeflash/au
export function ConditionalLayout({
children,
user,
initialSubscription,
}: {
children: React.ReactNode
user?: User | null
initialSubscription?: {
optimizations_used: number
optimizations_limit: number
} | null
}) {
const pathname = usePathname()
const [isAnnouncementVisible, setIsAnnouncementVisible] = useState(true)
@ -101,6 +106,7 @@ export function ConditionalLayout({
<Sidebar
className="h-full border-r border-border/30 flex-shrink-0 w-60 bg-background"
user={user || undefined}
initialSubscription={initialSubscription}
/>
<main className="flex-1 flex flex-col gap-5 w-full max-w-none mx-auto h-full overflow-y-auto p-4">
<div className="hidden md:block sticky top-0 z-20 bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60 border-b border-border/40">

View file

@ -351,7 +351,7 @@ export const OptimizationPRsTable: React.FC<OptimizationPRsTableProps> = memo(
return (
<div
className={`bg-card rounded-xl border border-border p-4 sm:p-5 hover:shadow-md transition-all duration-300 hover:border-primary/20 min-h-[420px] ${className || ""}`}
className={`bg-card rounded-xl border border-border p-4 sm:p-5 hover:shadow-md transition-all duration-300 hover:border-primary/20 ${className || ""}`}
>
{/* Header */}
<div className="flex flex-col sm:flex-row sm:items-center sm:justify-between gap-3 mb-4">
@ -447,6 +447,7 @@ export const OptimizationPRsTable: React.FC<OptimizationPRsTableProps> = memo(
src={`https://github.com/${event.repository.full_name.split("/")[0]}.png`}
alt={event.repository.full_name}
fill
sizes="32px"
className="rounded-full object-cover"
onError={e => {
e.currentTarget.style.display = "none"

View file

@ -45,9 +45,19 @@ interface SidebarProps {
user?: UserProfile
isLoading?: boolean
error?: Error | null
initialSubscription?: {
optimizations_used: number
optimizations_limit: number
} | null
}
export function Sidebar({ className, user, isLoading, error }: SidebarProps): JSX.Element {
export function Sidebar({
className,
user,
isLoading,
error,
initialSubscription,
}: SidebarProps): JSX.Element {
const currentRoute = usePathname()
const [isDarkMode, setIsDarkMode] = useState(false)
const [isDropdownOpen, setIsDropdownOpen] = useState(false)
@ -66,7 +76,7 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
const [subscription, setSubscription] = useState<{
optimizations_used: number
optimizations_limit: number
} | null>(null)
} | null>(initialSubscription ?? null)
const subscriptionFetchRef = useRef(false)
const onMobileClose = () => {
@ -99,11 +109,21 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
return () => document.removeEventListener("mousedown", handleClickOutside)
}, [])
const usedInitialSubscriptionRef = useRef(false)
useEffect(() => {
if (mode !== "personal") {
setSubscription(null)
return
}
// Skip fetch if initial data was provided by the server (first render only)
if (initialSubscription !== undefined && !usedInitialSubscriptionRef.current) {
usedInitialSubscriptionRef.current = true
setSubscription(initialSubscription)
return
}
if (subscriptionFetchRef.current) return
subscriptionFetchRef.current = true
@ -128,7 +148,7 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
cancelled = true
subscriptionFetchRef.current = false
}
}, [mode])
}, [mode, initialSubscription])
const toggleTheme = () => {
const newMode = !isDarkMode

View file

@ -226,6 +226,9 @@ model optimization_events {
@@index([repository_id, user_id])
@@index([api_key_id])
@@index([is_staging])
// Covers the optimization-prs query: filter by repo + optimization found + event type, sort by created_at DESC
@@index([repository_id, is_optimization_found, event_type, created_at(sort: Desc)])
@@index([is_optimization_found, event_type, created_at(sort: Desc)])
}
model comments {