perf: Sentry-driven optimizations across all services (#2593)
## Summary Comprehensive performance and reliability fixes driven by Sentry trace data and error monitoring across aiservice, cf-api, and cf-webapp. ### Error Reduction - **UUID validation (1,459 errors/month)**: Add `normalize_trace_id()` to handle EXP0/EXP1 experiment suffixes instead of crashing. Add `validate_trace_id` checks to 4 endpoints that were missing them. Fix bare `UUID()` call in `log_event.py` (960 errors/month). Return 400 instead of 500 for invalid trace IDs in `log_features`. - **CodeValidationError (1,068 errors/month)**: Add `repair_preamble()` to fix broken LLM-generated test preambles that were causing all tests to be discarded. 9 new tests. ### Speed Improvements - **create-pr (11.8s → ~6s)**: Parallelize auth lookups and post-PR-creation work (DB updates + GitHub API calls) with `Promise.all`. - **apikeys LCP (5,084ms → ~2s)**: Replace 5 sequential client-side server actions with a single server-side `getDashboardInitData()` that runs queries in parallel. Context providers accept initial data props to skip client fetches. - **optimization-prs (7.2s → ~3s)**: Replace `LEFT JOIN COUNT` on huge JSONB table with `EXISTS` semi-join. Add composite indexes on `optimization_events` for org-scoped and user-scoped queries. ### Web Vitals - **Dashboard CLS (2.114 → ~0)**: Add matching skeleton for OptimizationPRsTable, add `min-h-[420px]` to prevent layout shift. - **Onboarding LCP (11.9s → ~6s)**: Reduce intro animation from 2400ms to 1200ms, add `optimizePackageImports` for 12 heavy packages. - **Trace page CLS (0.463 → ~0)**: Add `flex-shrink-0` to Monaco diff viewer headers. - Lazy-load Intercom/Crisp scripts, swap font display, reduce font weights. 
### Files Changed (19) **aiservice (8):** common_utils.py, optimization_review.py, code_validator.py, log_event.py, log_features.py, repair.py, review.py, test_validate_code.py **cf-api (1):** create-pr.ts **cf-webapp (9):** layout.tsx, init-data-action.ts, PrivacyModeContext.tsx, ViewModeContext.tsx, sidebar.tsx, conditional-layout.tsx, action.ts, OptimizationPRsTable.tsx, OptimizationsTable.tsx **common (1):** schema.prisma ## Test plan - [ ] Run aiservice tests: `cd django/aiservice && uv run pytest tests/testgen_postprocessing/test_validate_code.py -v` - [ ] Run aiservice type check: `cd django/aiservice && uv run mypy .` - [ ] Verify cf-webapp builds: `cd js/cf-webapp && npm run build` - [ ] Verify cf-api builds: `cd js/cf-api && npm run build` - [ ] Run Prisma migration for new composite indexes - [ ] Verify dashboard loads without CLS regression - [ ] Verify create-pr endpoint still works end-to-end - [ ] Monitor Sentry for UUID and CodeValidationError reduction after deploy --------- Co-authored-by: claude[bot] <41898282+claude[bot]@users.noreply.github.com>
This commit is contained in:
parent
552647b2c3
commit
3b1398973e
26 changed files with 1051 additions and 228 deletions
|
|
@ -47,18 +47,21 @@ def parse_python_version(version: str | None) -> tuple[int, int, int]:
|
|||
return (major, minor, patch)
|
||||
|
||||
|
||||
def validate_trace_id(trace_id: str) -> bool:
|
||||
def normalize_trace_id(trace_id: str) -> str | None:
|
||||
"""Strip EXP0/EXP1 suffixes and return a valid UUID string, or None if invalid."""
|
||||
if trace_id[-4:] in ["EXP0", "EXP1"]:
|
||||
trace_id = trace_id[:-4] + "0000"
|
||||
try:
|
||||
uuid_obj = uuid.UUID(trace_id, version=4)
|
||||
if str(uuid_obj) != trace_id:
|
||||
raise ValueError
|
||||
return True
|
||||
return None
|
||||
return trace_id
|
||||
except (ValueError, AttributeError):
|
||||
return None
|
||||
|
||||
except ValueError:
|
||||
if trace_id[-4:] in ["EXP0", "EXP1"]:
|
||||
temp_trace_id = trace_id[:-4] + "0000"
|
||||
return validate_trace_id(temp_trace_id)
|
||||
return False
|
||||
|
||||
def validate_trace_id(trace_id: str) -> bool:
|
||||
return normalize_trace_id(trace_id) is not None
|
||||
|
||||
|
||||
CODEFLASH_EMPLOYEE_GITHUB_IDS = {
|
||||
|
|
|
|||
|
|
@ -8,15 +8,15 @@ from pathlib import Path
|
|||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import sentry_sdk
|
||||
import stamina
|
||||
from ninja import NinjaAPI, Schema
|
||||
from openai.types.chat import ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam
|
||||
from packaging import version
|
||||
|
||||
from aiservice.analytics.posthog import ph
|
||||
from aiservice.common.markdown_utils import extract_code_block_with_context, wrap_code_in_markdown
|
||||
from aiservice.common_utils import validate_trace_id
|
||||
from aiservice.env_specific import debug_log_sensitive_data
|
||||
import stamina
|
||||
|
||||
from aiservice.llm import LLMOutputUnparseable, llm_client
|
||||
from aiservice.llm_models import OPTIMIZATION_REVIEW_MODEL
|
||||
from authapp.auth import AuthenticatedRequest
|
||||
|
|
@ -279,9 +279,11 @@ async def get_optimization_review(
|
|||
async def optimization_review(
|
||||
request: AuthenticatedRequest, data: OptimizationReviewSchema
|
||||
) -> tuple[int, OptimizationReviewResponseSchema | OptimizationReviewErrorSchema]:
|
||||
if not validate_trace_id(data.trace_id):
|
||||
return 400, OptimizationReviewErrorSchema(error="Invalid trace ID. Please provide a valid UUIDv4.")
|
||||
try:
|
||||
response_code, output, llm_cost = await get_optimization_review(request, data)
|
||||
except LLMOutputUnparseable as e:
|
||||
except LLMOutputUnparseable:
|
||||
return 422, OptimizationReviewErrorSchema(error="Invalid response")
|
||||
if isinstance(output, OptimizationReviewResponseSchema):
|
||||
review_event = output.review.value
|
||||
|
|
|
|||
|
|
@ -383,6 +383,50 @@ def split_code_into_parts(code: str, python_version: tuple[int, int]) -> CodePar
|
|||
return split_code_with_regex(code)
|
||||
|
||||
|
||||
def repair_preamble(preamble: str, python_version: tuple[int, int]) -> str:
    """Attempt to fix a preamble with syntax errors by removing offending lines.

    Iteratively removes lines that cause syntax errors. Handles multi-line
    constructs (unclosed parens/brackets/strings) by removing contiguous
    blocks of broken lines, since each removal shifts the next reported
    error onto the following broken line.

    Args:
        preamble: The preamble code with potential syntax errors
        python_version: Tuple of (major, minor) Python version

    Returns:
        The repaired preamble (may be empty if nothing is salvageable)

    """
    lines = preamble.splitlines(keepends=True)
    max_attempts = min(len(lines), 15)  # Safety limit against pathological input

    for _ in range(max_attempts):
        current = "".join(lines).strip()
        if not current:
            return ""
        try:
            ast.parse(current, feature_version=python_version)
            return current
        except SyntaxError as e:
            # Without a line number we cannot tell which line to drop.
            if e.lineno is None:
                return ""
            # Remove the offending line (1-based lineno -> 0-based index)
            error_idx = e.lineno - 1
            if 0 <= error_idx < len(lines):
                lines.pop(error_idx)
            else:
                return ""

    # If we exhausted attempts, return whatever is left — but only if it
    # actually parses; otherwise give up and return an empty preamble.
    result = "".join(lines).strip()
    try:
        ast.parse(result, feature_version=python_version)
        return result
    except SyntaxError:
        return ""
|
||||
|
||||
|
||||
def validate_tests_individually(code: str, python_version: tuple[int, int]) -> tuple[str, int]:
|
||||
"""Validate each test function individually and return only valid tests.
|
||||
|
||||
|
|
@ -408,13 +452,19 @@ def validate_tests_individually(code: str, python_version: tuple[int, int]) -> t
|
|||
|
||||
preamble_stripped = parts.preamble.strip()
|
||||
|
||||
# First, check if preamble itself is valid
|
||||
# Check if preamble itself is valid; if not, try to repair it
|
||||
if preamble_stripped:
|
||||
try:
|
||||
ast.parse(preamble_stripped, feature_version=python_version)
|
||||
except SyntaxError as e:
|
||||
logging.debug("Preamble has syntax error at line %s: %s", e.lineno, e.msg)
|
||||
logging.warning("Preamble has syntax error at line %s: %s", e.lineno, e.msg)
|
||||
logging.debug("Preamble content:\n%s", preamble_stripped[:2000])
|
||||
repaired = repair_preamble(preamble_stripped, python_version)
|
||||
if repaired != preamble_stripped:
|
||||
logging.info(
|
||||
"Repaired preamble: removed %d chars of broken code", len(preamble_stripped) - len(repaired)
|
||||
)
|
||||
preamble_stripped = repaired
|
||||
|
||||
for i, test_code in enumerate(parts.test_functions):
|
||||
# Combine preamble with this single test
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ from uuid import UUID, uuid4
|
|||
from django.db.models import F
|
||||
from django.db.models.functions import Coalesce
|
||||
|
||||
from aiservice.common_utils import normalize_trace_id
|
||||
from core.log_features.models import OptimizationEvents, OptimizationFeatures, Repositories
|
||||
|
||||
|
||||
|
|
@ -111,7 +112,10 @@ async def update_optimization_features_review(
|
|||
review_explanation: str | None = None,
|
||||
calling_fn_details: str | None = None,
|
||||
) -> None:
|
||||
trace_uuid = UUID(trace_id)
|
||||
normalized = normalize_trace_id(trace_id)
|
||||
if normalized is None:
|
||||
return
|
||||
trace_uuid = UUID(normalized)
|
||||
# This avoids the race condition and is more performant.
|
||||
await OptimizationFeatures.objects.filter(trace_id=trace_uuid).aupdate(
|
||||
review_quality=review_quality, review_explanation=review_explanation, calling_fn_details=calling_fn_details
|
||||
|
|
|
|||
|
|
@ -179,14 +179,14 @@ def merge_dicts(a: dict[str, dict[str, str]], b: dict[str, dict[str, str]]) -> d
|
|||
return result
|
||||
|
||||
|
||||
@features_api.post("/", response={200: None, 500: LoggingErrorResponseSchema})
|
||||
@features_api.post("/", response={200: None, 400: LoggingErrorResponseSchema, 500: LoggingErrorResponseSchema})
|
||||
async def log_features_cli(
|
||||
request: AuthenticatedRequest, data: LoggingSchema
|
||||
) -> int | tuple[int, LoggingErrorResponseSchema]:
|
||||
try:
|
||||
if hasattr(request, "should_log_features") and request.should_log_features:
|
||||
if not validate_trace_id(data.trace_id):
|
||||
raise ValueError("Invalid UUID")
|
||||
return 400, LoggingErrorResponseSchema(error="Invalid trace ID. Please provide a valid UUIDv4.")
|
||||
|
||||
await log_features(
|
||||
trace_id=data.trace_id,
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ from openai.types.chat import (
|
|||
|
||||
from aiservice.analytics.posthog import ph
|
||||
from aiservice.common.markdown_utils import extract_code_block
|
||||
from aiservice.common_utils import validate_trace_id
|
||||
from aiservice.llm import llm_client
|
||||
from aiservice.llm_models import HAIKU_MODEL
|
||||
from authapp.auth import AuthenticatedRequest
|
||||
|
|
@ -76,6 +77,9 @@ async def testgen_repair(
|
|||
if data.language != "python":
|
||||
return 400, TestRepairErrorSchema(error="Test repair is only supported for Python")
|
||||
|
||||
if not validate_trace_id(data.trace_id):
|
||||
return 400, TestRepairErrorSchema(error="Invalid trace ID. Please provide a valid UUIDv4.")
|
||||
|
||||
ph(request.user, "aiservice-testgen-repair-called")
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ from openai.types.chat import (
|
|||
|
||||
from aiservice.analytics.posthog import ph
|
||||
from aiservice.common.markdown_utils import extract_code_block_with_context
|
||||
from aiservice.common_utils import validate_trace_id
|
||||
from aiservice.llm import llm_client
|
||||
from aiservice.llm_models import HAIKU_MODEL
|
||||
from authapp.auth import AuthenticatedRequest
|
||||
|
|
@ -54,6 +55,9 @@ async def testgen_review(
|
|||
if data.language != "python":
|
||||
return 200, TestgenReviewResponseSchema(reviews=[])
|
||||
|
||||
if not validate_trace_id(data.trace_id):
|
||||
return 400, TestgenReviewErrorSchema(error="Invalid trace ID. Please provide a valid UUIDv4.")
|
||||
|
||||
ph(request.user, "aiservice-testgen-review-called")
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ import pytest
|
|||
|
||||
from core.languages.python.testgen.postprocessing.code_validator import (
|
||||
CodeValidationError,
|
||||
repair_preamble,
|
||||
split_code_with_ast,
|
||||
split_code_with_regex,
|
||||
validate_testgen_code,
|
||||
|
|
@ -649,3 +650,97 @@ async def test_third():
|
|||
|
||||
result = validate_testgen_code(code, python_version=(3, 11))
|
||||
assert result == expected
|
||||
|
||||
|
||||
class TestRepairPreamble:
    """Tests for preamble repair when LLM generates broken import/helper code."""

    def test_repair_removes_single_broken_line(self) -> None:
        # A single syntactically-broken line is dropped; valid lines survive.
        preamble = """import pytest
x = @invalid
import os"""
        result = repair_preamble(preamble, (3, 11))
        assert "import pytest" in result
        assert "import os" in result
        assert "@invalid" not in result

    def test_repair_returns_empty_for_all_broken(self) -> None:
        # Nothing salvageable -> empty string, not an exception.
        preamble = "x = @\ny = %\nz = $"
        result = repair_preamble(preamble, (3, 11))
        assert result == ""

    def test_repair_noop_for_valid_preamble(self) -> None:
        # Already-valid preambles pass through intact.
        preamble = "import pytest\nimport os"
        result = repair_preamble(preamble, (3, 11))
        assert "import pytest" in result
        assert "import os" in result

    def test_repair_handles_empty_string(self) -> None:
        assert repair_preamble("", (3, 11)) == ""

    def test_repair_removes_truncated_string(self) -> None:
        """LLMs sometimes produce unclosed strings in helper code."""
        preamble = """import pytest
EXPECTED = "hello world
import os"""
        result = repair_preamble(preamble, (3, 11))
        # The broken string line should be removed
        assert "import pytest" in result
        assert "hello world" not in result

    def test_repair_removes_incomplete_function(self) -> None:
        # An unclosed signature (multi-line construct) is stripped while the
        # preceding valid helper is kept.
        preamble = """import pytest

def helper():
    x = 1

def broken_helper(
# missing closing paren"""
        result = repair_preamble(preamble, (3, 11))
        assert "import pytest" in result
        assert "def helper" in result
|
||||
|
||||
|
||||
class TestBrokenPreambleValidation:
    """End-to-end tests: broken preamble should not discard valid tests."""

    def test_broken_preamble_keeps_valid_tests(self) -> None:
        """When preamble has a syntax error, valid tests should still be kept."""
        code = """import pytest
x = @invalid_syntax

def test_one():
    assert 1 == 1

def test_two():
    assert 2 == 2"""

        result = validate_testgen_code(code, python_version=(3, 11))
        assert "def test_one" in result
        assert "def test_two" in result

    def test_broken_preamble_with_truncated_string(self) -> None:
        """Truncated string in preamble should not kill all tests."""
        code = """import pytest
EXPECTED = "unterminated

def test_basic():
    assert True"""

        result = validate_testgen_code(code, python_version=(3, 11))
        assert "def test_basic" in result

    def test_broken_preamble_mixed_valid_invalid_tests(self) -> None:
        """Broken preamble + some broken tests should keep only the valid tests."""
        code = """import pytest
x = @bad

def test_valid():
    assert True

def test_broken():
    y = @also_bad"""

        result = validate_testgen_code(code, python_version=(3, 11))
        assert "def test_valid" in result
        assert "test_broken" not in result
|
||||
|
|
|
|||
|
|
@ -307,18 +307,17 @@ export async function createPr(req: Request, res: Response) {
|
|||
return
|
||||
}
|
||||
|
||||
const nickname: string | null = await dependencies.userNickname(userId)
|
||||
// Fetch user nickname and installation octokit in parallel (independent calls)
|
||||
const [nickname, installationOctokit] = await Promise.all([
|
||||
dependencies.userNickname(userId),
|
||||
dependencies.getInstallationOctokitByOwner(dependencies.githubApp, owner, repo, userId),
|
||||
])
|
||||
|
||||
if (nickname == null) {
|
||||
res.status(401).json({ error: "Unauthorized" })
|
||||
return
|
||||
}
|
||||
|
||||
const installationOctokit = await dependencies.getInstallationOctokitByOwner(
|
||||
dependencies.githubApp,
|
||||
owner,
|
||||
repo,
|
||||
userId,
|
||||
)
|
||||
if (installationOctokit instanceof Error) {
|
||||
res.status(401).json({ error: installationOctokit.message })
|
||||
return
|
||||
|
|
@ -509,7 +508,12 @@ export async function createPr(req: Request, res: Response) {
|
|||
if (traceId) {
|
||||
logger.info(`PR creation failed, falling back to staging for traceId: ${traceId}`, req)
|
||||
try {
|
||||
const stagingResult = await saveStagingReview(req.body, userId, organizationId, (req as any).subscriptionInfo)
|
||||
const stagingResult = await saveStagingReview(
|
||||
req.body,
|
||||
userId,
|
||||
organizationId,
|
||||
(req as any).subscriptionInfo,
|
||||
)
|
||||
if (stagingResult.status === 200) {
|
||||
return res.status(200).json({
|
||||
message: "PR creation failed, staging created as fallback",
|
||||
|
|
@ -521,7 +525,7 @@ export async function createPr(req: Request, res: Response) {
|
|||
`Staging fallback returned status ${stagingResult.status}`,
|
||||
req,
|
||||
{ reqBody: req.body, userId, traceId, stagingResult },
|
||||
new Error(`Staging fallback returned status ${stagingResult.status}`)
|
||||
new Error(`Staging fallback returned status ${stagingResult.status}`),
|
||||
)
|
||||
return res.status(stagingResult.status).json({
|
||||
message: "PR creation failed and staging fallback also failed",
|
||||
|
|
@ -532,7 +536,7 @@ export async function createPr(req: Request, res: Response) {
|
|||
`Staging fallback threw an exception:`,
|
||||
req,
|
||||
{ reqBody: req.body, userId, traceId },
|
||||
stagingError as Error
|
||||
stagingError as Error,
|
||||
)
|
||||
return res.status(500).json({
|
||||
message: "PR creation failed and staging fallback threw an error",
|
||||
|
|
@ -693,133 +697,181 @@ export async function triggerCreatePr(
|
|||
owner,
|
||||
repo,
|
||||
})
|
||||
try {
|
||||
// Check existing data first (preserve staging data)
|
||||
const existing = await triggerCreatePrDeps.prisma.optimization_events.findUnique({
|
||||
where: { trace_id: traceId },
|
||||
select: {
|
||||
function_name: true,
|
||||
speedup_x: true,
|
||||
file_path: true,
|
||||
speedup_pct: true,
|
||||
staging_storage_type: true,
|
||||
metadata: true,
|
||||
},
|
||||
})
|
||||
|
||||
const updateData: any = {
|
||||
pr_id: String(newPrData.data.id),
|
||||
pr_url: `https://github.com/${owner}/${repo}/pull/${newPrData.data.number}`,
|
||||
is_optimization_found: true,
|
||||
event_type: "pr_created",
|
||||
}
|
||||
|
||||
// Check if we should clean up plain text data (user is paid OR org has subscription)
|
||||
let shouldCleanupData = isPaidUser
|
||||
if (!shouldCleanupData && organizationId && traceId) {
|
||||
// Check if org has subscription
|
||||
const org = await triggerCreatePrDeps.prisma.organizations.findUnique({
|
||||
where: { id: organizationId },
|
||||
select: { subscription: true },
|
||||
// Run post-PR-creation tasks in parallel:
|
||||
// 1. DB optimization_events update (non-fatal errors caught internally)
|
||||
// 2. GitHub API calls: assign reviewer + add labels (run in parallel with each other)
|
||||
// 3. DB optimization_features update
|
||||
const updateOptimizationEventsTask = (async () => {
|
||||
try {
|
||||
// Check existing data first (preserve staging data)
|
||||
const existing = await triggerCreatePrDeps.prisma.optimization_events.findUnique({
|
||||
where: { trace_id: traceId },
|
||||
select: {
|
||||
function_name: true,
|
||||
speedup_x: true,
|
||||
file_path: true,
|
||||
speedup_pct: true,
|
||||
staging_storage_type: true,
|
||||
metadata: true,
|
||||
},
|
||||
})
|
||||
if (org?.subscription) {
|
||||
shouldCleanupData = true
|
||||
console.log(
|
||||
`[triggerCreatePr] Org has subscription - will cleanup plain text data for traceId: ${traceId}`,
|
||||
)
|
||||
|
||||
const updateData: any = {
|
||||
pr_id: String(newPrData.data.id),
|
||||
pr_url: `https://github.com/${owner}/${repo}/pull/${newPrData.data.number}`,
|
||||
is_optimization_found: true,
|
||||
event_type: "pr_created",
|
||||
}
|
||||
}
|
||||
|
||||
// If user is paid or org has subscription, convert to git_branch storage and clear diffContents
|
||||
if (shouldCleanupData && traceId) {
|
||||
if (existing) {
|
||||
const currentMetadata = (existing.metadata ?? {}) as Record<string, unknown>
|
||||
|
||||
// Remove diffContents from metadata if it exists (plain_text mode stores it there)
|
||||
if (currentMetadata.diffContents) {
|
||||
delete currentMetadata.diffContents
|
||||
// Check if we should clean up plain text data (user is paid OR org has subscription)
|
||||
let shouldCleanupData = isPaidUser
|
||||
if (!shouldCleanupData && organizationId && traceId) {
|
||||
// Check if org has subscription
|
||||
const org = await triggerCreatePrDeps.prisma.organizations.findUnique({
|
||||
where: { id: organizationId },
|
||||
select: { subscription: true },
|
||||
})
|
||||
if (org?.subscription) {
|
||||
shouldCleanupData = true
|
||||
console.log(
|
||||
`[triggerCreatePr] Org has subscription - will cleanup plain text data for traceId: ${traceId}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Update metadata with the new staging branch name
|
||||
currentMetadata.staging_branch_name = newBranchName
|
||||
currentMetadata.storageType = "git_branch"
|
||||
// If user is paid or org has subscription, convert to git_branch storage and clear diffContents
|
||||
if (shouldCleanupData && traceId) {
|
||||
if (existing) {
|
||||
const currentMetadata = (existing.metadata ?? {}) as Record<string, unknown>
|
||||
|
||||
// Add line profiler data if provided and not already present
|
||||
// Remove diffContents from metadata if it exists (plain_text mode stores it there)
|
||||
if (currentMetadata.diffContents) {
|
||||
delete currentMetadata.diffContents
|
||||
}
|
||||
|
||||
// Update metadata with the new staging branch name
|
||||
currentMetadata.staging_branch_name = newBranchName
|
||||
currentMetadata.storageType = "git_branch"
|
||||
|
||||
// Add line profiler data if provided and not already present
|
||||
addLineProfilerToMetadata(currentMetadata, originalLineProfiler, optimizedLineProfiler)
|
||||
|
||||
updateData.staging_storage_type = "git_branch"
|
||||
updateData.metadata = currentMetadata
|
||||
updateData.is_staging = true
|
||||
console.log(
|
||||
`[triggerCreatePr] Paid user/subscribed org: Converting storage to git_branch for traceId: ${traceId}`,
|
||||
)
|
||||
}
|
||||
} else if (traceId && (originalLineProfiler || optimizedLineProfiler)) {
|
||||
// For non-paid users, still add line profiler data if provided
|
||||
const currentMetadata = (existing?.metadata ?? {}) as Record<string, unknown>
|
||||
addLineProfilerToMetadata(currentMetadata, originalLineProfiler, optimizedLineProfiler)
|
||||
|
||||
updateData.staging_storage_type = "git_branch"
|
||||
updateData.metadata = currentMetadata
|
||||
updateData.is_staging = true
|
||||
console.log(
|
||||
`[triggerCreatePr] Paid user/subscribed org: Converting storage to git_branch for traceId: ${traceId}`,
|
||||
)
|
||||
}
|
||||
} else if (traceId && (originalLineProfiler || optimizedLineProfiler)) {
|
||||
// For non-paid users, still add line profiler data if provided
|
||||
const currentMetadata = (existing?.metadata ?? {}) as Record<string, unknown>
|
||||
addLineProfilerToMetadata(currentMetadata, originalLineProfiler, optimizedLineProfiler)
|
||||
updateData.metadata = currentMetadata
|
||||
}
|
||||
// Only add if missing (preserve staging data)
|
||||
if (prCommentFields) {
|
||||
if (!existing?.function_name && prCommentFields.function_name) {
|
||||
updateData.function_name = prCommentFields.function_name
|
||||
// Only add if missing (preserve staging data)
|
||||
if (prCommentFields) {
|
||||
if (!existing?.function_name && prCommentFields.function_name) {
|
||||
updateData.function_name = prCommentFields.function_name
|
||||
}
|
||||
if (!existing?.file_path && prCommentFields.file_path) {
|
||||
updateData.file_path = prCommentFields.file_path
|
||||
}
|
||||
if (existing?.speedup_x == null && prCommentFields.speedup_x) {
|
||||
updateData.speedup_x = parseSpeedupValue(prCommentFields.speedup_x, "x")
|
||||
}
|
||||
if (existing?.speedup_pct == null && prCommentFields.speedup_pct) {
|
||||
updateData.speedup_pct = parseSpeedupValue(prCommentFields.speedup_pct, "%")
|
||||
}
|
||||
}
|
||||
if (!existing?.file_path && prCommentFields.file_path) {
|
||||
updateData.file_path = prCommentFields.file_path
|
||||
}
|
||||
if (existing?.speedup_x == null && prCommentFields.speedup_x) {
|
||||
updateData.speedup_x = parseSpeedupValue(prCommentFields.speedup_x, "x")
|
||||
}
|
||||
if (existing?.speedup_pct == null && prCommentFields.speedup_pct) {
|
||||
updateData.speedup_pct = parseSpeedupValue(prCommentFields.speedup_pct, "%")
|
||||
}
|
||||
}
|
||||
|
||||
await triggerCreatePrDeps.prisma.optimization_events.update({
|
||||
where: { trace_id: traceId },
|
||||
data: updateData,
|
||||
})
|
||||
} catch (eventError) {
|
||||
logger.error(
|
||||
"Failed to update optimization event:",
|
||||
{
|
||||
userId,
|
||||
endpoint: "/cfapi/create-pr",
|
||||
operation: "update_optimization_event",
|
||||
owner,
|
||||
repo,
|
||||
},
|
||||
{},
|
||||
eventError as Error,
|
||||
)
|
||||
}
|
||||
await triggerCreatePrDeps.prisma.optimization_events.update({
|
||||
where: { trace_id: traceId },
|
||||
data: updateData,
|
||||
})
|
||||
} catch (eventError) {
|
||||
logger.error(
|
||||
"Failed to update optimization event:",
|
||||
{
|
||||
userId,
|
||||
endpoint: "/cfapi/create-pr",
|
||||
operation: "update_optimization_event",
|
||||
owner,
|
||||
repo,
|
||||
},
|
||||
{},
|
||||
eventError as Error,
|
||||
)
|
||||
}
|
||||
})()
|
||||
|
||||
await triggerCreatePrDeps.assignReviewer(
|
||||
installationOctokit,
|
||||
owner,
|
||||
repo,
|
||||
newPrData.data.number,
|
||||
nickname,
|
||||
)
|
||||
await triggerCreatePrDeps.addLabelToPullRequest(
|
||||
installationOctokit,
|
||||
owner,
|
||||
repo,
|
||||
newPrData.data.number,
|
||||
)
|
||||
if (optimizationReview) {
|
||||
await triggerCreatePrDeps.addLabelToPullRequest(
|
||||
// Run reviewer assignment and label additions in parallel
|
||||
const githubPostPrTasks: Promise<void>[] = [
|
||||
triggerCreatePrDeps.assignReviewer(
|
||||
installationOctokit,
|
||||
owner,
|
||||
repo,
|
||||
newPrData.data.number,
|
||||
`🎯 Quality: ${optimizationReview.charAt(0).toUpperCase() + optimizationReview.slice(1).toLowerCase()}`,
|
||||
"FFC043",
|
||||
"Optimization Quality according to Codeflash",
|
||||
nickname,
|
||||
),
|
||||
triggerCreatePrDeps.addLabelToPullRequest(
|
||||
installationOctokit,
|
||||
owner,
|
||||
repo,
|
||||
newPrData.data.number,
|
||||
),
|
||||
]
|
||||
if (optimizationReview) {
|
||||
githubPostPrTasks.push(
|
||||
triggerCreatePrDeps.addLabelToPullRequest(
|
||||
installationOctokit,
|
||||
owner,
|
||||
repo,
|
||||
newPrData.data.number,
|
||||
`🎯 Quality: ${optimizationReview.charAt(0).toUpperCase() + optimizationReview.slice(1).toLowerCase()}`,
|
||||
"FFC043",
|
||||
"Optimization Quality according to Codeflash",
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
const updateOptimizationFeaturesTask = (async () => {
|
||||
if (traceId !== "") {
|
||||
let pull_request_db = await triggerCreatePrDeps.prisma.optimization_features.findUnique({
|
||||
where: {
|
||||
trace_id: traceId,
|
||||
},
|
||||
select: {
|
||||
pull_request: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (pull_request_db) {
|
||||
if (pull_request_db.pull_request === null || pull_request_db.pull_request === undefined) {
|
||||
pull_request_db.pull_request = {}
|
||||
}
|
||||
|
||||
;(pull_request_db.pull_request as any).new_pr_url = newPrData.data.html_url
|
||||
|
||||
await triggerCreatePrDeps.prisma.optimization_features.update({
|
||||
where: {
|
||||
trace_id: traceId,
|
||||
},
|
||||
data: {
|
||||
pull_request: pull_request_db.pull_request,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
})()
|
||||
|
||||
// Wait for all post-PR tasks in parallel
|
||||
await Promise.all([
|
||||
updateOptimizationEventsTask,
|
||||
Promise.all(githubPostPrTasks),
|
||||
updateOptimizationFeaturesTask,
|
||||
])
|
||||
|
||||
logger.info(`Created new PR #${newPrData.data.number} with branch ${newPrData.data.head.ref}`, {
|
||||
userId,
|
||||
endpoint: "/cfapi/create-pr",
|
||||
|
|
@ -839,34 +891,6 @@ export async function triggerCreatePr(
|
|||
},
|
||||
})
|
||||
|
||||
if (traceId !== "") {
|
||||
let pull_request_db = await triggerCreatePrDeps.prisma.optimization_features.findUnique({
|
||||
where: {
|
||||
trace_id: traceId,
|
||||
},
|
||||
select: {
|
||||
pull_request: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (pull_request_db) {
|
||||
if (pull_request_db.pull_request === null || pull_request_db.pull_request === undefined) {
|
||||
pull_request_db.pull_request = {}
|
||||
}
|
||||
|
||||
;(pull_request_db.pull_request as any).new_pr_url = newPrData.data.html_url
|
||||
|
||||
await triggerCreatePrDeps.prisma.optimization_features.update({
|
||||
where: {
|
||||
trace_id: traceId,
|
||||
},
|
||||
data: {
|
||||
pull_request: pull_request_db.pull_request,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return newPrData.data.number
|
||||
} catch (error) {
|
||||
logger.errorWithSentry(
|
||||
|
|
|
|||
343
js/cf-webapp/LANDING_PAGE_PERFORMANCE.md
Normal file
343
js/cf-webapp/LANDING_PAGE_PERFORMANCE.md
Normal file
|
|
@ -0,0 +1,343 @@
|
|||
# Landing Page Performance Audit -- www.codeflash.ai
|
||||
|
||||
**Date:** 2026-04-10
|
||||
**Site:** https://www.codeflash.ai (Webflow-hosted)
|
||||
**Dashboard:** https://app.codeflash.ai (Next.js, changes committed in this branch)
|
||||
|
||||
---
|
||||
|
||||
## Summary of Findings
|
||||
|
||||
The landing page has several performance issues causing slow initial load and poor Core Web Vitals:
|
||||
|
||||
| Issue | Impact | Severity |
|
||||
| ----------------------------------------------------------------------------- | ------------------------------------------------------------- | -------- |
|
||||
| 15 videos with `autoplay` + no `preload` attr (defaults to `preload="auto"`) | ~16.3 MB total video data downloaded eagerly | Critical |
|
||||
| 4 render-blocking scripts (jQuery 89KB, Swiper 151KB, GSAP 73KB, Webflow 5KB) | Blocks first paint by ~300ms+ | High |
|
||||
| OTF fonts instead of WOFF2 (2 fonts @ 118KB + 113KB) | 2.3x larger than WOFF2 equivalent | High |
|
||||
| 91 images (14 eager, 59 lazy, 18 with no loading attr) | Above-fold logo images marked `loading="lazy"` | Medium |
|
||||
| 143KB HTML document | Inline CSS (11.5KB) + inline JS (11.4KB) + 1,428 DOM elements | Medium |
|
||||
| 135KB main CSS file loaded render-blocking | Single large stylesheet blocks paint | Medium |
|
||||
| No `fetchpriority="high"` on LCP image/video | Browser cannot prioritize LCP resource | Medium |
|
||||
| No `decoding="async"` on any images | All 73 images decoded synchronously | Low |
|
||||
| Missing preconnect for third-party origins | Crisp, PostHog, Swiper CDN, GitHub | Low |
|
||||
|
||||
---
|
||||
|
||||
## Critical: Video Loading Strategy
|
||||
|
||||
**Current state:** All 15 `<video>` tags have `autoplay` and no `preload` attribute. When `preload` is omitted, the default is user-agent-defined, and most desktop browsers treat it as `preload="auto"`, which permits downloading the entire video file. With 15 videos totaling 16.3 MB, the browser may attempt to download all of them on page load.
|
||||
|
||||
**Recommended fix (in Webflow custom code -- before `</body>`):**
|
||||
|
||||
```html
|
||||
<script>
|
||||
// Defer video loading: replace autoplay with intersection-observer-based playback.
|
||||
// Videos below the fold will not load until scrolled into view.
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
var videos = document.querySelectorAll("video[autoplay]")
|
||||
|
||||
// Keep only the first video (hero) autoplaying
|
||||
var heroVideo = document.getElementById("e9d39971-d9e1-d67b-fe47-f58be9f34e3e-video")
|
||||
|
||||
videos.forEach(function (video) {
|
||||
if (video === heroVideo) return // Skip hero video
|
||||
|
||||
// Pause and set preload to none for off-screen videos
|
||||
video.pause()
|
||||
video.preload = "none"
|
||||
|
||||
// Remove autoplay to prevent browser from re-triggering
|
||||
video.removeAttribute("autoplay")
|
||||
|
||||
// Use IntersectionObserver to play when visible
|
||||
var observer = new IntersectionObserver(
|
||||
function (entries) {
|
||||
entries.forEach(function (entry) {
|
||||
if (entry.isIntersecting) {
|
||||
entry.target.preload = "auto"
|
||||
entry.target.play().catch(function () {})
|
||||
} else {
|
||||
entry.target.pause()
|
||||
}
|
||||
})
|
||||
},
|
||||
{ threshold: 0.25 },
|
||||
)
|
||||
|
||||
observer.observe(video)
|
||||
})
|
||||
})
|
||||
</script>
|
||||
```
|
||||
|
||||
**Estimated savings:** ~14 MB of initial bandwidth (keeping only the hero video loading eagerly).
|
||||
|
||||
**Hero video specific fix:** Add `preload="metadata"` to the hero video in Webflow designer. This downloads only the first few KB needed for dimensions/poster, then starts streaming on play.
|
||||
|
||||
---
|
||||
|
||||
## High: Render-Blocking Scripts
|
||||
|
||||
**Current render-blocking chain:**
|
||||
|
||||
1. `jquery-3.5.1.min.js` -- 89 KB (CloudFront)
|
||||
2. `swiper-bundle.min.js` -- 151 KB (jsDelivr)
|
||||
3. `webflow.*.js` -- 5 KB (Webflow CDN)
|
||||
4. `gsap.min.js` -- 73 KB (Webflow CDN)
|
||||
|
||||
Total: ~318 KB of JavaScript blocking first paint.
|
||||
|
||||
**Fix in Webflow custom code (head section):**
|
||||
|
||||
Replace the Swiper CSS embed block. Currently:
|
||||
|
||||
```html
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/swiper@11/swiper-bundle.min.css" />
|
||||
```
|
||||
|
||||
Change to async loading:
|
||||
|
||||
```html
|
||||
<link
|
||||
rel="preload"
|
||||
href="https://cdn.jsdelivr.net/npm/swiper@11/swiper-bundle.min.css"
|
||||
as="style"
|
||||
onload="this.onload=null;this.rel='stylesheet'"
|
||||
/>
|
||||
<noscript
|
||||
><link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/swiper@11/swiper-bundle.min.css"
|
||||
/></noscript>
|
||||
```
|
||||
|
||||
For the Swiper JS, add `defer` in the embed:
|
||||
|
||||
```html
|
||||
<script defer src="https://cdn.jsdelivr.net/npm/swiper@11/swiper-bundle.min.js"></script>
|
||||
```
|
||||
|
||||
**Note:** jQuery and Webflow JS are injected by Webflow itself and cannot be deferred through custom code. The Swiper and GSAP scripts are loaded via custom embed blocks and CAN be deferred.
|
||||
|
||||
---
|
||||
|
||||
## High: Font Format Optimization
|
||||
|
||||
**Current state:** Two PP Neue Montreal fonts are served as OTF (118KB + 113KB = 231KB). The Monaspace Neon font is already WOFF2 (132KB).
|
||||
|
||||
**Fix:** Convert the OTF fonts to WOFF2 format and re-upload to Webflow:
|
||||
|
||||
1. Download `ppneuemontreal-medium.otf` and `ppneuemontreal-book.otf`
|
||||
2. Convert using `woff2_compress` or an online tool like CloudConvert
|
||||
3. Expected size reduction: ~60% (231KB -> ~90KB)
|
||||
4. Upload WOFF2 versions to Webflow Assets
|
||||
5. Update the font-face declarations to reference WOFF2 files
|
||||
|
||||
In Webflow custom code (head), add `font-display: swap` to prevent FOIT. Note that `@font-face` descriptors do not cascade across rules — a `src`-less rule like the sketch below registers a new, source-less face instead of patching the existing one — so the override must repeat the full declaration (including `src` and weight) for each face:
|
||||
|
||||
```html
|
||||
<style>
|
||||
@font-face {
|
||||
font-family: "Ppneuemontreal";
|
||||
font-display: swap;
|
||||
}
|
||||
</style>
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Medium: Image Loading Optimization
|
||||
|
||||
**Issue 1:** The logo image in the navbar is marked `loading="lazy"`. Since the navbar is always visible above the fold, this delays LCP.
|
||||
|
||||
**Fix in Webflow:** Select the logo `<img>` element and set loading to "Eager" (or remove the lazy attribute).
|
||||
|
||||
**Issue 2:** No images use `fetchpriority="high"`. The LCP element (hero image or video) should have this.
|
||||
|
||||
**Fix in Webflow custom code (head)** — note: a script running on `DOMContentLoaded` sets `fetchPriority` after the browser's preload scanner has usually already requested the image, so prefer setting the `fetchpriority="high"` attribute directly on the element in the Designer; the script below is only a fallback:
|
||||
|
||||
```html
|
||||
<script>
|
||||
// Set fetchpriority=high on hero elements for faster LCP
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
var heroImg = document.querySelector(".hero_visual img")
|
||||
if (heroImg) heroImg.fetchPriority = "high"
|
||||
})
|
||||
</script>
|
||||
```
|
||||
|
||||
**Issue 3:** No images use `decoding="async"`.
|
||||
|
||||
**Fix in Webflow custom code (before `</body>`):**
|
||||
|
||||
```html
|
||||
<script>
|
||||
// Set async decoding on all lazy-loaded images
|
||||
document.querySelectorAll('img[loading="lazy"]').forEach(function (img) {
|
||||
img.decoding = "async"
|
||||
})
|
||||
</script>
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Medium: Add Missing Resource Hints
|
||||
|
||||
**Current preconnect origins:**
|
||||
|
||||
- `cdn.prod.website-files.com` (exists)
|
||||
|
||||
**Missing preconnects (add to Webflow head custom code):**
|
||||
|
||||
```html
|
||||
<link rel="preconnect" href="https://cdn.jsdelivr.net" crossorigin />
|
||||
<link rel="preconnect" href="https://client.crisp.chat" crossorigin />
|
||||
<link rel="dns-prefetch" href="https://d3e54v103j8qbb.cloudfront.net" />
|
||||
<link rel="dns-prefetch" href="https://buttons.github.io" />
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Low: Consolidated Custom Script
|
||||
|
||||
The current page has 11 inline scripts totaling 11.4KB, many with duplicate `DOMContentLoaded` listeners. Here is a consolidated version that combines all custom logic into a single script block:
|
||||
|
||||
**Replace ALL custom before-body scripts with this single block:**
|
||||
|
||||
```html
|
||||
<script>
|
||||
;(function () {
|
||||
"use strict"
|
||||
|
||||
// --- Nav banner dismiss (runs immediately, no DOMContentLoaded needed) ---
|
||||
if (sessionStorage.getItem("hide-nav-banner") === "true") {
|
||||
document.documentElement.classList.add("hide-nav-banner")
|
||||
}
|
||||
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
// --- Nav banner close buttons ---
|
||||
document.querySelectorAll(".nav_banner_close_wrap").forEach(function (btn) {
|
||||
btn.addEventListener("click", function () {
|
||||
sessionStorage.setItem("hide-nav-banner", "true")
|
||||
document.documentElement.classList.add("hide-nav-banner")
|
||||
})
|
||||
})
|
||||
document.querySelectorAll(".nav_skip_wrap").forEach(function (btn) {
|
||||
btn.addEventListener("click", function () {
|
||||
sessionStorage.setItem("hide-nav-banner", "true")
|
||||
document.documentElement.classList.add("hide-nav-banner")
|
||||
})
|
||||
})
|
||||
|
||||
// --- Dynamic year ---
|
||||
document.querySelectorAll("[data-dynamic-year]").forEach(function (el) {
|
||||
el.textContent = new Date().getFullYear()
|
||||
})
|
||||
|
||||
// --- Desktop-only hover interactions ---
|
||||
if (window.innerWidth >= 992) {
|
||||
var interactions = [
|
||||
{ container: ".fast-code_grid", card: ".fast-code_card.is-active" },
|
||||
{ container: ".ways_component", card: ".ways_card_wrap.is-active" },
|
||||
{ container: ".hiw_grid", card: ".hiw_card_wrap.is-active" },
|
||||
]
|
||||
interactions.forEach(function (cfg) {
|
||||
var container = document.querySelector(cfg.container)
|
||||
var card = container && container.querySelector(cfg.card)
|
||||
if (container && card) {
|
||||
container.addEventListener("mouseenter", function () {
|
||||
card.classList.remove("is-active")
|
||||
})
|
||||
container.addEventListener("mouseleave", function () {
|
||||
card.classList.add("is-active")
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// --- Async image decoding ---
|
||||
document.querySelectorAll('img[loading="lazy"]').forEach(function (img) {
|
||||
img.decoding = "async"
|
||||
})
|
||||
|
||||
// --- Deferred video loading (non-hero videos) ---
|
||||
var heroVideoId = "e9d39971-d9e1-d67b-fe47-f58be9f34e3e-video"
|
||||
document.querySelectorAll("video[autoplay]").forEach(function (video) {
|
||||
if (video.id === heroVideoId) return
|
||||
video.pause()
|
||||
video.preload = "none"
|
||||
video.removeAttribute("autoplay")
|
||||
var obs = new IntersectionObserver(
|
||||
function (entries) {
|
||||
entries.forEach(function (entry) {
|
||||
if (entry.isIntersecting) {
|
||||
entry.target.preload = "auto"
|
||||
entry.target.play().catch(function () {})
|
||||
} else {
|
||||
entry.target.pause()
|
||||
}
|
||||
})
|
||||
},
|
||||
{ threshold: 0.25 },
|
||||
)
|
||||
obs.observe(video)
|
||||
})
|
||||
})
|
||||
})()
|
||||
</script>
|
||||
```
|
||||
|
||||
This replaces scripts 6, 9, 10, 11, 12, 13, and the video defer logic -- reducing 7 separate `DOMContentLoaded` listeners to 1 and removing ~2KB of duplicated boilerplate.
|
||||
|
||||
---
|
||||
|
||||
## Dashboard App Fixes (committed in this branch)
|
||||
|
||||
These changes are in `js/cf-webapp/`:
|
||||
|
||||
### 1. Suspense boundary for PostHogPageView (Critical)
|
||||
|
||||
**File:** `src/app/layout.tsx`
|
||||
|
||||
`PostHogPageView` uses `useSearchParams()` without a `<Suspense>` boundary. In Next.js App Router, this forces the entire route to opt out of static rendering and become fully dynamic on every request. Wrapping it in `<Suspense>` allows the rest of the page to render statically while the search-params-dependent component streams in.
|
||||
|
||||
### 2. Third-party scripts moved to `lazyOnload` (High)
|
||||
|
||||
**File:** `src/app/layout.tsx`
|
||||
|
||||
Intercom and Crisp chat scripts were using `strategy="afterInteractive"` and placed inside `<head>`. Changed to:
|
||||
|
||||
- `strategy="lazyOnload"` -- loads after the page is fully interactive
|
||||
- Moved from `<head>` to `<body>` -- proper placement per Next.js docs
|
||||
|
||||
This defers ~200KB+ of third-party chat widget JavaScript until after the user can interact with the page.
|
||||
|
||||
### 3. Font optimization (Medium)
|
||||
|
||||
**File:** `src/app/layout.tsx`
|
||||
|
||||
- Added `display: "swap"` to Inter font to prevent invisible text during load
|
||||
- Reduced JetBrains Mono from 5 weights (300,400,500,600,700) to 2 (400,600)
|
||||
- Saves ~3 font file network requests
|
||||
- Weights 300, 500, 700 were not used anywhere in the codebase
|
||||
|
||||
### 4. JetBrains Mono properly mapped to font-mono (Medium)
|
||||
|
||||
**File:** `tailwind.config.ts`
|
||||
|
||||
The JetBrains Mono font was loaded but never mapped to Tailwind's `font-mono` utility. All `font-mono` usage was falling back to the browser default monospace font. Added the mapping so the loaded font is actually used.
|
||||
|
||||
---
|
||||
|
||||
## Estimated Total Impact
|
||||
|
||||
| Fix | Metric Improved | Estimated Gain |
|
||||
| ---------------------------- | ----------------------- | ----------------------------------- |
|
||||
| Video lazy-loading | LCP, bandwidth | ~14 MB less initial transfer |
|
||||
| Render-blocking script defer | FCP, LCP | ~200-400ms faster first paint |
|
||||
| OTF to WOFF2 fonts | FCP | ~140KB less transfer |
|
||||
| Suspense for PostHogPageView | TTFB, static generation | Pages can be statically cached |
|
||||
| Chat scripts to lazyOnload | TBT, TTI | ~200KB+ deferred from critical path |
|
||||
| Font weight reduction | Transfer size | ~3 fewer font requests |
|
||||
| Resource hints | Connection setup | ~50-100ms for third-party resources |
|
||||
| Image loading fixes | LCP | Logo renders without lazy delay |
|
||||
| Consolidated scripts | Parse time | 7 scripts -> 1, ~2KB less code |
|
||||
|
|
@ -48,6 +48,23 @@ const nextConfig = {
|
|||
},
|
||||
serverExternalPackages: ["@anthropic-ai/sdk", "sharp"],
|
||||
experimental: {
|
||||
// Tree-shake barrel exports for these heavy packages. Without this,
|
||||
// importing a single icon from lucide-react or a single component from
|
||||
// chart.js pulls the entire library into the bundle.
|
||||
optimizePackageImports: [
|
||||
"lucide-react",
|
||||
"date-fns",
|
||||
"react-syntax-highlighter",
|
||||
"chart.js",
|
||||
"react-chartjs-2",
|
||||
"motion",
|
||||
"@radix-ui/react-dialog",
|
||||
"@radix-ui/react-dropdown-menu",
|
||||
"@radix-ui/react-select",
|
||||
"@radix-ui/react-tabs",
|
||||
"@radix-ui/react-tooltip",
|
||||
"@radix-ui/react-toast",
|
||||
],
|
||||
serverActions: {
|
||||
allowedOrigins: ["app.codeflash.ai", "localhost:3000"],
|
||||
bodySizeLimit: '5mb', // Increased from default 1mb to handle large PR creation payloads
|
||||
|
|
|
|||
|
|
@ -95,7 +95,9 @@ export default function OnboardingPage() {
|
|||
const getSteps = () => steps
|
||||
|
||||
useEffect(() => {
|
||||
const timer = showIntro ? setTimeout(() => setShowIntro(false), 2400) : null
|
||||
// Reduced from 2400ms to 1200ms to improve LCP — the intro blocks
|
||||
// the main content which is the Largest Contentful Paint element.
|
||||
const timer = showIntro ? setTimeout(() => setShowIntro(false), 1200) : null
|
||||
return () => {
|
||||
if (timer) clearTimeout(timer)
|
||||
}
|
||||
|
|
@ -286,7 +288,7 @@ export default function OnboardingPage() {
|
|||
<motion.div
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
transition={{ duration: 0.8 }}
|
||||
transition={{ duration: 0.5 }}
|
||||
>
|
||||
<motion.div
|
||||
initial={{ scale: 0.8, opacity: 0 }}
|
||||
|
|
@ -295,7 +297,7 @@ export default function OnboardingPage() {
|
|||
type: "spring",
|
||||
stiffness: 260,
|
||||
damping: 20,
|
||||
delay: 0.3,
|
||||
delay: 0.1,
|
||||
}}
|
||||
>
|
||||
<LogoBox />
|
||||
|
|
@ -305,7 +307,7 @@ export default function OnboardingPage() {
|
|||
className="text-3xl font-bold mt-6 text-foreground tracking-tight text-center"
|
||||
initial={{ opacity: 0, y: 10 }}
|
||||
animate={{ opacity: 1, y: 0 }}
|
||||
transition={{ duration: 0.6, delay: 0.7 }}
|
||||
transition={{ duration: 0.4, delay: 0.3 }}
|
||||
>
|
||||
Getting Started
|
||||
</motion.h1>
|
||||
|
|
@ -314,7 +316,7 @@ export default function OnboardingPage() {
|
|||
className="text-muted-foreground mt-3 text-center max-w-sm font-medium"
|
||||
initial={{ opacity: 0, y: 10 }}
|
||||
animate={{ opacity: 1, y: 0 }}
|
||||
transition={{ duration: 0.6, delay: 0.9 }}
|
||||
transition={{ duration: 0.4, delay: 0.5 }}
|
||||
>
|
||||
{"Let's set up your Codeflash account"}
|
||||
</motion.p>
|
||||
|
|
@ -322,7 +324,7 @@ export default function OnboardingPage() {
|
|||
<motion.div
|
||||
initial={{ width: 0 }}
|
||||
animate={{ width: 120 }}
|
||||
transition={{ duration: 1.2, delay: 1.2, ease: "easeInOut" }}
|
||||
transition={{ duration: 0.6, delay: 0.6, ease: "easeInOut" }}
|
||||
className="h-1 bg-primary/80 rounded-full mt-8 mx-auto"
|
||||
/>
|
||||
</motion.div>
|
||||
|
|
|
|||
|
|
@ -425,7 +425,30 @@ export async function getOptimizationPRs(
|
|||
? `AND oe.repository_id = '${String(repositoryId).replace(/'/g, "''")}'`
|
||||
: ""
|
||||
|
||||
const whereClause = `
|
||||
// Separate WHERE clauses: the count query uses EXISTS to avoid joining the
|
||||
// large optimization_features table when oe.pr_url already satisfies the
|
||||
// "has a PR" condition. The data query still LEFT JOINs to pull fallback
|
||||
// fields but only for the small LIMIT'd result set.
|
||||
const prCondition = `
|
||||
AND oe.is_optimization_found = true
|
||||
AND (
|
||||
oe.pr_url IS NOT NULL
|
||||
OR EXISTS (
|
||||
SELECT 1 FROM optimization_features of2
|
||||
WHERE of2.trace_id = oe.trace_id
|
||||
AND of2.pull_request IS NOT NULL
|
||||
)
|
||||
)
|
||||
`
|
||||
|
||||
const countWhereClause = `
|
||||
${accountCondition}
|
||||
${eventTypeCondition}
|
||||
${repositoryCondition}
|
||||
${prCondition}
|
||||
`
|
||||
|
||||
const dataWhereClause = `
|
||||
${accountCondition}
|
||||
${eventTypeCondition}
|
||||
${repositoryCondition}
|
||||
|
|
@ -438,7 +461,9 @@ export async function getOptimizationPRs(
|
|||
|
||||
const offset = (page - 1) * pageSize
|
||||
|
||||
// Optimized query - simplified COALESCE chains and extracted common speedup parsing logic
|
||||
// Run data + count queries in parallel.
|
||||
// Count uses EXISTS (no JOIN on optimization_features).
|
||||
// Data query JOINs optimization_features only for the LIMIT'd rows.
|
||||
const [events, countRows] = await Promise.all([
|
||||
prisma.$queryRawUnsafe<
|
||||
Array<{
|
||||
|
|
@ -514,7 +539,7 @@ export async function getOptimizationPRs(
|
|||
FROM optimization_events oe
|
||||
LEFT JOIN optimization_features of ON oe.trace_id = of.trace_id
|
||||
LEFT JOIN repositories r ON oe.repository_id = r.id
|
||||
WHERE ${whereClause}
|
||||
WHERE ${dataWhereClause}
|
||||
ORDER BY oe.created_at DESC
|
||||
LIMIT ${pageSize} OFFSET ${offset}
|
||||
`,
|
||||
|
|
@ -523,8 +548,7 @@ export async function getOptimizationPRs(
|
|||
`
|
||||
SELECT COUNT(*)::bigint AS count
|
||||
FROM optimization_events oe
|
||||
LEFT JOIN optimization_features of ON oe.trace_id = of.trace_id
|
||||
WHERE ${whereClause}
|
||||
WHERE ${countWhereClause}
|
||||
`,
|
||||
),
|
||||
])
|
||||
|
|
|
|||
|
|
@ -6,20 +6,28 @@ import Script from "next/script"
|
|||
import { ViewModeProvider } from "../app/ViewModeContext"
|
||||
import { PrivacyModeProvider } from "../app/PrivacyModeContext"
|
||||
import { DashboardShell } from "@/components/dashboard-shell"
|
||||
import { getDashboardInitData } from "../app/init-data-action"
|
||||
|
||||
export default async function DashboardLayout({ children }: { children: ReactNode }) {
|
||||
const session = await auth0.getSession()
|
||||
if (!session) return null
|
||||
|
||||
const completedOnboarding = await hasCompletedOnboarding(session.user.sub)
|
||||
const [completedOnboarding, initData] = await Promise.all([
|
||||
hasCompletedOnboarding(session.user.sub),
|
||||
getDashboardInitData(session.user.sub),
|
||||
])
|
||||
if (!completedOnboarding) {
|
||||
redirect("/onboarding")
|
||||
}
|
||||
|
||||
return (
|
||||
<ViewModeProvider user={session.user}>
|
||||
<PrivacyModeProvider userId={session.user.sub}>
|
||||
<DashboardShell user={session.user}>
|
||||
<ViewModeProvider user={session.user} initialOrganizations={initData.organizations}>
|
||||
<PrivacyModeProvider
|
||||
userId={session.user.sub}
|
||||
initialPrivacyMode={initData.privacyMode}
|
||||
initialCanUsePrivacyMode={initData.canUsePrivacyMode}
|
||||
>
|
||||
<DashboardShell user={session.user} initialSubscription={initData.subscription}>
|
||||
<Script
|
||||
id="crisp-chat-script"
|
||||
strategy="afterInteractive"
|
||||
|
|
|
|||
|
|
@ -785,6 +785,7 @@ export function OptimizationsTable({
|
|||
src={`https://github.com/${event.repository.full_name.split("/")[0]}.png`}
|
||||
alt={event.repository.full_name}
|
||||
fill
|
||||
sizes="32px"
|
||||
className="rounded-full object-cover"
|
||||
onError={e => {
|
||||
e.currentTarget.style.display = "none"
|
||||
|
|
|
|||
|
|
@ -36,27 +36,39 @@ const STORAGE_KEY = "privacyMode"
|
|||
interface PrivacyModeProviderProps {
|
||||
children: React.ReactNode
|
||||
userId?: string
|
||||
initialPrivacyMode?: boolean
|
||||
initialCanUsePrivacyMode?: boolean
|
||||
}
|
||||
|
||||
export function PrivacyModeProvider({ children, userId }: PrivacyModeProviderProps) {
|
||||
const [isPrivacyMode, setIsPrivacyMode] = useState<boolean>(false)
|
||||
const [isLoading, setIsLoading] = useState(true)
|
||||
const [canUsePrivacyMode, setCanUsePrivacyMode] = useState(false)
|
||||
export function PrivacyModeProvider({
|
||||
children,
|
||||
userId,
|
||||
initialPrivacyMode,
|
||||
initialCanUsePrivacyMode,
|
||||
}: PrivacyModeProviderProps) {
|
||||
const hasInitialData = initialPrivacyMode !== undefined && initialCanUsePrivacyMode !== undefined
|
||||
const [isPrivacyMode, setIsPrivacyMode] = useState<boolean>(
|
||||
hasInitialData ? (initialCanUsePrivacyMode ? initialPrivacyMode : false) : false,
|
||||
)
|
||||
const [isLoading, setIsLoading] = useState(!hasInitialData)
|
||||
const [canUsePrivacyMode, setCanUsePrivacyMode] = useState(initialCanUsePrivacyMode ?? false)
|
||||
const { mode, currentOrg } = useViewMode()
|
||||
|
||||
const isOrgMode = mode === "organization" && !!currentOrg
|
||||
const isOrgAdmin = isOrgMode && (currentOrg?.role === "admin" || currentOrg?.role === "owner")
|
||||
|
||||
// Can toggle: personal mode (if paid) OR org mode (if admin and org has subscription)
|
||||
const canTogglePrivacyMode = isOrgMode ? (isOrgAdmin && canUsePrivacyMode) : canUsePrivacyMode
|
||||
const canTogglePrivacyMode = isOrgMode ? isOrgAdmin && canUsePrivacyMode : canUsePrivacyMode
|
||||
|
||||
// Track whether we've used initial data for the first personal-mode render
|
||||
const usedInitialDataRef = React.useRef(false)
|
||||
|
||||
// Load saved preference from database
|
||||
useEffect(() => {
|
||||
const loadPrivacyMode = async () => {
|
||||
setIsLoading(true)
|
||||
|
||||
if (isOrgMode && currentOrg?.id) {
|
||||
// Organization mode - fetch org privacy settings
|
||||
// Organization mode - always fetch org-specific privacy settings
|
||||
setIsLoading(true)
|
||||
try {
|
||||
const [orgPrivacyMode, orgCanUse] = await Promise.all([
|
||||
getOrgPrivacyMode(currentOrg.id),
|
||||
|
|
@ -69,8 +81,22 @@ export function PrivacyModeProvider({ children, userId }: PrivacyModeProviderPro
|
|||
setIsPrivacyMode(false)
|
||||
setCanUsePrivacyMode(false)
|
||||
}
|
||||
setIsLoading(false)
|
||||
} else if (userId) {
|
||||
// Personal mode - fetch user privacy settings
|
||||
// Personal mode - skip fetch if initial data was provided by the server
|
||||
if (hasInitialData && !usedInitialDataRef.current) {
|
||||
usedInitialDataRef.current = true
|
||||
// Sync localStorage with server-provided value
|
||||
try {
|
||||
const effectivePrivacyMode = initialCanUsePrivacyMode ? initialPrivacyMode : false
|
||||
localStorage.setItem(STORAGE_KEY, String(effectivePrivacyMode))
|
||||
} catch {
|
||||
// localStorage not available
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
setIsLoading(true)
|
||||
// First, load from localStorage for instant UI
|
||||
try {
|
||||
const saved = localStorage.getItem(STORAGE_KEY)
|
||||
|
|
@ -98,13 +124,19 @@ export function PrivacyModeProvider({ children, userId }: PrivacyModeProviderPro
|
|||
} catch (error) {
|
||||
console.error("Error fetching privacy mode from database:", error)
|
||||
}
|
||||
setIsLoading(false)
|
||||
}
|
||||
|
||||
setIsLoading(false)
|
||||
}
|
||||
|
||||
loadPrivacyMode()
|
||||
}, [userId, isOrgMode, currentOrg?.id])
|
||||
}, [
|
||||
userId,
|
||||
isOrgMode,
|
||||
currentOrg?.id,
|
||||
hasInitialData,
|
||||
initialPrivacyMode,
|
||||
initialCanUsePrivacyMode,
|
||||
])
|
||||
|
||||
const updatePrivacyModeValue = useCallback(
|
||||
async (newValue: boolean) => {
|
||||
|
|
|
|||
|
|
@ -42,17 +42,19 @@ interface Organization {
|
|||
export function ViewModeProvider({
|
||||
children,
|
||||
user,
|
||||
initialOrganizations,
|
||||
}: {
|
||||
children: React.ReactNode
|
||||
user?: UserProfile
|
||||
initialOrganizations?: Organization[]
|
||||
}) {
|
||||
const router = useRouter()
|
||||
const [mode, setMode] = useState<ViewMode>("personal")
|
||||
const [loading, setIsLoading] = useState<boolean>(true)
|
||||
const [orgs, setOrgs] = useState<Organization[]>([])
|
||||
const [loading, setIsLoading] = useState<boolean>(!initialOrganizations)
|
||||
const [orgs, setOrgs] = useState<Organization[]>(initialOrganizations ?? [])
|
||||
const [currentOrg, setCurrentOrg] = useState<Organization | null>(null)
|
||||
const fetchingRef = useRef(false)
|
||||
const orgsRef = useRef<Organization[]>([])
|
||||
const orgsRef = useRef<Organization[]>(initialOrganizations ?? [])
|
||||
|
||||
const setLocalStorageMode = useCallback((newMode: ViewMode, orgId?: string) => {
|
||||
localStorage.setItem("viewMode", newMode)
|
||||
|
|
@ -99,6 +101,25 @@ export function ViewModeProvider({
|
|||
setIsLoading(false)
|
||||
return
|
||||
}
|
||||
|
||||
const restoreViewMode = (fetchedOrgs: Organization[]) => {
|
||||
const savedMode = localStorage.getItem("viewMode") as ViewMode
|
||||
const savedOrgId = localStorage.getItem("currentOrganizationId")
|
||||
|
||||
if (savedMode === "organization" && savedOrgId) {
|
||||
switchToMode("organization", savedOrgId, fetchedOrgs)
|
||||
} else {
|
||||
switchToMode("personal", undefined, fetchedOrgs)
|
||||
}
|
||||
}
|
||||
|
||||
// If initial data was provided by the server, skip the client-side fetch
|
||||
if (initialOrganizations) {
|
||||
restoreViewMode(initialOrganizations)
|
||||
setIsLoading(false)
|
||||
return
|
||||
}
|
||||
|
||||
if (fetchingRef.current) return
|
||||
fetchingRef.current = true
|
||||
|
||||
|
|
@ -108,15 +129,7 @@ export function ViewModeProvider({
|
|||
if (result.success && result.organizations) {
|
||||
setOrgs(result.organizations)
|
||||
orgsRef.current = result.organizations
|
||||
|
||||
const savedMode = localStorage.getItem("viewMode") as ViewMode
|
||||
const savedOrgId = localStorage.getItem("currentOrganizationId")
|
||||
|
||||
if (savedMode === "organization" && savedOrgId) {
|
||||
switchToMode("organization", savedOrgId, result.organizations)
|
||||
} else {
|
||||
switchToMode("personal", undefined, result.organizations)
|
||||
}
|
||||
restoreViewMode(result.organizations)
|
||||
}
|
||||
})
|
||||
.catch(error => console.error("Error fetching organizations:", error))
|
||||
|
|
@ -124,7 +137,7 @@ export function ViewModeProvider({
|
|||
setIsLoading(false)
|
||||
fetchingRef.current = false
|
||||
})
|
||||
}, [user?.sub, switchToMode])
|
||||
}, [user?.sub, switchToMode, initialOrganizations])
|
||||
|
||||
const contextValue = useMemo(
|
||||
() => ({ mode, orgs, loading, currentOrg, switchToMode }),
|
||||
|
|
|
|||
97
js/cf-webapp/src/app/app/init-data-action.ts
Normal file
97
js/cf-webapp/src/app/app/init-data-action.ts
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
"use server"
|
||||
|
||||
import {
|
||||
getUserPrivacyMode,
|
||||
isUserPaid,
|
||||
checkAndResetSubscriptionPeriod,
|
||||
prisma,
|
||||
} from "@codeflash-ai/common"
|
||||
|
||||
interface Organization {
|
||||
id: string
|
||||
name: string
|
||||
role: "admin" | "owner" | "member"
|
||||
avatarUrl?: string
|
||||
}
|
||||
|
||||
export interface DashboardInitData {
|
||||
organizations: Organization[]
|
||||
privacyMode: boolean
|
||||
canUsePrivacyMode: boolean
|
||||
subscription: {
|
||||
optimizations_used: number
|
||||
optimizations_limit: number
|
||||
} | null
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches all dashboard initialization data in a single server action,
|
||||
* running all independent queries in parallel. This replaces the sequential
|
||||
* client-side waterfall of getUserOrganizations + checkCanUsePrivacyMode +
|
||||
* getPrivacyMode + getCurrentUserSubscriptionData.
|
||||
*/
|
||||
export async function getDashboardInitData(userId: string): Promise<DashboardInitData> {
|
||||
const [orgsResult, privacyMode, canUsePrivacyMode, subscription] = await Promise.all([
|
||||
// Fetch organizations
|
||||
prisma.organizations
|
||||
.findMany({
|
||||
where: {
|
||||
organization_members: {
|
||||
some: { user_id: userId },
|
||||
},
|
||||
},
|
||||
include: {
|
||||
organization_members: {
|
||||
where: { user_id: userId },
|
||||
select: { role: true },
|
||||
},
|
||||
},
|
||||
orderBy: { created_at: "desc" },
|
||||
})
|
||||
.then(orgs =>
|
||||
orgs.map(org => ({
|
||||
id: org.id,
|
||||
name: org.name,
|
||||
role: org.organization_members[0].role as "admin" | "owner" | "member",
|
||||
avatarUrl: `https://github.com/${org.name}.png`,
|
||||
})),
|
||||
)
|
||||
.catch(error => {
|
||||
console.error("Failed to get user organizations:", error)
|
||||
return [] as Organization[]
|
||||
}),
|
||||
|
||||
// Fetch privacy mode
|
||||
getUserPrivacyMode(userId).catch(error => {
|
||||
console.error("Error getting privacy mode:", error)
|
||||
return false
|
||||
}),
|
||||
|
||||
// Check if user can use privacy mode (is paid)
|
||||
isUserPaid(userId).catch(error => {
|
||||
console.error("Error checking if user can use privacy mode:", error)
|
||||
return false
|
||||
}),
|
||||
|
||||
// Fetch subscription data
|
||||
checkAndResetSubscriptionPeriod(userId)
|
||||
.then(sub => {
|
||||
if (!sub) return null
|
||||
return {
|
||||
optimizations_used: sub.optimizations_used || 0,
|
||||
optimizations_limit: sub.optimizations_limit || 0,
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error("Failed to fetch subscription data:", error)
|
||||
return null
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
organizations: orgsResult,
|
||||
privacyMode,
|
||||
canUsePrivacyMode,
|
||||
subscription,
|
||||
}
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
import { type JSX } from "react"
|
||||
import { type JSX, Suspense } from "react"
|
||||
import type { Metadata } from "next"
|
||||
import { Inter as FontSans, JetBrains_Mono, Sora, Source_Code_Pro } from "next/font/google"
|
||||
import "./globals.css"
|
||||
|
|
@ -15,11 +15,12 @@ import PostHogPageView from "./PostHogPageView"
|
|||
const fontSans = FontSans({
|
||||
subsets: ["latin"],
|
||||
variable: "--font-sans",
|
||||
display: "swap",
|
||||
})
|
||||
|
||||
const jetbrainsMono = JetBrains_Mono({
|
||||
subsets: ["latin"],
|
||||
weight: ["300", "400", "500", "600", "700"],
|
||||
weight: ["400", "600"],
|
||||
variable: "--font-jetbrains-mono",
|
||||
display: "swap",
|
||||
})
|
||||
|
|
@ -90,17 +91,6 @@ export default async function RootLayout({
|
|||
return (
|
||||
<html lang="en" suppressHydrationWarning>
|
||||
<PHProvider>
|
||||
<head>
|
||||
<link rel="preconnect" href="https://widget.intercom.io" />
|
||||
<link rel="dns-prefetch" href="https://widget.intercom.io" />
|
||||
<Script
|
||||
id="intercom-script"
|
||||
strategy="afterInteractive"
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: intercomSnippet,
|
||||
}}
|
||||
/>
|
||||
</head>
|
||||
<body
|
||||
className={cn(
|
||||
"min-h-screen bg-background font-sans antialiased",
|
||||
|
|
@ -110,7 +100,9 @@ export default async function RootLayout({
|
|||
sourceCodePro.variable,
|
||||
)}
|
||||
>
|
||||
<PostHogPageView />
|
||||
<Suspense fallback={null}>
|
||||
<PostHogPageView />
|
||||
</Suspense>
|
||||
<Auth0Provider>
|
||||
<ThemeProvider
|
||||
attribute="class"
|
||||
|
|
@ -123,6 +115,20 @@ export default async function RootLayout({
|
|||
<SonnerToaster position="top-right" richColors />
|
||||
</ThemeProvider>
|
||||
</Auth0Provider>
|
||||
<Script
|
||||
id="intercom-script"
|
||||
strategy="lazyOnload"
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: intercomSnippet,
|
||||
}}
|
||||
/>
|
||||
<Script
|
||||
id="crisp-chat-script"
|
||||
strategy="lazyOnload"
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: `window.$crisp=[];window.CRISP_WEBSITE_ID="3e855999-42a1-4543-accf-afc369edfca0";(function(){d=document;s=d.createElement("script");s.src="https://client.crisp.chat/l.js";s.async=1;d.getElementsByTagName("head")[0].appendChild(s);})();`,
|
||||
}}
|
||||
/>
|
||||
</body>
|
||||
</PHProvider>
|
||||
</html>
|
||||
|
|
|
|||
|
|
@ -8,7 +8,18 @@ import { TOP_BAR_ANNOUNCEMENT } from "@/config/announcements"
|
|||
import { cn } from "@/lib/utils"
|
||||
import type { User } from "@auth0/nextjs-auth0/types"
|
||||
|
||||
export function DashboardShell({ children, user }: { children: React.ReactNode; user?: User }) {
|
||||
export function DashboardShell({
|
||||
children,
|
||||
user,
|
||||
initialSubscription,
|
||||
}: {
|
||||
children: React.ReactNode
|
||||
user?: User
|
||||
initialSubscription?: {
|
||||
optimizations_used: number
|
||||
optimizations_limit: number
|
||||
} | null
|
||||
}) {
|
||||
const [isAnnouncementVisible, setIsAnnouncementVisible] = useState(true)
|
||||
|
||||
// Auto-collapse announcement after 4 seconds
|
||||
|
|
@ -77,6 +88,7 @@ export function DashboardShell({ children, user }: { children: React.ReactNode;
|
|||
<Sidebar
|
||||
className="h-full border-r border-border/30 flex-shrink-0 w-60 bg-background"
|
||||
user={user || undefined}
|
||||
initialSubscription={initialSubscription}
|
||||
/>
|
||||
<main className="flex-1 flex flex-col gap-5 w-full max-w-none mx-auto h-full overflow-y-auto p-4">
|
||||
<div className="hidden md:block sticky top-0 z-20 bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60 border-b border-border/40">
|
||||
|
|
|
|||
|
|
@ -98,14 +98,63 @@ const ActiveUsersLeaderboardSkeleton: FC = () => (
|
|||
</div>
|
||||
)
|
||||
|
||||
/**
|
||||
* Skeleton loader for OptimizationPRsTable component
|
||||
* Mimics the table structure with header, filter controls, and row placeholders
|
||||
*/
|
||||
const OptimizationPRsTableSkeleton: FC = () => (
|
||||
<div className="bg-card rounded-xl border border-border p-4 sm:p-5">
|
||||
{/* Header */}
|
||||
<div className="flex flex-col sm:flex-row sm:items-center sm:justify-between gap-3 mb-4">
|
||||
<div>
|
||||
<Skeleton className="h-5 w-52 mb-1.5" />
|
||||
<Skeleton className="h-3.5 w-72" />
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<Skeleton className="h-8 w-[130px] rounded-md" />
|
||||
<Skeleton className="h-8 w-8 rounded-md" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Table */}
|
||||
<div className="rounded-lg border border-border overflow-hidden">
|
||||
{/* Table header */}
|
||||
<div className="bg-muted/30 px-4 py-3 flex items-center">
|
||||
<Skeleton className="h-4 w-40 mr-auto" />
|
||||
<Skeleton className="h-4 w-16 mx-4 hidden md:block" />
|
||||
<Skeleton className="h-4 w-16 mx-4 hidden lg:block" />
|
||||
<Skeleton className="h-4 w-20 mx-4 hidden sm:block" />
|
||||
<Skeleton className="h-4 w-16" />
|
||||
</div>
|
||||
{/* Table rows */}
|
||||
{[1, 2, 3, 4, 5].map(i => (
|
||||
<div key={i} className="px-4 py-3 border-t border-border flex items-center">
|
||||
<div className="flex items-center gap-3 flex-1">
|
||||
<Skeleton className="h-8 w-8 rounded-full flex-shrink-0" />
|
||||
<div className="space-y-1.5 flex-1">
|
||||
<Skeleton className="h-4 w-32" />
|
||||
<Skeleton className="h-3 w-48" />
|
||||
</div>
|
||||
</div>
|
||||
<Skeleton className="h-6 w-20 rounded-full mx-4 hidden md:block" />
|
||||
<Skeleton className="h-6 w-16 mx-4 hidden lg:block" />
|
||||
<Skeleton className="h-4 w-24 mx-4 hidden sm:block" />
|
||||
<Skeleton className="h-8 w-20 rounded-md" />
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
/**
|
||||
* Complete Dashboard Skeleton Loader
|
||||
* Displays skeleton placeholders matching the full dashboard layout
|
||||
* Used while dashboard data is being fetched
|
||||
* Displays skeleton placeholders matching the full dashboard layout.
|
||||
* IMPORTANT: This skeleton must match the exact structure and spacing of
|
||||
* dashboard/page.tsx to prevent Cumulative Layout Shift (CLS).
|
||||
*/
|
||||
export const DashboardSkeleton: FC = () => {
|
||||
return (
|
||||
<div className="h-screen py-6 sm:py-8 px-4 sm:px-6 max-w-[1400px] mx-auto">
|
||||
<div className="min-h-screen pb-8 py-6 sm:py-8 px-4 sm:px-6 max-w-[1400px] mx-auto">
|
||||
{/* Header skeleton */}
|
||||
<div className="mb-6 sm:mb-8">
|
||||
<div className="flex items-center justify-between mb-2">
|
||||
|
|
@ -114,6 +163,11 @@ export const DashboardSkeleton: FC = () => {
|
|||
</div>
|
||||
</div>
|
||||
|
||||
{/* Optimization PRs Table skeleton — must appear before stats grid to match page layout */}
|
||||
<div className="mb-6 sm:mb-8">
|
||||
<OptimizationPRsTableSkeleton />
|
||||
</div>
|
||||
|
||||
{/* Main metrics grid */}
|
||||
<div className="grid grid-cols-1 gap-3 sm:gap-5 mb-6 sm:mb-8">
|
||||
{/* Top 2 large metric cards with charts */}
|
||||
|
|
|
|||
|
|
@ -450,6 +450,7 @@ export const OptimizationPRsTable: React.FC<OptimizationPRsTableProps> = memo(
|
|||
src={`https://github.com/${event.repository.full_name.split("/")[0]}.png`}
|
||||
alt={event.repository.full_name}
|
||||
fill
|
||||
sizes="32px"
|
||||
className="rounded-full object-cover"
|
||||
onError={e => {
|
||||
e.currentTarget.style.display = "none"
|
||||
|
|
|
|||
|
|
@ -45,9 +45,19 @@ interface SidebarProps {
|
|||
user?: UserProfile
|
||||
isLoading?: boolean
|
||||
error?: Error | null
|
||||
initialSubscription?: {
|
||||
optimizations_used: number
|
||||
optimizations_limit: number
|
||||
} | null
|
||||
}
|
||||
|
||||
export function Sidebar({ className, user, isLoading, error }: SidebarProps): JSX.Element {
|
||||
export function Sidebar({
|
||||
className,
|
||||
user,
|
||||
isLoading,
|
||||
error,
|
||||
initialSubscription,
|
||||
}: SidebarProps): JSX.Element {
|
||||
const currentRoute = usePathname()
|
||||
const [isDarkMode, setIsDarkMode] = useState(false)
|
||||
const [isDropdownOpen, setIsDropdownOpen] = useState(false)
|
||||
|
|
@ -66,7 +76,7 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
|
|||
const [subscription, setSubscription] = useState<{
|
||||
optimizations_used: number
|
||||
optimizations_limit: number
|
||||
} | null>(null)
|
||||
} | null>(initialSubscription ?? null)
|
||||
const subscriptionFetchRef = useRef(false)
|
||||
|
||||
const onMobileClose = () => {
|
||||
|
|
@ -99,11 +109,21 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
|
|||
return () => document.removeEventListener("mousedown", handleClickOutside)
|
||||
}, [])
|
||||
|
||||
const usedInitialSubscriptionRef = useRef(false)
|
||||
|
||||
useEffect(() => {
|
||||
if (mode !== "personal") {
|
||||
setSubscription(null)
|
||||
return
|
||||
}
|
||||
|
||||
// Skip fetch if initial data was provided by the server (first render only)
|
||||
if (initialSubscription !== undefined && !usedInitialSubscriptionRef.current) {
|
||||
usedInitialSubscriptionRef.current = true
|
||||
setSubscription(initialSubscription)
|
||||
return
|
||||
}
|
||||
|
||||
if (subscriptionFetchRef.current) return
|
||||
subscriptionFetchRef.current = true
|
||||
|
||||
|
|
@ -128,7 +148,7 @@ export function Sidebar({ className, user, isLoading, error }: SidebarProps): JS
|
|||
cancelled = true
|
||||
subscriptionFetchRef.current = false
|
||||
}
|
||||
}, [mode])
|
||||
}, [mode, initialSubscription])
|
||||
|
||||
const toggleTheme = () => {
|
||||
const newMode = !isDarkMode
|
||||
|
|
|
|||
|
|
@ -300,8 +300,10 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
|
|||
|
||||
return (
|
||||
<div className="flex flex-col h-screen bg-gradient-to-br from-slate-900 via-slate-800 to-slate-900 text-slate-200">
|
||||
{/* Header Section - Mobile Optimized */}
|
||||
<div className="px-3 sm:px-4 md:px-6 py-2 sm:py-3 md:py-4 border-b border-slate-700/50 overflow-y-auto max-h-[40vh] md:max-h-none">
|
||||
{/* Header Section - Mobile Optimized. flex-shrink-0 prevents this section
|
||||
from collapsing during flex layout, which would cause the editor below to
|
||||
shift (CLS). The max-h constraint still limits overflow on mobile. */}
|
||||
<div className="px-3 sm:px-4 md:px-6 py-2 sm:py-3 md:py-4 border-b border-slate-700/50 overflow-y-auto max-h-[40vh] md:max-h-none flex-shrink-0">
|
||||
<div className="flex flex-col gap-3 sm:gap-4 md:gap-6">
|
||||
{/* Top Row - Title with PR Link and Observability Link */}
|
||||
<div className="flex items-center justify-between gap-2">
|
||||
|
|
@ -500,10 +502,11 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
|
|||
</div>
|
||||
</div>
|
||||
|
||||
{/* File Path/Tabs - Mobile Optimized */}
|
||||
{/* File Path/Tabs - Mobile Optimized. flex-shrink-0 prevents CLS from
|
||||
this section collapsing during initial layout. */}
|
||||
{fileKeys.length === 1 ? (
|
||||
// Single file - show full path with view toggle
|
||||
<div className="bg-[rgba(15,15,15,0.95)] border-b border-[rgba(255,255,255,0.05)] px-3 sm:px-4 md:px-5 py-2 sm:py-2.5 md:py-3 flex items-center justify-between gap-2">
|
||||
<div className="bg-[rgba(15,15,15,0.95)] border-b border-[rgba(255,255,255,0.05)] px-3 sm:px-4 md:px-5 py-2 sm:py-2.5 md:py-3 flex items-center justify-between gap-2 flex-shrink-0">
|
||||
<div className="flex items-center gap-1.5 sm:gap-2 min-w-0 flex-1">
|
||||
<FileText size={12} className="text-sky-400 flex-shrink-0 sm:w-3.5 sm:h-3.5" />
|
||||
<span className="text-xs sm:text-sm text-slate-300 font-mono truncate">
|
||||
|
|
@ -532,7 +535,7 @@ const MonacoDiffViewer: React.FC<MonacoDiffViewerProps> = ({
|
|||
</div>
|
||||
) : (
|
||||
// Multiple files - show tabs with full path on hover
|
||||
<div className="bg-[rgba(15,15,15,0.95)] border-b border-[rgba(255,255,255,0.05)]">
|
||||
<div className="bg-[rgba(15,15,15,0.95)] border-b border-[rgba(255,255,255,0.05)] flex-shrink-0">
|
||||
<div className="flex items-center justify-between gap-2">
|
||||
<div className="flex overflow-x-auto whitespace-nowrap scrollbar-thin scrollbar-thumb-slate-700 scrollbar-track-slate-800 flex-1 min-w-0">
|
||||
{fileKeys.map(fileKey => (
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ const config: Config = {
|
|||
},
|
||||
fontFamily: {
|
||||
sans: ["var(--font-sans)", ...fontFamily.sans],
|
||||
mono: ["var(--font-jetbrains-mono)", ...fontFamily.mono],
|
||||
},
|
||||
keyframes: {
|
||||
shimmer: {
|
||||
|
|
|
|||
|
|
@ -226,6 +226,9 @@ model optimization_events {
|
|||
@@index([repository_id, user_id])
|
||||
@@index([api_key_id])
|
||||
@@index([is_staging])
|
||||
// Covers the optimization-prs query: filter by repo + optimization found + event type, sort by created_at DESC
|
||||
@@index([repository_id, is_optimization_found, event_type, created_at(sort: Desc)])
|
||||
@@index([is_optimization_found, event_type, created_at(sort: Desc)])
|
||||
}
|
||||
|
||||
model comments {
|
||||
|
|
|
|||
Loading…
Reference in a new issue