restore debug data

This commit is contained in:
Kevin Turcios 2026-02-06 01:41:02 -05:00
parent f82bb41c19
commit 29255a6dd5
3 changed files with 238 additions and 1 deletion

View file

@ -186,6 +186,9 @@ function TraceContent({ traceId, traceData }: { traceId: string; traceData: Trac
total_tokens: call.total_tokens,
created_at: call.created_at,
context: call.context as { call_sequence?: number } | null,
system_prompt: call.system_prompt,
user_prompt: call.user_prompt,
raw_response: call.raw_response,
}))
const { sections, totalDuration } = transformToTimelineSections({

View file

@ -15,9 +15,18 @@ import {
XCircle,
AlertCircle,
BarChart3,
Bug,
} from "lucide-react"
import {
Dialog,
DialogContent,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "@/components/ui/dialog"
import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"
import { CodeHighlighter, CODE_STYLE } from "./code-highlighter"
import type { TimelineSection, TimelineSectionContent } from "./timeline-types"
import type { TimelineSection, TimelineSectionContent, LLMCallDebugData } from "./timeline-types"
function stripCodeHeader(code: string): string {
let lines = code.split("\n")
@ -126,6 +135,189 @@ function findMatchingFile(
return files[0] || null
}
/** Renders prompt content with syntax-highlighted code blocks */
const PromptContent = memo(function PromptContent({ content }: { content: string }) {
const parts = useMemo(() => {
const result: { type: "text" | "code"; content: string; language?: string }[] = []
const codeBlockRegex = /```(\w+)?\n?([\s\S]*?)```/g
let lastIndex = 0
let match
while ((match = codeBlockRegex.exec(content)) !== null) {
if (match.index > lastIndex) {
result.push({ type: "text", content: content.slice(lastIndex, match.index) })
}
result.push({
type: "code",
content: match[2].trim(),
language: match[1] || "python",
})
lastIndex = match.index + match[0].length
}
if (lastIndex < content.length) {
result.push({ type: "text", content: content.slice(lastIndex) })
}
return result.length > 0 ? result : [{ type: "text" as const, content }]
}, [content])
return (
<div className="space-y-3">
{parts.map((part, index) =>
part.type === "code" ? (
<div key={index} className="rounded border border-zinc-200 dark:border-zinc-700 overflow-hidden">
<CodeHighlighter
language={part.language || "python"}
code={part.content}
customStyle={CODE_STYLE}
/>
</div>
) : (
<pre
key={index}
className="text-sm whitespace-pre-wrap break-words text-zinc-800 dark:text-zinc-200 leading-relaxed font-mono"
>
{part.content}
</pre>
)
)}
</div>
)
})
/** Props for the LLM-call debug dialog. */
interface LLMCallDebugDialogProps {
  // Captured prompts/response for one LLM call; fields may be null.
  debugData: LLMCallDebugData
  // Dialog heading — the timeline section title at the call site.
  title: string
  // Model identifier rendered as a badge when present.
  model?: string | null
}
/**
 * Debug dialog for a single LLM call. Renders a small bug-icon trigger;
 * when opened, shows the user/system prompts in tabs and can toggle to the
 * raw model response. Returns null when no debug payload exists at all.
 */
const LLMCallDebugDialog = memo(function LLMCallDebugDialog({
  debugData,
  title,
  model,
}: LLMCallDebugDialogProps) {
  const [open, setOpen] = useState(false)
  // Which prompt tab is visible while in the prompts view.
  const [activeTab, setActiveTab] = useState<"user" | "system">("user")
  // Toggles between the prompts view and the raw-response view.
  const [showResponse, setShowResponse] = useState(false)
  // Defers rendering of (potentially large) prompt bodies by one animation
  // frame so the dialog open animation is not blocked by heavy content.
  const [contentReady, setContentReady] = useState(false)
  useEffect(() => {
    if (open) {
      const timer = requestAnimationFrame(() => setContentReady(true))
      return () => cancelAnimationFrame(timer)
    } else {
      // Reset transient view state when the dialog closes.
      setContentReady(false)
      setShowResponse(false)
    }
  }, [open])
  // Hide the trigger entirely when there is nothing to inspect.
  const hasContent = debugData.systemPrompt || debugData.userPrompt || debugData.rawResponse
  if (!hasContent) return null
  return (
    <Dialog open={open} onOpenChange={setOpen}>
      <DialogTrigger asChild>
        <button
          className="p-1.5 rounded text-zinc-400 hover:text-zinc-600 dark:hover:text-zinc-300 hover:bg-zinc-100 dark:hover:bg-zinc-700 transition-colors"
          title="View LLM call details"
        >
          <Bug className="h-4 w-4" />
        </button>
      </DialogTrigger>
      <DialogContent className="w-[95vw] max-w-[95vw] h-[90vh] max-h-[90vh] flex flex-col overflow-hidden">
        <DialogHeader className="flex-shrink-0 pb-3 border-b border-zinc-200 dark:border-zinc-700">
          <div className="flex items-center justify-between">
            <DialogTitle className="flex items-center gap-2">
              <Bug className="h-4 w-4" />
              <span>{title}</span>
            </DialogTitle>
            {/* Response debug button */}
            <button
              onClick={() => setShowResponse(!showResponse)}
              className={`flex items-center gap-1.5 px-2 py-1 text-xs rounded transition-colors duration-150 ${
                showResponse
                  ? "bg-green-100 dark:bg-green-900/50 text-green-700 dark:text-green-300"
                  : "text-zinc-600 dark:text-zinc-400 hover:bg-zinc-100 dark:hover:bg-zinc-700"
              }`}
              title="View raw LLM response"
            >
              {/* NOTE(review): `Code` is used here but only `Bug` appears in the
                  visible lucide-react import additions — confirm `Code` is
                  imported elsewhere in this file. */}
              <Code className="h-3 w-3" />
              <span>Response</span>
              {/* Character count of the raw response (0 when absent). */}
              <span className="text-xs opacity-70">
                ({(debugData.rawResponse?.length || 0).toLocaleString()})
              </span>
            </button>
          </div>
          {model && (
            <div className="flex items-center gap-2 mt-2 text-sm">
              <span className="text-xs px-1.5 py-0.5 bg-zinc-100 dark:bg-zinc-700 text-zinc-600 dark:text-zinc-300 rounded">
                {model}
              </span>
            </div>
          )}
        </DialogHeader>
        {showResponse ? (
          /* Raw Response View */
          <div className="flex-1 overflow-y-auto mt-3 p-4 bg-white dark:bg-zinc-900 rounded-sm border border-zinc-200 dark:border-zinc-700">
            {debugData.rawResponse ? (
              <pre className="text-sm whitespace-pre-wrap break-words text-zinc-800 dark:text-zinc-200 leading-relaxed font-mono">
                {debugData.rawResponse}
              </pre>
            ) : (
              <span className="text-zinc-400">No response</span>
            )}
          </div>
        ) : (
          /* Prompts View */
          <Tabs value={activeTab} onValueChange={v => setActiveTab(v as "user" | "system")} className="flex-1 flex flex-col min-h-0 mt-3">
            <TabsList className="flex-shrink-0 w-fit mx-auto">
              <TabsTrigger value="user">
                User Prompt
                <span className="ml-1.5 text-xs text-zinc-400">
                  ({(debugData.userPrompt?.length || 0).toLocaleString()} chars)
                </span>
              </TabsTrigger>
              <TabsTrigger value="system">
                System Prompt
                <span className="ml-1.5 text-xs text-zinc-400">
                  ({(debugData.systemPrompt?.length || 0).toLocaleString()} chars)
                </span>
              </TabsTrigger>
            </TabsList>
            <div className="flex-1 overflow-y-auto mt-3 p-4 bg-white dark:bg-zinc-900 rounded-sm border border-zinc-200 dark:border-zinc-700">
              {/* Skeleton placeholder until the deferred first frame elapses. */}
              {!contentReady ? (
                <div className="animate-pulse space-y-3">
                  <div className="h-4 bg-zinc-200 dark:bg-zinc-700 rounded w-3/4" />
                  <div className="h-4 bg-zinc-200 dark:bg-zinc-700 rounded w-1/2" />
                  <div className="h-4 bg-zinc-200 dark:bg-zinc-700 rounded w-2/3" />
                </div>
              ) : activeTab === "user" ? (
                debugData.userPrompt ? (
                  /* User prompt gets code-fence-aware rendering. */
                  <PromptContent content={debugData.userPrompt} />
                ) : (
                  <span className="text-zinc-400">No user prompt</span>
                )
              ) : (
                debugData.systemPrompt ? (
                  /* System prompt is rendered verbatim, no fence parsing. */
                  <pre className="text-sm whitespace-pre-wrap break-words text-zinc-800 dark:text-zinc-200 leading-relaxed font-mono">
                    {debugData.systemPrompt}
                  </pre>
                ) : (
                  <span className="text-zinc-400">No system prompt</span>
                )
              )}
            </div>
          </Tabs>
        )}
      </DialogContent>
    </Dialog>
  )
})
const DiffView = memo(function DiffView({ diff }: { diff: string }) {
const lines = diff.split("\n")
@ -681,6 +873,13 @@ const TimelineSectionCard = memo(function TimelineSectionCard({
${section.cost.toFixed(4)}
</span>
)}
{section.debugData && (
<LLMCallDebugDialog
debugData={section.debugData}
title={section.title}
model={section.model}
/>
)}
</div>
</div>
</div>

View file

@ -1,3 +1,9 @@
/**
 * Raw debug payload captured for a single LLM call. Each field is null when
 * the underlying call record did not store that value (the transform maps
 * missing DB columns to null via `?? null`).
 */
export interface LLMCallDebugData {
  systemPrompt: string | null
  userPrompt: string | null
  rawResponse: string | null
}
export interface TimelineSection {
id: string
type: "test_generation" | "optimization" | "line_profiler" | "refinement" | "ranking" | "summary"
@ -10,6 +16,7 @@ export interface TimelineSection {
cost?: number | null
tokens?: number | null
content: TimelineSectionContent
debugData?: LLMCallDebugData
}
export interface TestGroup {
@ -37,6 +44,9 @@ export interface TransformInput {
total_tokens: number | null
created_at: Date
context: { call_sequence?: number } | null
system_prompt?: string | null
user_prompt?: string | null
raw_response?: string | null
}>
optimizationCandidates: Array<{
id: string
@ -135,6 +145,11 @@ export function transformToTimelineSections(input: TransformInput): { sections:
testGroups,
testFramework: testFramework ?? undefined,
},
debugData: firstTestCall ? {
systemPrompt: firstTestCall.system_prompt ?? null,
userPrompt: firstTestCall.user_prompt ?? null,
rawResponse: firstTestCall.raw_response ?? null,
} : undefined,
})
}
@ -168,6 +183,11 @@ export function transformToTimelineSections(input: TransformInput): { sections:
rank,
isBest: candidate.id === bestCandidateId,
},
debugData: {
systemPrompt: call.system_prompt ?? null,
userPrompt: call.user_prompt ?? null,
rawResponse: call.raw_response ?? null,
},
})
}
} else if (callType === "line_profiler") {
@ -194,6 +214,11 @@ export function transformToTimelineSections(input: TransformInput): { sections:
rank,
isBest: candidate.id === bestCandidateId,
},
debugData: {
systemPrompt: call.system_prompt ?? null,
userPrompt: call.user_prompt ?? null,
rawResponse: call.raw_response ?? null,
},
})
}
} else if (callType === "refinement") {
@ -226,6 +251,11 @@ export function transformToTimelineSections(input: TransformInput): { sections:
rank,
isBest: candidate.id === bestCandidateId,
},
debugData: {
systemPrompt: call.system_prompt ?? null,
userPrompt: call.user_prompt ?? null,
rawResponse: call.raw_response ?? null,
},
})
}
} else if (callType === "ranking") {
@ -259,6 +289,11 @@ export function transformToTimelineSections(input: TransformInput): { sections:
rankings,
usedForPr,
},
debugData: {
systemPrompt: call.system_prompt ?? null,
userPrompt: call.user_prompt ?? null,
rawResponse: call.raw_response ?? null,
},
})
}
}