Add Vitest support to JS/TS projects

This commit is contained in:
Sarthak Agarwal 2026-01-31 01:09:52 +05:30
parent f5a61fb216
commit c56002f287
17 changed files with 3824 additions and 4 deletions

105
.github/workflows/js-tests.yml vendored Normal file
View file

@ -0,0 +1,105 @@
# CI for the JavaScript/TypeScript sample projects and the Python-side
# integration tests that drive them (Jest, Vitest, and the codeflash runners).
name: JavaScript/TypeScript Tests

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

# Cancel superseded runs on the same branch to save CI minutes.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}
  cancel-in-progress: true

jobs:
  # Runs the Jest suites of both sample projects via a matrix.
  jest-tests:
    name: Jest Tests
    runs-on: ubuntu-latest
    strategy:
      matrix:
        project:
          - code_to_optimize_js
          - code_to_optimize_ts
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
      - name: Install dependencies
        working-directory: code_to_optimize/js/${{ matrix.project }}
        run: npm install
      - name: Run Jest tests
        working-directory: code_to_optimize/js/${{ matrix.project }}
        run: npm test

  # Runs the Vitest suite of the dedicated Vitest sample project.
  vitest-tests:
    name: Vitest Tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
      - name: Install dependencies
        working-directory: code_to_optimize/js/code_to_optimize_vitest
        run: npm install
      - name: Run Vitest tests
        working-directory: code_to_optimize/js/code_to_optimize_vitest
        run: npm test

  # Python-side tests that shell out to the JS/TS projects, so both the Python
  # environment (uv) and every sample project's node_modules must be set up.
  python-js-tests:
    name: Python JS/TS Integration Tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
      - name: Install uv
        uses: astral-sh/setup-uv@v6
      - name: Install Python dependencies
        run: |
          uv venv --seed
          uv sync
      - name: Install npm dependencies for test projects
        run: |
          npm install --prefix code_to_optimize/js/code_to_optimize_js
          npm install --prefix code_to_optimize/js/code_to_optimize_ts
          npm install --prefix code_to_optimize/js/code_to_optimize_vitest
      # NOTE(review): this path uses tests/languages/... while the two E2E steps
      # below use tests/test_languages/... — confirm both directories exist.
      - name: Run Vitest runner unit tests
        run: uv run pytest tests/languages/javascript/test_vitest_runner.py -v
      - name: Run Vitest E2E tests
        run: uv run pytest tests/test_languages/test_vitest_e2e.py -v
      - name: Run JavaScript E2E tests
        run: uv run pytest tests/test_languages/test_javascript_e2e.py -v
      - name: Run config_js tests
        run: uv run pytest tests/code_utils/test_config_js.py -v

View file

@ -0,0 +1 @@
language: typescript

View file

@ -0,0 +1,52 @@
/**
* Fibonacci implementations - intentionally inefficient for optimization testing.
*/
/**
 * Calculate the nth Fibonacci number using naive recursion.
 * Intentionally exponential-time to demonstrate optimization potential.
 * @param n - The index of the Fibonacci number to calculate
 * @returns The nth Fibonacci number
 */
export function fibonacci(n: number): number {
  return n <= 1 ? n : fibonacci(n - 1) + fibonacci(n - 2);
}
/**
 * Check if a number is a Fibonacci number.
 * A non-negative integer num is Fibonacci iff (5*num*num + 4) or
 * (5*num*num - 4) is a perfect square (Gessel's test).
 * NOTE(review): 5*num*num exceeds Number.MAX_SAFE_INTEGER for num beyond
 * roughly 4.2e7, so results are unreliable past that range — confirm callers
 * stay within it.
 * @param num - The number to check
 * @returns True if num is a Fibonacci number
 */
export function isFibonacci(num: number): boolean {
  const check1 = 5 * num * num + 4;
  const check2 = 5 * num * num - 4;
  return isPerfectSquare(check1) || isPerfectSquare(check2);
}

/**
 * Check if a number is a perfect square.
 * Uses round-and-verify instead of comparing Math.sqrt(n) against its floor:
 * the latter reports false positives such as k*k - 1 for k near 2^27, where
 * the correctly rounded sqrt lands exactly on k. Rounding the candidate root
 * and squaring it back checks against the exact integer value instead.
 * @param n - The number to check
 * @returns True if n is a non-negative perfect square
 */
export function isPerfectSquare(n: number): boolean {
  if (n < 0) {
    return false; // negative numbers have no real square root
  }
  const root = Math.round(Math.sqrt(n));
  return root * root === n;
}
/**
 * Generate an array of Fibonacci numbers up to n.
 * Recomputes each value from scratch via the naive recursive fibonacci(),
 * keeping this fixture intentionally inefficient.
 * @param n - The number of Fibonacci numbers to generate
 * @returns Array of Fibonacci numbers
 */
export function fibonacciSequence(n: number): number[] {
  return Array.from({ length: n }, (_, index) => fibonacci(index));
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,23 @@
{
"name": "codeflash-vitest-test",
"version": "1.0.0",
"description": "Sample TypeScript project with Vitest for codeflash optimization testing",
"type": "module",
"main": "dist/index.js",
"scripts": {
"test": "vitest run",
"test:watch": "vitest",
"test:coverage": "vitest run --coverage",
"build": "tsc"
},
"codeflash": {
"moduleRoot": ".",
"testsRoot": "tests"
},
"devDependencies": {
"@types/node": "^20.0.0",
"codeflash": "file:../../../packages/codeflash",
"typescript": "^5.0.0",
"vitest": "^2.0.0"
}
}

View file

@ -0,0 +1,62 @@
/**
* String utilities - intentionally inefficient for optimization testing.
*/
/**
 * Reverse a string character by character.
 * Operates on UTF-16 code units, building the result one unit at a time.
 * @param str - The string to reverse
 * @returns The reversed string
 */
export function reverseString(str: string): string {
  let reversed = '';
  let index = str.length;
  while (index > 0) {
    index -= 1;
    reversed += str[index];
  }
  return reversed;
}

/**
 * Check if a string is a palindrome.
 * Lowercases the input and strips everything except a-z and 0-9 before
 * comparing it against its own reversal.
 * @param str - The string to check
 * @returns True if the string is a palindrome
 */
export function isPalindrome(str: string): boolean {
  const normalized = str.toLowerCase().replace(/[^a-z0-9]/g, '');
  const mirrored = reverseString(normalized);
  return normalized === mirrored;
}
/**
 * Count vowels in a string.
 * Both lowercase and uppercase ASCII vowels are counted.
 * @param str - The string to analyze
 * @returns The number of vowels
 */
export function countVowels(str: string): number {
  const vowelSet = new Set('aeiouAEIOU');
  let total = 0;
  for (const ch of str) {
    if (vowelSet.has(ch)) {
      total += 1;
    }
  }
  return total;
}
/**
 * Find all unique words in a string.
 * Words are lowercased and split on runs of whitespace; duplicates are
 * dropped while the first-seen order is preserved (Set iteration order).
 * @param str - The string to analyze
 * @returns Array of unique words
 */
export function uniqueWords(str: string): string[] {
  const tokens = str
    .toLowerCase()
    .split(/\s+/)
    .filter(token => token.length > 0);
  return [...new Set(tokens)];
}

View file

@ -0,0 +1,98 @@
/**
 * Behavioral tests for the fibonacci module, run under Vitest.
 * Expected values are fixed constants so runs are deterministic across
 * original and optimized candidates.
 */
import { describe, test, expect } from 'vitest';
import { fibonacci, isFibonacci, isPerfectSquare, fibonacciSequence } from '../fibonacci';

describe('fibonacci', () => {
  test('returns 0 for n=0', () => {
    expect(fibonacci(0)).toBe(0);
  });
  test('returns 1 for n=1', () => {
    expect(fibonacci(1)).toBe(1);
  });
  test('returns 1 for n=2', () => {
    expect(fibonacci(2)).toBe(1);
  });
  test('returns 5 for n=5', () => {
    expect(fibonacci(5)).toBe(5);
  });
  test('returns 55 for n=10', () => {
    expect(fibonacci(10)).toBe(55);
  });
  test('returns 233 for n=13', () => {
    expect(fibonacci(13)).toBe(233);
  });
});

describe('isFibonacci', () => {
  test('returns true for 0', () => {
    expect(isFibonacci(0)).toBe(true);
  });
  test('returns true for 1', () => {
    expect(isFibonacci(1)).toBe(true);
  });
  test('returns true for 8', () => {
    expect(isFibonacci(8)).toBe(true);
  });
  test('returns true for 13', () => {
    expect(isFibonacci(13)).toBe(true);
  });
  test('returns false for 4', () => {
    expect(isFibonacci(4)).toBe(false);
  });
  test('returns false for 6', () => {
    expect(isFibonacci(6)).toBe(false);
  });
});

describe('isPerfectSquare', () => {
  test('returns true for 0', () => {
    expect(isPerfectSquare(0)).toBe(true);
  });
  test('returns true for 1', () => {
    expect(isPerfectSquare(1)).toBe(true);
  });
  test('returns true for 4', () => {
    expect(isPerfectSquare(4)).toBe(true);
  });
  test('returns true for 16', () => {
    expect(isPerfectSquare(16)).toBe(true);
  });
  test('returns false for 2', () => {
    expect(isPerfectSquare(2)).toBe(false);
  });
  test('returns false for 3', () => {
    expect(isPerfectSquare(3)).toBe(false);
  });
});

describe('fibonacciSequence', () => {
  test('returns empty array for n=0', () => {
    expect(fibonacciSequence(0)).toEqual([]);
  });
  test('returns [0] for n=1', () => {
    expect(fibonacciSequence(1)).toEqual([0]);
  });
  test('returns first 5 Fibonacci numbers', () => {
    expect(fibonacciSequence(5)).toEqual([0, 1, 1, 2, 3]);
  });
  test('returns first 10 Fibonacci numbers', () => {
    expect(fibonacciSequence(10)).toEqual([0, 1, 1, 2, 3, 5, 8, 13, 21, 34]);
  });
});

View file

@ -0,0 +1,94 @@
/**
 * Behavioral tests for the string_utils module, run under Vitest.
 * Expected values are fixed constants so runs are deterministic across
 * original and optimized candidates.
 */
import { describe, test, expect } from 'vitest';
import { reverseString, isPalindrome, countVowels, uniqueWords } from '../string_utils';

describe('reverseString', () => {
  test('reverses a simple string', () => {
    expect(reverseString('hello')).toBe('olleh');
  });
  test('returns empty string for empty input', () => {
    expect(reverseString('')).toBe('');
  });
  test('returns same character for single character', () => {
    expect(reverseString('a')).toBe('a');
  });
  test('handles strings with spaces', () => {
    expect(reverseString('hello world')).toBe('dlrow olleh');
  });
  test('handles palindrome', () => {
    expect(reverseString('racecar')).toBe('racecar');
  });
});

describe('isPalindrome', () => {
  test('returns true for palindrome', () => {
    expect(isPalindrome('racecar')).toBe(true);
  });
  test('returns true for palindrome with spaces', () => {
    expect(isPalindrome('A man a plan a canal Panama')).toBe(true);
  });
  test('returns false for non-palindrome', () => {
    expect(isPalindrome('hello')).toBe(false);
  });
  test('returns true for empty string', () => {
    expect(isPalindrome('')).toBe(true);
  });
  test('returns true for single character', () => {
    expect(isPalindrome('a')).toBe(true);
  });
  test('handles mixed case', () => {
    expect(isPalindrome('RaceCar')).toBe(true);
  });
});

describe('countVowels', () => {
  test('counts vowels in simple string', () => {
    expect(countVowels('hello')).toBe(2);
  });
  test('returns 0 for string with no vowels', () => {
    expect(countVowels('bcdfg')).toBe(0);
  });
  test('returns 0 for empty string', () => {
    expect(countVowels('')).toBe(0);
  });
  test('counts uppercase vowels', () => {
    expect(countVowels('HELLO')).toBe(2);
  });
  test('counts all vowels', () => {
    expect(countVowels('aeiouAEIOU')).toBe(10);
  });
});

describe('uniqueWords', () => {
  test('finds unique words in simple string', () => {
    expect(uniqueWords('hello world')).toEqual(['hello', 'world']);
  });
  test('removes duplicates', () => {
    expect(uniqueWords('hello hello world')).toEqual(['hello', 'world']);
  });
  test('returns empty array for empty string', () => {
    expect(uniqueWords('')).toEqual([]);
  });
  test('handles multiple spaces', () => {
    expect(uniqueWords('hello world')).toEqual(['hello', 'world']);
  });
  test('normalizes case', () => {
    expect(uniqueWords('Hello hello HELLO')).toEqual(['hello']);
  });
});

View file

@ -0,0 +1,15 @@
{
  // TypeScript configuration for the Vitest sample project.
  // tsc accepts comments in tsconfig.json (JSONC).
  "compilerOptions": {
    "target": "ES2020",
    "module": "ESNext",
    "moduleResolution": "node",
    "esModuleInterop": true,
    "strict": true,
    "skipLibCheck": true,
    "outDir": "./dist",
    "declaration": true,
    // Vitest globals (describe/test/expect) type-check without imports;
    // the runtime side is enabled via globals:true in vitest.config.ts.
    "types": ["vitest/globals", "node"]
  },
  // Sources live at the package root; tests under ./tests.
  "include": ["./*.ts", "./tests/**/*.ts"],
  "exclude": ["node_modules", "dist"]
}

View file

@ -0,0 +1,13 @@
import { defineConfig } from 'vitest/config';

/**
 * Vitest configuration for the sample project.
 * Emits JUnit XML alongside the default reporter — presumably consumed by the
 * codeflash Python side; confirm the .codeflash/ path against the runner.
 */
export default defineConfig({
  test: {
    globals: true, // expose describe/test/expect without explicit imports
    environment: 'node',
    include: ['tests/**/*.test.ts'],
    reporters: ['default', 'junit'],
    outputFile: {
      junit: '.codeflash/vitest-results.xml',
    },
  },
});

View file

@ -1872,7 +1872,7 @@ class JavaScriptSupport:
# Write instrumented code to source file
source_file_path.write_text(instrumented_source, encoding="utf-8")
logger.debug("Wrote instrumented source to %s", source_file_path)
return True # noqa: TRY300
return True
except Exception as e:
logger.warning("Failed to instrument source for line profiling: %s", e)
return False
@ -1925,8 +1925,9 @@ class JavaScriptSupport:
project_root: Path | None = None,
enable_coverage: bool = False,
candidate_index: int = 0,
test_framework: str = "jest",
) -> tuple[Path, Any, Path | None, Path | None]:
"""Run Jest behavioral tests.
"""Run behavioral tests using the detected test framework.
Args:
test_paths: TestFiles object containing test file information.
@ -1936,11 +1937,25 @@ class JavaScriptSupport:
project_root: Project root directory.
enable_coverage: Whether to collect coverage information.
candidate_index: Index of the candidate being tested.
test_framework: Test framework to use ("jest" or "vitest").
Returns:
Tuple of (result_file_path, subprocess_result, coverage_path, config_path).
"""
if test_framework == "vitest":
from codeflash.languages.javascript.vitest_runner import run_vitest_behavioral_tests
return run_vitest_behavioral_tests(
test_paths=test_paths,
test_env=test_env,
cwd=cwd,
timeout=timeout,
project_root=project_root,
enable_coverage=enable_coverage,
candidate_index=candidate_index,
)
from codeflash.languages.javascript.test_runner import run_jest_behavioral_tests
return run_jest_behavioral_tests(
@ -1963,8 +1978,9 @@ class JavaScriptSupport:
min_loops: int = 5,
max_loops: int = 100_000,
target_duration_seconds: float = 10.0,
test_framework: str = "jest",
) -> tuple[Path, Any]:
"""Run Jest benchmarking tests.
"""Run benchmarking tests using the detected test framework.
Args:
test_paths: TestFiles object containing test file information.
@ -1975,11 +1991,26 @@ class JavaScriptSupport:
min_loops: Minimum number of loops for benchmarking.
max_loops: Maximum number of loops for benchmarking.
target_duration_seconds: Target duration for benchmarking in seconds.
test_framework: Test framework to use ("jest" or "vitest").
Returns:
Tuple of (result_file_path, subprocess_result).
"""
if test_framework == "vitest":
from codeflash.languages.javascript.vitest_runner import run_vitest_benchmarking_tests
return run_vitest_benchmarking_tests(
test_paths=test_paths,
test_env=test_env,
cwd=cwd,
timeout=timeout,
project_root=project_root,
min_loops=min_loops,
max_loops=max_loops,
target_duration_ms=int(target_duration_seconds * 1000),
)
from codeflash.languages.javascript.test_runner import run_jest_benchmarking_tests
return run_jest_benchmarking_tests(
@ -2001,8 +2032,9 @@ class JavaScriptSupport:
timeout: int | None = None,
project_root: Path | None = None,
line_profile_output_file: Path | None = None,
test_framework: str = "jest",
) -> tuple[Path, Any]:
"""Run Jest tests for line profiling.
"""Run tests for line profiling using the detected test framework.
Args:
test_paths: TestFiles object containing test file information.
@ -2011,11 +2043,24 @@ class JavaScriptSupport:
timeout: Optional timeout in seconds.
project_root: Project root directory.
line_profile_output_file: Path where line profile results will be written.
test_framework: Test framework to use ("jest" or "vitest").
Returns:
Tuple of (result_file_path, subprocess_result).
"""
if test_framework == "vitest":
from codeflash.languages.javascript.vitest_runner import run_vitest_line_profile_tests
return run_vitest_line_profile_tests(
test_paths=test_paths,
test_env=test_env,
cwd=cwd,
timeout=timeout,
project_root=project_root,
line_profile_output_file=line_profile_output_file,
)
from codeflash.languages.javascript.test_runner import run_jest_line_profile_tests
return run_jest_line_profile_tests(

View file

@ -0,0 +1,490 @@
"""Vitest test runner for JavaScript/TypeScript.
This module provides functions for running Vitest tests for behavioral
verification and performance benchmarking.
"""
from __future__ import annotations
import subprocess
import time
from pathlib import Path
from typing import TYPE_CHECKING
from codeflash.cli_cmds.console import logger
from codeflash.code_utils.code_utils import get_run_tmp_file
from codeflash.code_utils.shell_utils import get_cross_platform_subprocess_run_args
if TYPE_CHECKING:
from codeflash.models.models import TestFiles
def _find_vitest_project_root(file_path: Path) -> Path | None:
"""Find the Vitest project root by looking for vitest/vite config or package.json.
Traverses up from the given file path to find the nearest directory
containing vitest.config.js/ts, vite.config.js/ts, or package.json.
Args:
file_path: A file path within the Vitest project.
Returns:
The project root directory, or None if not found.
"""
current = file_path.parent if file_path.is_file() else file_path
while current != current.parent: # Stop at filesystem root
# Check for Vitest-specific config files first
if (
(current / "vitest.config.js").exists()
or (current / "vitest.config.ts").exists()
or (current / "vitest.config.mjs").exists()
or (current / "vitest.config.mts").exists()
or (current / "vite.config.js").exists()
or (current / "vite.config.ts").exists()
or (current / "vite.config.mjs").exists()
or (current / "vite.config.mts").exists()
or (current / "package.json").exists()
):
return current
current = current.parent
return None
def _ensure_runtime_files(project_root: Path) -> None:
    """Ensure JavaScript runtime package is installed in the project.

    Installs the codeflash npm package if not already present. The package
    provides all runtime files needed for test instrumentation. Installation
    is attempted from the local development package first, then from the npm
    registry.

    Args:
        project_root: The project root directory.

    """

    def attempt_install(package_spec: str, success_msg: str, fail_label: str) -> bool:
        # One `npm install --save-dev` attempt; True on success, False (with a
        # warning logged) on failure or subprocess error.
        try:
            completed = subprocess.run(
                ["npm", "install", "--save-dev", package_spec],
                check=False,
                cwd=project_root,
                capture_output=True,
                text=True,
                timeout=120,
            )
        except Exception as e:
            logger.warning(f"Error installing {fail_label}: {e}")
            return False
        if completed.returncode == 0:
            logger.debug(success_msg)
            return True
        logger.warning(f"Failed to install {fail_label}: {completed.stderr}")
        return False

    # Nothing to do when the package is already present.
    if (project_root / "node_modules" / "codeflash").exists():
        logger.debug("codeflash already installed")
        return

    # Prefer the local package checkout (development workflow).
    local_package_path = Path(__file__).parent.parent.parent.parent / "packages" / "codeflash"
    if local_package_path.exists() and attempt_install(
        str(local_package_path), "Installed codeflash from local package", "local package"
    ):
        return

    # Fall back to the public npm registry.
    if attempt_install("codeflash", "Installed codeflash from npm registry", "from npm"):
        return

    logger.error("Could not install codeflash. Please install it manually: npm install --save-dev codeflash")
def _build_vitest_behavioral_command(
test_files: list[Path], timeout: int | None = None, output_file: Path | None = None
) -> list[str]:
"""Build Vitest command for behavioral tests.
Args:
test_files: List of test files to run.
timeout: Optional timeout in seconds.
output_file: Optional path for JUnit XML output.
Returns:
Command list for subprocess execution.
"""
cmd = [
"npx",
"vitest",
"run", # Single execution (not watch mode)
"--reporter=default",
"--reporter=junit",
"--no-file-parallelism", # Serial execution for deterministic timing
]
if output_file:
cmd.append(f"--outputFile={output_file}")
if timeout:
cmd.append(f"--test-timeout={timeout * 1000}") # Vitest uses milliseconds
# Add test files as positional arguments (Vitest style)
cmd.extend(str(f.resolve()) for f in test_files)
return cmd
def _build_vitest_benchmarking_command(
    test_files: list[Path], timeout: int | None = None, output_file: Path | None = None
) -> list[str]:
    """Build Vitest command for benchmarking tests.

    The benchmarking invocation is identical to the behavioral one (single
    `vitest run`, JUnit reporter, serial execution for consistent timing), so
    this delegates to :func:`_build_vitest_behavioral_command` instead of
    duplicating it — the two copies had already started to drift only in
    comments.

    Args:
        test_files: List of test files to run.
        timeout: Optional timeout in seconds.
        output_file: Optional path for JUnit XML output.

    Returns:
        Command list for subprocess execution.

    """
    return _build_vitest_behavioral_command(test_files=test_files, timeout=timeout, output_file=output_file)
def run_vitest_behavioral_tests(
    test_paths: TestFiles,
    test_env: dict[str, str],
    cwd: Path,
    *,
    timeout: int | None = None,
    project_root: Path | None = None,
    enable_coverage: bool = False,
    candidate_index: int = 0,
) -> tuple[Path, subprocess.CompletedProcess, Path | None, Path | None]:
    """Run Vitest tests and return results in a format compatible with pytest output.

    Args:
        test_paths: TestFiles object containing test file information.
        test_env: Environment variables for the test run.
        cwd: Working directory for running tests.
        timeout: Optional timeout in seconds.
        project_root: Vitest project root (directory containing vitest.config or package.json).
        enable_coverage: Whether to collect coverage information.
        candidate_index: Index of the candidate being tested.

    Returns:
        Tuple of (result_file_path, subprocess_result, coverage_json_path, None).

    """
    # JUnit XML that Vitest writes via --outputFile.
    result_file_path = get_run_tmp_file(Path("vitest_results.xml"))
    # Get test files to run
    test_files = [Path(file.instrumented_behavior_file_path) for file in test_paths.test_files]
    # Use provided project_root, or detect it as fallback
    if project_root is None and test_files:
        project_root = _find_vitest_project_root(test_files[0])
    # Use the project root, or fall back to provided cwd
    effective_cwd = project_root if project_root else cwd
    logger.debug(f"Vitest working directory: {effective_cwd}")
    # Ensure the codeflash npm package is installed
    _ensure_runtime_files(effective_cwd)
    # Coverage output directory
    coverage_dir = get_run_tmp_file(Path("vitest_coverage"))
    coverage_json_path = coverage_dir / "coverage-final.json" if enable_coverage else None
    # Build Vitest command
    vitest_cmd = _build_vitest_behavioral_command(test_files=test_files, timeout=timeout, output_file=result_file_path)
    # Add coverage flags if enabled
    if enable_coverage:
        vitest_cmd.extend(["--coverage", "--coverage.reporter=json", f"--coverage.reportsDirectory={coverage_dir}"])
    # Set up environment
    vitest_env = test_env.copy()
    # Set codeflash output file for the vitest helper to write timing/behavior data (SQLite format)
    codeflash_sqlite_file = get_run_tmp_file(Path(f"test_return_values_{candidate_index}.sqlite"))
    vitest_env["CODEFLASH_OUTPUT_FILE"] = str(codeflash_sqlite_file)
    vitest_env["CODEFLASH_TEST_ITERATION"] = str(candidate_index)
    vitest_env["CODEFLASH_LOOP_INDEX"] = "1"
    vitest_env["CODEFLASH_MODE"] = "behavior"
    # Seed random number generator for reproducible test runs across original and optimized code
    vitest_env["CODEFLASH_RANDOM_SEED"] = "42"
    logger.debug(f"Running Vitest tests with command: {' '.join(vitest_cmd)}")
    start_time_ns = time.perf_counter_ns()
    try:
        run_args = get_cross_platform_subprocess_run_args(
            cwd=effective_cwd, env=vitest_env, timeout=timeout or 600, check=False, text=True, capture_output=True
        )
        result = subprocess.run(vitest_cmd, **run_args)  # noqa: PLW1510
        # Combine stderr into stdout for timing markers: downstream parsing
        # reads only stdout, and Vitest may emit markers on either stream.
        if result.stderr and not result.stdout:
            result = subprocess.CompletedProcess(
                args=result.args, returncode=result.returncode, stdout=result.stderr, stderr=""
            )
        elif result.stderr:
            result = subprocess.CompletedProcess(
                args=result.args, returncode=result.returncode, stdout=result.stdout + "\n" + result.stderr, stderr=""
            )
        logger.debug(f"Vitest result: returncode={result.returncode}")
    except subprocess.TimeoutExpired:
        # Synthesize a failed CompletedProcess so callers get a uniform shape.
        logger.warning(f"Vitest tests timed out after {timeout}s")
        result = subprocess.CompletedProcess(
            args=vitest_cmd, returncode=-1, stdout="", stderr="Test execution timed out"
        )
    except FileNotFoundError:
        # npx/vitest binary missing from PATH.
        logger.error("Vitest not found. Make sure Vitest is installed (npm install vitest)")
        result = subprocess.CompletedProcess(
            args=vitest_cmd, returncode=-1, stdout="", stderr="Vitest not found. Run: npm install vitest"
        )
    finally:
        wall_clock_ns = time.perf_counter_ns() - start_time_ns
        logger.debug(f"Vitest behavioral tests completed in {wall_clock_ns / 1e9:.2f}s")
    return result_file_path, result, coverage_json_path, None
def run_vitest_benchmarking_tests(
    test_paths: TestFiles,
    test_env: dict[str, str],
    cwd: Path,
    *,
    timeout: int | None = None,
    project_root: Path | None = None,
    min_loops: int = 5,
    max_loops: int = 100,
    target_duration_ms: int = 10_000,
    stability_check: bool = True,
) -> tuple[Path, subprocess.CompletedProcess]:
    """Run Vitest benchmarking tests with external looping from Python.

    Uses external process-level looping to run tests multiple times and
    collect timing data. This matches the Python pytest approach where
    looping is controlled externally for simplicity.

    Args:
        test_paths: TestFiles object containing test file information.
        test_env: Environment variables for the test run.
        cwd: Working directory for running tests.
        timeout: Optional timeout in seconds for the entire benchmark run.
        project_root: Vitest project root (directory containing vitest.config or package.json).
        min_loops: Minimum number of loop iterations.
        max_loops: Maximum number of loop iterations.
            NOTE(review): defaults to 100 here while the support.py dispatcher
            defaults to 100_000 — confirm which bound is intended.
        target_duration_ms: Target TOTAL duration in milliseconds for all loops.
        stability_check: Whether to enable stability-based early stopping.

    Returns:
        Tuple of (result_file_path, subprocess_result with stdout from all iterations).

    """
    result_file_path = get_run_tmp_file(Path("vitest_perf_results.xml"))
    # Get performance test files
    test_files = [Path(file.benchmarking_file_path) for file in test_paths.test_files if file.benchmarking_file_path]
    # Use provided project_root, or detect it as fallback
    if project_root is None and test_files:
        project_root = _find_vitest_project_root(test_files[0])
    effective_cwd = project_root if project_root else cwd
    logger.debug(f"Vitest benchmarking working directory: {effective_cwd}")
    # Ensure the codeflash npm package is installed
    _ensure_runtime_files(effective_cwd)
    # Build Vitest command for performance tests
    vitest_cmd = _build_vitest_benchmarking_command(
        test_files=test_files, timeout=timeout, output_file=result_file_path
    )
    # Base environment setup
    vitest_env = test_env.copy()
    codeflash_sqlite_file = get_run_tmp_file(Path("test_return_values_0.sqlite"))
    vitest_env["CODEFLASH_OUTPUT_FILE"] = str(codeflash_sqlite_file)
    vitest_env["CODEFLASH_TEST_ITERATION"] = "0"
    vitest_env["CODEFLASH_MODE"] = "performance"
    vitest_env["CODEFLASH_RANDOM_SEED"] = "42"
    # Internal loop configuration for capturePerf
    vitest_env["CODEFLASH_PERF_LOOP_COUNT"] = str(max_loops)
    vitest_env["CODEFLASH_PERF_MIN_LOOPS"] = str(min_loops)
    vitest_env["CODEFLASH_PERF_TARGET_DURATION_MS"] = str(target_duration_ms)
    vitest_env["CODEFLASH_PERF_STABILITY_CHECK"] = "true" if stability_check else "false"
    vitest_env["CODEFLASH_LOOP_INDEX"] = "1"
    # Total timeout for the entire benchmark run: at least 2 minutes, and at
    # least the target duration plus a one-minute safety margin.
    total_timeout = max(120, (target_duration_ms // 1000) + 60, timeout or 120)
    logger.debug(f"Running Vitest benchmarking tests: {' '.join(vitest_cmd)}")
    logger.debug(
        f"Vitest benchmarking config: min_loops={min_loops}, max_loops={max_loops}, "
        f"target_duration={target_duration_ms}ms, stability_check={stability_check}"
    )
    total_start_time = time.time()
    try:
        run_args = get_cross_platform_subprocess_run_args(
            cwd=effective_cwd, env=vitest_env, timeout=total_timeout, check=False, text=True, capture_output=True
        )
        result = subprocess.run(vitest_cmd, **run_args)  # noqa: PLW1510
        # Combine stderr into stdout for timing markers
        stdout = result.stdout or ""
        if result.stderr:
            stdout = stdout + "\n" + result.stderr if stdout else result.stderr
        result = subprocess.CompletedProcess(args=result.args, returncode=result.returncode, stdout=stdout, stderr="")
    except subprocess.TimeoutExpired:
        logger.warning(f"Vitest benchmarking timed out after {total_timeout}s")
        result = subprocess.CompletedProcess(args=vitest_cmd, returncode=-1, stdout="", stderr="Benchmarking timed out")
    except FileNotFoundError:
        logger.error("Vitest not found for benchmarking")
        result = subprocess.CompletedProcess(args=vitest_cmd, returncode=-1, stdout="", stderr="Vitest not found")
    wall_clock_seconds = time.time() - total_start_time
    logger.debug(f"Vitest benchmarking completed in {wall_clock_seconds:.2f}s")
    return result_file_path, result
def run_vitest_line_profile_tests(
    test_paths: TestFiles,
    test_env: dict[str, str],
    cwd: Path,
    *,
    timeout: int | None = None,
    project_root: Path | None = None,
    line_profile_output_file: Path | None = None,
) -> tuple[Path, subprocess.CompletedProcess]:
    """Run Vitest tests for line profiling.

    This runs tests against source code that has been instrumented with line profiler.
    The instrumentation collects execution counts and timing per line.

    Args:
        test_paths: TestFiles object containing test file information.
        test_env: Environment variables for the test run.
        cwd: Working directory for running tests.
        timeout: Optional timeout in seconds for the subprocess.
        project_root: Vitest project root (directory containing vitest.config or package.json).
        line_profile_output_file: Path where line profile results will be written.

    Returns:
        Tuple of (result_file_path, subprocess_result).

    """
    result_file_path = get_run_tmp_file(Path("vitest_line_profile_results.xml"))
    # Get test files to run - use instrumented behavior files if available, otherwise benchmarking files
    test_files = []
    for file in test_paths.test_files:
        if file.instrumented_behavior_file_path:
            test_files.append(Path(file.instrumented_behavior_file_path))
        elif file.benchmarking_file_path:
            test_files.append(Path(file.benchmarking_file_path))
    # Use provided project_root, or detect it as fallback
    if project_root is None and test_files:
        project_root = _find_vitest_project_root(test_files[0])
    effective_cwd = project_root if project_root else cwd
    logger.debug(f"Vitest line profiling working directory: {effective_cwd}")
    # Ensure the codeflash npm package is installed
    _ensure_runtime_files(effective_cwd)
    # Build Vitest command for line profiling - simple run without benchmarking loops
    vitest_cmd = [
        "npx",
        "vitest",
        "run",
        "--reporter=default",
        "--reporter=junit",
        "--no-file-parallelism",  # Serial execution for consistent line profiling
    ]
    vitest_cmd.append(f"--outputFile={result_file_path}")
    if timeout:
        vitest_cmd.append(f"--test-timeout={timeout * 1000}")
    vitest_cmd.extend(str(f.resolve()) for f in test_files)
    # Set up environment
    vitest_env = test_env.copy()
    codeflash_sqlite_file = get_run_tmp_file(Path("test_return_values_line_profile.sqlite"))
    vitest_env["CODEFLASH_OUTPUT_FILE"] = str(codeflash_sqlite_file)
    vitest_env["CODEFLASH_TEST_ITERATION"] = "0"
    vitest_env["CODEFLASH_LOOP_INDEX"] = "1"
    vitest_env["CODEFLASH_MODE"] = "line_profile"
    vitest_env["CODEFLASH_RANDOM_SEED"] = "42"
    # Pass the line profile output file path to the instrumented code
    if line_profile_output_file:
        vitest_env["CODEFLASH_LINE_PROFILE_OUTPUT"] = str(line_profile_output_file)
    subprocess_timeout = timeout or 600
    logger.debug(f"Running Vitest line profile tests: {' '.join(vitest_cmd)}")
    start_time_ns = time.perf_counter_ns()
    try:
        run_args = get_cross_platform_subprocess_run_args(
            cwd=effective_cwd, env=vitest_env, timeout=subprocess_timeout, check=False, text=True, capture_output=True
        )
        result = subprocess.run(vitest_cmd, **run_args)  # noqa: PLW1510
        # Combine stderr into stdout so downstream parsing sees one stream
        if result.stderr and not result.stdout:
            result = subprocess.CompletedProcess(
                args=result.args, returncode=result.returncode, stdout=result.stderr, stderr=""
            )
        elif result.stderr:
            result = subprocess.CompletedProcess(
                args=result.args, returncode=result.returncode, stdout=result.stdout + "\n" + result.stderr, stderr=""
            )
        logger.debug(f"Vitest line profile result: returncode={result.returncode}")
    except subprocess.TimeoutExpired:
        logger.warning(f"Vitest line profile tests timed out after {subprocess_timeout}s")
        result = subprocess.CompletedProcess(
            args=vitest_cmd, returncode=-1, stdout="", stderr="Line profile tests timed out"
        )
    except FileNotFoundError:
        logger.error("Vitest not found for line profiling")
        result = subprocess.CompletedProcess(args=vitest_cmd, returncode=-1, stdout="", stderr="Vitest not found")
    finally:
        wall_clock_ns = time.perf_counter_ns() - start_time_ns
        logger.debug(f"Vitest line profile tests completed in {wall_clock_ns / 1e9:.2f}s")
    return result_file_path, result

View file

View file

@ -0,0 +1,288 @@
"""Tests for JavaScript/TypeScript support.py test framework dispatch logic.
These tests verify that run_behavioral_tests, run_benchmarking_tests, and
run_line_profile_tests correctly dispatch to Jest or Vitest based on the
test_framework parameter.
"""
from __future__ import annotations
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from codeflash.languages.javascript.support import JavaScriptSupport, TypeScriptSupport
@pytest.fixture
def js_support() -> JavaScriptSupport:
    """Provide a fresh JavaScriptSupport instance for each test."""
    support = JavaScriptSupport()
    return support
@pytest.fixture
def ts_support() -> TypeScriptSupport:
    """Provide a fresh TypeScriptSupport instance for each test."""
    support = TypeScriptSupport()
    return support
@pytest.fixture
def mock_test_paths() -> MagicMock:
    """Build a mock TestFiles object holding one instrumented test file entry."""
    test_file = MagicMock()
    test_file.instrumented_behavior_file_path = Path("/project/tests/test_func.test.ts")
    test_file.benchmarking_file_path = Path("/project/tests/test_func__perf.test.ts")
    container = MagicMock()
    container.test_files = [test_file]
    return container
class TestBehavioralTestsDispatch:
    """Tests for run_behavioral_tests dispatch logic."""
    # The @patch targets name the runner functions in their defining modules.
    # NOTE(review): this only intercepts the call if JavaScriptSupport resolves
    # the runner via a module-attribute lookup at call time - confirm.
    @patch("codeflash.languages.javascript.test_runner.run_jest_behavioral_tests")
    def test_dispatches_to_jest_by_default(
        self,
        mock_jest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should dispatch to Jest when test_framework is not specified."""
        # Behavioral runners return a 4-tuple: (result_file, process_result, None, None).
        mock_jest_runner.return_value = (tmp_path / "result.xml", MagicMock(), None, None)
        # package.json makes tmp_path look like a JS project - presumably needed
        # for project-root detection inside the dispatcher.
        (tmp_path / "package.json").write_text('{"name": "test"}')
        js_support.run_behavioral_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
        )
        mock_jest_runner.assert_called_once()
    @patch("codeflash.languages.javascript.test_runner.run_jest_behavioral_tests")
    def test_dispatches_to_jest_explicitly(
        self,
        mock_jest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should dispatch to Jest when test_framework='jest'."""
        mock_jest_runner.return_value = (tmp_path / "result.xml", MagicMock(), None, None)
        (tmp_path / "package.json").write_text('{"name": "test"}')
        js_support.run_behavioral_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            test_framework="jest",
        )
        mock_jest_runner.assert_called_once()
    @patch("codeflash.languages.javascript.vitest_runner.run_vitest_behavioral_tests")
    def test_dispatches_to_vitest(
        self,
        mock_vitest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should dispatch to Vitest when test_framework='vitest'."""
        mock_vitest_runner.return_value = (tmp_path / "result.xml", MagicMock(), None, None)
        (tmp_path / "package.json").write_text('{"name": "test"}')
        js_support.run_behavioral_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            test_framework="vitest",
        )
        mock_vitest_runner.assert_called_once()
    @patch("codeflash.languages.javascript.vitest_runner.run_vitest_behavioral_tests")
    def test_typescript_support_dispatches_to_vitest(
        self,
        mock_vitest_runner: MagicMock,
        ts_support: TypeScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """TypeScriptSupport should also dispatch to Vitest when test_framework='vitest'."""
        mock_vitest_runner.return_value = (tmp_path / "result.xml", MagicMock(), None, None)
        (tmp_path / "package.json").write_text('{"name": "test"}')
        ts_support.run_behavioral_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            test_framework="vitest",
        )
        mock_vitest_runner.assert_called_once()
class TestBenchmarkingTestsDispatch:
    """Tests for run_benchmarking_tests dispatch logic."""
    @patch("codeflash.languages.javascript.test_runner.run_jest_benchmarking_tests")
    def test_dispatches_to_jest_by_default(
        self,
        mock_jest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should dispatch to Jest when test_framework is not specified."""
        # Benchmarking runners return a 2-tuple: (result_file, process_result).
        mock_jest_runner.return_value = (tmp_path / "result.xml", MagicMock())
        (tmp_path / "package.json").write_text('{"name": "test"}')
        js_support.run_benchmarking_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
        )
        mock_jest_runner.assert_called_once()
    @patch("codeflash.languages.javascript.vitest_runner.run_vitest_benchmarking_tests")
    def test_dispatches_to_vitest(
        self,
        mock_vitest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should dispatch to Vitest when test_framework='vitest'."""
        mock_vitest_runner.return_value = (tmp_path / "result.xml", MagicMock())
        (tmp_path / "package.json").write_text('{"name": "test"}')
        js_support.run_benchmarking_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            test_framework="vitest",
        )
        mock_vitest_runner.assert_called_once()
    @patch("codeflash.languages.javascript.vitest_runner.run_vitest_benchmarking_tests")
    def test_passes_loop_parameters(
        self,
        mock_vitest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should pass loop parameters to Vitest runner."""
        mock_vitest_runner.return_value = (tmp_path / "result.xml", MagicMock())
        (tmp_path / "package.json").write_text('{"name": "test"}')
        js_support.run_benchmarking_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            test_framework="vitest",
            min_loops=10,
            max_loops=50,
            target_duration_seconds=5.0,
        )
        call_kwargs = mock_vitest_runner.call_args.kwargs
        assert call_kwargs["min_loops"] == 10
        assert call_kwargs["max_loops"] == 50
        # The dispatcher is expected to convert 5.0 seconds to 5000 ms.
        assert call_kwargs["target_duration_ms"] == 5000
class TestLineProfileTestsDispatch:
    """Tests for run_line_profile_tests dispatch logic."""
    @patch("codeflash.languages.javascript.test_runner.run_jest_line_profile_tests")
    def test_dispatches_to_jest_by_default(
        self,
        mock_jest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should dispatch to Jest when test_framework is not specified."""
        mock_jest_runner.return_value = (tmp_path / "result.xml", MagicMock())
        (tmp_path / "package.json").write_text('{"name": "test"}')
        js_support.run_line_profile_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
        )
        mock_jest_runner.assert_called_once()
    @patch("codeflash.languages.javascript.vitest_runner.run_vitest_line_profile_tests")
    def test_dispatches_to_vitest(
        self,
        mock_vitest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should dispatch to Vitest when test_framework='vitest'."""
        mock_vitest_runner.return_value = (tmp_path / "result.xml", MagicMock())
        (tmp_path / "package.json").write_text('{"name": "test"}')
        js_support.run_line_profile_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            test_framework="vitest",
        )
        mock_vitest_runner.assert_called_once()
    @patch("codeflash.languages.javascript.vitest_runner.run_vitest_line_profile_tests")
    def test_passes_line_profile_output_file(
        self,
        mock_vitest_runner: MagicMock,
        js_support: JavaScriptSupport,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should pass line_profile_output_file to Vitest runner."""
        mock_vitest_runner.return_value = (tmp_path / "result.xml", MagicMock())
        (tmp_path / "package.json").write_text('{"name": "test"}')
        output_file = tmp_path / "line_profile.json"
        js_support.run_line_profile_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            test_framework="vitest",
            line_profile_output_file=output_file,
        )
        # The output path must be forwarded untouched to the Vitest runner.
        call_kwargs = mock_vitest_runner.call_args.kwargs
        assert call_kwargs["line_profile_output_file"] == output_file
class TestTestFrameworkProperty:
    """Tests for the default value of the test_framework property."""

    def test_javascript_default_framework_is_jest(self, js_support: JavaScriptSupport) -> None:
        """JavaScriptSupport should have Jest as default test framework."""
        framework = js_support.test_framework
        assert framework == "jest"

    def test_typescript_default_framework_is_jest(self, ts_support: TypeScriptSupport) -> None:
        """TypeScriptSupport should have Jest as default test framework."""
        framework = ts_support.test_framework
        assert framework == "jest"

View file

@ -0,0 +1,286 @@
"""Tests for Vitest JUnit XML output parsing and compatibility.
These tests verify that Vitest's JUnit XML output can be parsed
by the existing parsing infrastructure.
"""
from __future__ import annotations
from pathlib import Path
import pytest
class TestVitestJunitXmlFormat:
    """Tests for Vitest JUnit XML format compatibility."""
    @pytest.fixture
    def vitest_junit_xml(self, tmp_path: Path) -> Path:
        """Create a sample Vitest JUnit XML file (3 passing cases, 1 failing)."""
        xml_content = """<?xml version="1.0" encoding="UTF-8" ?>
<testsuites name="vitest tests" tests="4" failures="1" errors="0" time="0.537">
<testsuite name="tests/fibonacci.test.ts" timestamp="2026-01-30T18:03:49.433Z" hostname="localhost" tests="3" failures="0" errors="0" skipped="0" time="0.008">
<testcase classname="tests/fibonacci.test.ts" name="fibonacci &gt; returns 0 for n=0" time="0.001">
</testcase>
<testcase classname="tests/fibonacci.test.ts" name="fibonacci &gt; returns 1 for n=1" time="0.0005">
</testcase>
<testcase classname="tests/fibonacci.test.ts" name="fibonacci &gt; returns 55 for n=10" time="0.0001">
</testcase>
</testsuite>
<testsuite name="tests/string_utils.test.ts" timestamp="2026-01-30T18:03:49.438Z" hostname="localhost" tests="1" failures="1" errors="0" skipped="0" time="0.01">
<testcase classname="tests/string_utils.test.ts" name="reverseString &gt; reverses a simple string" time="0.0007">
<failure message="expected &apos;olleh&apos; to equal &apos;hello&apos;" type="AssertionError">AssertionError: expected 'olleh' to equal 'hello'</failure>
</testcase>
</testsuite>
</testsuites>"""
        junit_file = tmp_path / "vitest-results.xml"
        junit_file.write_text(xml_content)
        return junit_file
    def test_can_parse_vitest_junit_xml(self, vitest_junit_xml: Path) -> None:
        """Should be able to parse Vitest JUnit XML with junitparser."""
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(vitest_junit_xml))
        assert xml is not None
        # Count test cases
        test_count = sum(len(list(suite)) for suite in xml)
        assert test_count == 4
    def test_extracts_test_suite_names(self, vitest_junit_xml: Path) -> None:
        """Should extract test suite names from Vitest JUnit XML."""
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(vitest_junit_xml))
        suite_names = [suite.name for suite in xml]
        assert "tests/fibonacci.test.ts" in suite_names
        assert "tests/string_utils.test.ts" in suite_names
    def test_extracts_test_case_names(self, vitest_junit_xml: Path) -> None:
        """Should extract test case names from Vitest JUnit XML."""
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(vitest_junit_xml))
        test_names = []
        for suite in xml:
            for case in suite:
                test_names.append(case.name)
        # Vitest uses > as separator (encoded as &gt; in the raw XML)
        assert "fibonacci > returns 0 for n=0" in test_names
        assert "reverseString > reverses a simple string" in test_names
    def test_extracts_classname_as_file_path(self, vitest_junit_xml: Path) -> None:
        """Should extract classname which contains file path in Vitest."""
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(vitest_junit_xml))
        classnames = set()
        for suite in xml:
            for case in suite:
                classnames.add(case.classname)
        # Vitest uses file path as classname
        assert "tests/fibonacci.test.ts" in classnames
        assert "tests/string_utils.test.ts" in classnames
    def test_extracts_test_time(self, vitest_junit_xml: Path) -> None:
        """Should extract test execution time from Vitest JUnit XML."""
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(vitest_junit_xml))
        for suite in xml:
            for case in suite:
                # Time should be a non-negative float (seconds)
                assert isinstance(case.time, float)
                assert case.time >= 0
    def test_detects_failures(self, vitest_junit_xml: Path) -> None:
        """Should detect test failures in Vitest JUnit XML."""
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(vitest_junit_xml))
        failures = []
        for suite in xml:
            for case in suite:
                if not case.is_passed:
                    failures.append(case.name)
        assert len(failures) == 1
        assert "reverseString > reverses a simple string" in failures
    def test_extracts_failure_message(self, vitest_junit_xml: Path) -> None:
        """Should extract failure message from Vitest JUnit XML."""
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(vitest_junit_xml))
        # Count the messages actually inspected so the test cannot pass
        # vacuously when no failure element exposes a message attribute.
        checked = 0
        for suite in xml:
            for case in suite:
                if not case.is_passed:
                    # Get failure element(s) attached to the case
                    for result in case.result:
                        if hasattr(result, "message"):
                            assert "expected" in result.message.lower()
                            checked += 1
        # The fixture contains exactly one <failure> with a message.
        assert checked == 1
class TestVitestJunitXmlResolution:
    """Tests for resolving test file paths from Vitest JUnit XML."""
    def test_resolves_test_file_from_vitest_classname(self, tmp_path: Path) -> None:
        """Should resolve test file path from Vitest classname."""
        from codeflash.verification.parse_test_output import resolve_test_file_from_class_path
        # Create test directory structure
        tests_dir = tmp_path / "tests"
        tests_dir.mkdir()
        test_file = tests_dir / "fibonacci.test.ts"
        test_file.write_text("// test file")
        # Vitest uses file paths as classname; tmp_path acts as the project root
        # the relative classname is resolved against.
        classname = "tests/fibonacci.test.ts"
        result = resolve_test_file_from_class_path(classname, tmp_path)
        assert result is not None
        assert result.exists()
    def test_handles_nested_test_paths(self, tmp_path: Path) -> None:
        """Should handle nested test paths from Vitest."""
        from codeflash.verification.parse_test_output import resolve_test_file_from_class_path
        # Create nested test directory structure
        tests_dir = tmp_path / "tests" / "unit"
        tests_dir.mkdir(parents=True)
        test_file = tests_dir / "fibonacci.test.ts"
        test_file.write_text("// test file")
        # Vitest uses file paths as classname
        classname = "tests/unit/fibonacci.test.ts"
        result = resolve_test_file_from_class_path(classname, tmp_path)
        assert result is not None
        assert result.exists()
class TestVitestTimingMarkers:
    """Tests for Vitest timing marker extraction.
    Timing markers are used to measure function execution time during benchmarking.
    The format is the same for Jest and Vitest since they use the same codeflash helper.
    """
    def test_parses_start_timing_marker(self) -> None:
        """Should parse start timing marker from Vitest output."""
        from codeflash.verification.parse_test_output import jest_start_pattern
        # Timing marker format: !$######testName:testName:funcName:loopIndex:lineId######$!
        output = "!$######fibonacci.test.ts:returns 0 for n=0:fibonacci:1:line_0######$!"
        matches = jest_start_pattern.findall(output)
        assert len(matches) == 1
        # The pattern captures five colon-separated groups, checked in order.
        match = matches[0]
        assert match[0] == "fibonacci.test.ts"  # test file
        assert match[1] == "returns 0 for n=0"  # test name
        assert match[2] == "fibonacci"  # function name
        assert match[3] == "1"  # loop index
        assert match[4] == "line_0"  # line id
    def test_parses_end_timing_marker(self) -> None:
        """Should parse end timing marker from Vitest output."""
        from codeflash.verification.parse_test_output import jest_end_pattern
        # End marker format: !######testName:testName:funcName:loopIndex:lineId:durationNs######!
        output = "!######fibonacci.test.ts:returns 0 for n=0:fibonacci:1:line_0:123456######!"
        matches = jest_end_pattern.findall(output)
        assert len(matches) == 1
        # End markers carry a sixth group: the measured duration.
        match = matches[0]
        assert match[0] == "fibonacci.test.ts"  # test file
        assert match[1] == "returns 0 for n=0"  # test name
        assert match[2] == "fibonacci"  # function name
        assert match[3] == "1"  # loop index
        assert match[4] == "line_0"  # line id
        assert match[5] == "123456"  # duration in nanoseconds
    def test_extracts_multiple_timing_markers(self) -> None:
        """Should extract multiple timing markers from Vitest output."""
        from codeflash.verification.parse_test_output import jest_end_pattern, jest_start_pattern
        # Markers interleaved with ordinary runner output, as in real logs.
        output = """Running tests...
!$######test.ts:test1:func:1:id1######$!
executing...
!######test.ts:test1:func:1:id1:100000######!
!$######test.ts:test2:func:1:id2######$!
executing...
!######test.ts:test2:func:1:id2:200000######!
Done."""
        start_matches = jest_start_pattern.findall(output)
        end_matches = jest_end_pattern.findall(output)
        assert len(start_matches) == 2
        assert len(end_matches) == 2
        # Verify durations
        durations = [int(m[5]) for m in end_matches]
        assert durations == [100000, 200000]
class TestVitestRealJunitOutput:
    """Tests using real Vitest JUnit output from the test project.

    These tests depend on artifacts produced by running ``npm test`` in the
    sample project and skip themselves when those artifacts are absent.
    """
    @pytest.fixture
    def vitest_project_dir(self) -> Path:
        """Get the Vitest sample project directory."""
        # NOTE(review): assumes this test file sits four directory levels
        # below the repository root - confirm if the file is moved.
        project_root = Path(__file__).parent.parent.parent.parent
        vitest_dir = project_root / "code_to_optimize" / "js" / "code_to_optimize_vitest"
        if not vitest_dir.exists():
            pytest.skip("code_to_optimize_vitest directory not found")
        return vitest_dir
    def test_parses_real_vitest_junit_output(self, vitest_project_dir: Path) -> None:
        """Should parse real Vitest JUnit output from test project."""
        junit_file = vitest_project_dir / ".codeflash" / "vitest-results.xml"
        if not junit_file.exists():
            pytest.skip("Vitest JUnit output not found - run npm test first")
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(junit_file))
        # Should have parsed without errors
        assert xml is not None
        # Should have multiple test suites
        suite_count = len(list(xml))
        assert suite_count >= 2
        # All tests should pass in the sample project
        for suite in xml:
            for case in suite:
                assert case.is_passed, f"Test {case.name} should pass"
    def test_counts_tests_in_real_output(self, vitest_project_dir: Path) -> None:
        """Should count all tests in real Vitest JUnit output."""
        junit_file = vitest_project_dir / ".codeflash" / "vitest-results.xml"
        if not junit_file.exists():
            pytest.skip("Vitest JUnit output not found - run npm test first")
        from junitparser import JUnitXml
        xml = JUnitXml.fromfile(str(junit_file))
        test_count = sum(len(list(suite)) for suite in xml)
        # We have 22 tests in fibonacci.test.ts and 21 in string_utils.test.ts
        # (43 total); >= 40 leaves headroom for minor edits to the samples.
        assert test_count >= 40

View file

@ -0,0 +1,458 @@
"""Tests for Vitest test runner."""
from __future__ import annotations
import subprocess
from pathlib import Path
from typing import TYPE_CHECKING
from unittest.mock import MagicMock, patch
import pytest
from codeflash.languages.javascript.vitest_runner import (
_build_vitest_behavioral_command,
_build_vitest_benchmarking_command,
_find_vitest_project_root,
run_vitest_behavioral_tests,
run_vitest_benchmarking_tests,
run_vitest_line_profile_tests,
)
if TYPE_CHECKING:
from codeflash.models.models import TestFiles
@pytest.fixture
def mock_test_paths() -> MagicMock:
    """Build a mock TestFiles object with a single test-file entry."""
    entry = MagicMock()
    entry.instrumented_behavior_file_path = Path("/project/tests/test_func.test.ts")
    entry.benchmarking_file_path = Path("/project/tests/test_func__perf.test.ts")
    paths = MagicMock()
    paths.test_files = [entry]
    return paths
class TestFindVitestProjectRoot:
    """Tests for _find_vitest_project_root function."""

    @staticmethod
    def _write_test_file(root: Path) -> Path:
        """Create tests/test.test.ts under root and return its path."""
        test_file = root / "tests" / "test.test.ts"
        test_file.parent.mkdir(parents=True)
        test_file.write_text("")
        return test_file

    def test_finds_vitest_config_js(self, tmp_path: Path) -> None:
        """Should find project root via vitest.config.js."""
        (tmp_path / "vitest.config.js").write_text("export default {}")
        test_file = self._write_test_file(tmp_path)
        assert _find_vitest_project_root(test_file) == tmp_path

    def test_finds_vitest_config_ts(self, tmp_path: Path) -> None:
        """Should find project root via vitest.config.ts."""
        (tmp_path / "vitest.config.ts").write_text("export default {}")
        test_file = self._write_test_file(tmp_path)
        assert _find_vitest_project_root(test_file) == tmp_path

    def test_finds_vite_config_js(self, tmp_path: Path) -> None:
        """Should find project root via vite.config.js (Vitest can be configured in vite config)."""
        (tmp_path / "vite.config.js").write_text("export default {}")
        test_file = self._write_test_file(tmp_path)
        assert _find_vitest_project_root(test_file) == tmp_path

    def test_falls_back_to_package_json(self, tmp_path: Path) -> None:
        """Should fall back to package.json when no vitest config found."""
        (tmp_path / "package.json").write_text('{"name": "test"}')
        test_file = self._write_test_file(tmp_path)
        assert _find_vitest_project_root(test_file) == tmp_path

    def test_returns_none_when_no_config(self, tmp_path: Path) -> None:
        """Should return None when no vitest/vite config or package.json found."""
        test_file = self._write_test_file(tmp_path)
        assert _find_vitest_project_root(test_file) is None
class TestBuildVitestBehavioralCommand:
    """Tests for _build_vitest_behavioral_command function."""

    def test_basic_command_structure(self) -> None:
        """Should build basic Vitest command with required flags."""
        cmd = _build_vitest_behavioral_command([Path("/project/tests/test.test.ts")], timeout=60)
        # 'run' is Vitest's single-execution (non-watch) subcommand.
        for token in ("npx", "vitest", "run"):
            assert token in cmd

    def test_includes_reporter_flags(self) -> None:
        """Should include reporter flags for JUnit output."""
        cmd = _build_vitest_behavioral_command([Path("/project/tests/test.test.ts")], timeout=60)
        assert {"--reporter=default", "--reporter=junit"}.issubset(set(cmd))

    def test_includes_serial_execution_flag(self) -> None:
        """Should include flag for serial test execution."""
        cmd = _build_vitest_behavioral_command([Path("/project/tests/test.test.ts")], timeout=60)
        # Vitest's equivalent of Jest's --runInBand.
        assert "--no-file-parallelism" in cmd

    def test_includes_test_files(self) -> None:
        """Should include test files at the end of command."""
        paths = [
            Path("/project/tests/test_a.test.ts"),
            Path("/project/tests/test_b.test.ts"),
        ]
        cmd = _build_vitest_behavioral_command(paths, timeout=60)
        for path in paths:
            assert str(path.resolve()) in cmd

    def test_includes_timeout(self) -> None:
        """Should include test timeout in milliseconds."""
        cmd = _build_vitest_behavioral_command([Path("/project/tests/test.test.ts")], timeout=120)
        # 120 s becomes 120000 ms; Vitest's flag is hyphenated, not camelCase.
        assert "--test-timeout=120000" in cmd
class TestBuildVitestBenchmarkingCommand:
    """Tests for _build_vitest_benchmarking_command function."""

    def test_basic_command_structure(self) -> None:
        """Should build basic Vitest benchmarking command."""
        cmd = _build_vitest_benchmarking_command(
            [Path("/project/tests/test__perf.test.ts")], timeout=60
        )
        for token in ("npx", "vitest", "run"):
            assert token in cmd

    def test_includes_serial_execution(self) -> None:
        """Should include serial execution for consistent benchmarking."""
        cmd = _build_vitest_benchmarking_command(
            [Path("/project/tests/test__perf.test.ts")], timeout=60
        )
        assert "--no-file-parallelism" in cmd
class TestRunVitestBehavioralTests:
    """Tests for run_vitest_behavioral_tests function."""
    # Stacked @patch decorators apply bottom-up: the decorator closest to the
    # method (get_run_tmp_file) maps to the first mock parameter.
    @patch("subprocess.run")
    @patch("codeflash.languages.javascript.vitest_runner._ensure_runtime_files")
    @patch("codeflash.languages.javascript.vitest_runner.get_run_tmp_file")
    def test_sets_vitest_env_vars(
        self,
        mock_tmp_file: MagicMock,
        mock_ensure_runtime: MagicMock,
        mock_subprocess_run: MagicMock,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should set correct environment variables for Vitest."""
        mock_tmp_file.return_value = tmp_path / "vitest_results.xml"
        mock_subprocess_run.return_value = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="", stderr=""
        )
        (tmp_path / "package.json").write_text('{"name": "test"}')
        run_vitest_behavioral_tests(
            test_paths=mock_test_paths,
            test_env={"PATH": "/usr/bin"},
            cwd=tmp_path,
            project_root=tmp_path,
        )
        # Inspect the env dict the runner handed to subprocess.run.
        call_args = mock_subprocess_run.call_args
        env = call_args.kwargs.get("env", {})
        # Check Vitest-specific env vars
        assert "CODEFLASH_OUTPUT_FILE" in env
        assert "CODEFLASH_MODE" in env
        assert env["CODEFLASH_MODE"] == "behavior"
        assert "CODEFLASH_LOOP_INDEX" in env
    @patch("subprocess.run")
    @patch("codeflash.languages.javascript.vitest_runner._ensure_runtime_files")
    @patch("codeflash.languages.javascript.vitest_runner.get_run_tmp_file")
    def test_returns_result_file_path(
        self,
        mock_tmp_file: MagicMock,
        mock_ensure_runtime: MagicMock,
        mock_subprocess_run: MagicMock,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should return the result file path as first element of tuple."""
        result_path = tmp_path / "vitest_results.xml"
        mock_tmp_file.return_value = result_path
        mock_subprocess_run.return_value = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="", stderr=""
        )
        (tmp_path / "package.json").write_text('{"name": "test"}')
        result_file_path, _, _, _ = run_vitest_behavioral_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
        )
        assert result_file_path == result_path
    @patch("subprocess.run")
    @patch("codeflash.languages.javascript.vitest_runner._ensure_runtime_files")
    @patch("codeflash.languages.javascript.vitest_runner.get_run_tmp_file")
    def test_uses_vitest_run_command(
        self,
        mock_tmp_file: MagicMock,
        mock_ensure_runtime: MagicMock,
        mock_subprocess_run: MagicMock,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should use 'vitest run' for single execution."""
        mock_tmp_file.return_value = tmp_path / "vitest_results.xml"
        mock_subprocess_run.return_value = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="", stderr=""
        )
        (tmp_path / "package.json").write_text('{"name": "test"}')
        run_vitest_behavioral_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
        )
        call_args = mock_subprocess_run.call_args
        # First positional argument passed to subprocess.run is the command list.
        cmd = call_args[0][0]
        assert "vitest" in cmd
        assert "run" in cmd
class TestRunVitestBenchmarkingTests:
    """Tests for run_vitest_benchmarking_tests function."""
    @patch("subprocess.run")
    @patch("codeflash.languages.javascript.vitest_runner._ensure_runtime_files")
    @patch("codeflash.languages.javascript.vitest_runner.get_run_tmp_file")
    def test_sets_performance_mode(
        self,
        mock_tmp_file: MagicMock,
        mock_ensure_runtime: MagicMock,
        mock_subprocess_run: MagicMock,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should set CODEFLASH_MODE to 'performance'."""
        mock_tmp_file.return_value = tmp_path / "vitest_perf_results.xml"
        mock_subprocess_run.return_value = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="", stderr=""
        )
        (tmp_path / "package.json").write_text('{"name": "test"}')
        run_vitest_benchmarking_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
        )
        call_args = mock_subprocess_run.call_args
        env = call_args.kwargs.get("env", {})
        assert env["CODEFLASH_MODE"] == "performance"
    @patch("subprocess.run")
    @patch("codeflash.languages.javascript.vitest_runner._ensure_runtime_files")
    @patch("codeflash.languages.javascript.vitest_runner.get_run_tmp_file")
    def test_sets_loop_configuration(
        self,
        mock_tmp_file: MagicMock,
        mock_ensure_runtime: MagicMock,
        mock_subprocess_run: MagicMock,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should set loop configuration environment variables."""
        mock_tmp_file.return_value = tmp_path / "vitest_perf_results.xml"
        mock_subprocess_run.return_value = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="", stderr=""
        )
        (tmp_path / "package.json").write_text('{"name": "test"}')
        run_vitest_benchmarking_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            min_loops=10,
            max_loops=50,
            target_duration_ms=5000,
        )
        # Loop config is passed to the JS side via env vars (stringified ints).
        call_args = mock_subprocess_run.call_args
        env = call_args.kwargs.get("env", {})
        assert env["CODEFLASH_PERF_MIN_LOOPS"] == "10"
        assert env["CODEFLASH_PERF_LOOP_COUNT"] == "50"
        assert env["CODEFLASH_PERF_TARGET_DURATION_MS"] == "5000"
class TestRunVitestLineProfileTests:
    """Tests for run_vitest_line_profile_tests function."""
    @patch("subprocess.run")
    @patch("codeflash.languages.javascript.vitest_runner._ensure_runtime_files")
    @patch("codeflash.languages.javascript.vitest_runner.get_run_tmp_file")
    def test_sets_line_profile_mode(
        self,
        mock_tmp_file: MagicMock,
        mock_ensure_runtime: MagicMock,
        mock_subprocess_run: MagicMock,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should set CODEFLASH_MODE to 'line_profile'."""
        mock_tmp_file.return_value = tmp_path / "vitest_line_profile_results.xml"
        mock_subprocess_run.return_value = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="", stderr=""
        )
        (tmp_path / "package.json").write_text('{"name": "test"}')
        run_vitest_line_profile_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
        )
        call_args = mock_subprocess_run.call_args
        env = call_args.kwargs.get("env", {})
        assert env["CODEFLASH_MODE"] == "line_profile"
    @patch("subprocess.run")
    @patch("codeflash.languages.javascript.vitest_runner._ensure_runtime_files")
    @patch("codeflash.languages.javascript.vitest_runner.get_run_tmp_file")
    def test_sets_line_profile_output_file(
        self,
        mock_tmp_file: MagicMock,
        mock_ensure_runtime: MagicMock,
        mock_subprocess_run: MagicMock,
        mock_test_paths: MagicMock,
        tmp_path: Path,
    ) -> None:
        """Should set CODEFLASH_LINE_PROFILE_OUTPUT when provided."""
        mock_tmp_file.return_value = tmp_path / "vitest_line_profile_results.xml"
        mock_subprocess_run.return_value = subprocess.CompletedProcess(
            args=[], returncode=0, stdout="", stderr=""
        )
        (tmp_path / "package.json").write_text('{"name": "test"}')
        line_profile_output = tmp_path / "line_profile.json"
        run_vitest_line_profile_tests(
            test_paths=mock_test_paths,
            test_env={},
            cwd=tmp_path,
            project_root=tmp_path,
            line_profile_output_file=line_profile_output,
        )
        # The output path must reach the JS side via the env var, stringified.
        call_args = mock_subprocess_run.call_args
        env = call_args.kwargs.get("env", {})
        assert env["CODEFLASH_LINE_PROFILE_OUTPUT"] == str(line_profile_output)
class TestVitestVsJestCommandDifferences:
    """Tests documenting the key differences between Vitest and Jest commands.

    These are living documentation: each test pins one CLI difference so a
    regression toward Jest-style flags is caught immediately.
    """
    def test_vitest_uses_run_subcommand(self) -> None:
        """Vitest uses 'run' for single execution, Jest doesn't need it."""
        test_files = [Path("/project/tests/test.test.ts")]
        vitest_cmd = _build_vitest_behavioral_command(test_files, timeout=60)
        # Vitest: npx vitest run ...
        # Jest:   npx jest ...
        assert vitest_cmd[0:3] == ["npx", "vitest", "run"]
    def test_vitest_uses_hyphenated_timeout(self) -> None:
        """Vitest uses --test-timeout, Jest uses --testTimeout (camelCase)."""
        test_files = [Path("/project/tests/test.test.ts")]
        vitest_cmd = _build_vitest_behavioral_command(test_files, timeout=60)
        # Vitest: --test-timeout=<ms>
        # Jest:   --testTimeout=<ms>
        assert any("--test-timeout=" in arg for arg in vitest_cmd)
        assert not any("--testTimeout=" in arg for arg in vitest_cmd)
    def test_vitest_uses_no_file_parallelism(self) -> None:
        """Vitest uses --no-file-parallelism, Jest uses --runInBand."""
        test_files = [Path("/project/tests/test.test.ts")]
        vitest_cmd = _build_vitest_behavioral_command(test_files, timeout=60)
        # Vitest: --no-file-parallelism
        # Jest:   --runInBand
        assert "--no-file-parallelism" in vitest_cmd
        assert "--runInBand" not in vitest_cmd
    def test_vitest_uses_output_file_flag(self) -> None:
        """Vitest uses --outputFile for JUnit output path."""
        test_files = [Path("/project/tests/test.test.ts")]
        vitest_cmd = _build_vitest_behavioral_command(
            test_files, timeout=60, output_file=Path("/tmp/results.xml")
        )
        # Vitest: --outputFile=/tmp/results.xml
        # Jest:   uses JEST_JUNIT_OUTPUT_FILE env var
        assert any("--outputFile=" in arg for arg in vitest_cmd)
    def test_vitest_positional_test_files(self) -> None:
        """Vitest uses positional args for test files, not --runTestsByPath."""
        test_files = [Path("/project/tests/test.test.ts")]
        vitest_cmd = _build_vitest_behavioral_command(test_files, timeout=60)
        # Vitest: files are positional
        # Jest:   --runTestsByPath <files>
        assert "--runTestsByPath" not in vitest_cmd
        # Test files should be at the end
        assert str(Path("/project/tests/test.test.ts").resolve()) in vitest_cmd

View file

@ -0,0 +1,300 @@
"""End-to-end integration tests for Vitest pipeline.
Tests the full optimization pipeline for Vitest projects:
- Function discovery
- Code context extraction
- Test discovery
- Test framework detection
"""
from pathlib import Path
import pytest
from codeflash.code_utils.config_js import detect_test_runner, get_package_json_data
from codeflash.discovery.functions_to_optimize import find_all_functions_in_file, get_files_for_language
from codeflash.languages.base import Language
class TestVitestProjectDiscovery:
    """Tests for function discovery in a Vitest project."""

    @pytest.fixture
    def vitest_project_dir(self):
        """Get the Vitest sample project directory."""
        repo_root = Path(__file__).parent.parent.parent
        sample_dir = repo_root / "code_to_optimize" / "js" / "code_to_optimize_vitest"
        if not sample_dir.exists():
            pytest.skip("code_to_optimize_vitest directory not found")
        return sample_dir

    def test_detects_vitest_as_test_runner(self, vitest_project_dir):
        """Test that Vitest is detected as the test runner."""
        pkg_data = get_package_json_data(vitest_project_dir / "package.json")
        assert pkg_data is not None
        assert detect_test_runner(vitest_project_dir, pkg_data) == "vitest"

    def test_discover_functions_in_fibonacci(self, vitest_project_dir):
        """Test discovering functions in fibonacci.ts."""
        source_file = vitest_project_dir / "fibonacci.ts"
        if not source_file.exists():
            pytest.skip("fibonacci.ts not found")
        discovered = find_all_functions_in_file(source_file)
        assert source_file in discovered
        found = discovered[source_file]
        names = {fn.function_name for fn in found}
        # The main exported functions must all be discovered.
        for expected in ("fibonacci", "isFibonacci", "isPerfectSquare", "fibonacciSequence"):
            assert expected in names
        # Every discovered function should be tagged as TypeScript.
        for fn in found:
            assert fn.language == "typescript"

    def test_discover_functions_in_string_utils(self, vitest_project_dir):
        """Test discovering functions in string_utils.ts."""
        source_file = vitest_project_dir / "string_utils.ts"
        if not source_file.exists():
            pytest.skip("string_utils.ts not found")
        discovered = find_all_functions_in_file(source_file)
        assert source_file in discovered
        names = {fn.function_name for fn in discovered[source_file]}
        for expected in ("reverseString", "isPalindrome", "countVowels", "uniqueWords"):
            assert expected in names

    def test_get_typescript_files(self, vitest_project_dir):
        """Test getting TypeScript files from Vitest project directory."""
        all_files = get_files_for_language(vitest_project_dir, Language.TYPESCRIPT)
        # Keep only .ts sources, excluding anything with "test" in its name.
        sources = [p for p in all_files if p.suffix == ".ts" and "test" not in p.name]
        assert len(sources) >= 2  # fibonacci.ts, string_utils.ts
        # Test files live under tests/, so at least two sources sit at the root.
        at_root = [p for p in sources if p.parent == vitest_project_dir]
        assert len(at_root) >= 2
class TestVitestCodeContext:
    """Tests for code context extraction in Vitest project."""

    @pytest.fixture
    def vitest_project_dir(self):
        """Get the Vitest sample project directory."""
        project_root = Path(__file__).parent.parent.parent
        vitest_dir = project_root / "code_to_optimize" / "js" / "code_to_optimize_vitest"
        if not vitest_dir.exists():
            pytest.skip("code_to_optimize_vitest directory not found")
        return vitest_dir

    def test_extract_code_context_for_typescript(self, vitest_project_dir):
        """Test extracting code context for a TypeScript function.

        The current-language global is forced to TypeScript only for the
        duration of this test and restored afterwards, so the override does
        not leak into other tests in the session.
        """
        from codeflash.context.code_context_extractor import get_code_optimization_context
        from codeflash.languages import current as lang_current
        from codeflash.languages.base import Language

        fib_file = vitest_project_dir / "fibonacci.ts"
        if not fib_file.exists():
            pytest.skip("fibonacci.ts not found")

        # Force set language to TypeScript for proper context extraction
        # routing, remembering the previous value so it can be restored.
        # (The original test mutated this module-level global without
        # cleanup, polluting every subsequent test.)
        previous_language = getattr(lang_current, "_current_language", None)
        lang_current._current_language = Language.TYPESCRIPT
        try:
            functions = find_all_functions_in_file(fib_file)
            func_list = functions[fib_file]
            # Find the fibonacci function
            fib_func = next((f for f in func_list if f.function_name == "fibonacci"), None)
            assert fib_func is not None
            # Extract code context
            context = get_code_optimization_context(fib_func, vitest_project_dir)
            # Verify context structure
            assert context.read_writable_code is not None
            assert context.read_writable_code.language == "typescript"
            assert len(context.read_writable_code.code_strings) > 0
            # The code should contain the function
            code = context.read_writable_code.code_strings[0].code
            assert "fibonacci" in code
        finally:
            lang_current._current_language = previous_language
class TestVitestTestDiscovery:
    """Tests for Vitest test discovery."""

    @pytest.fixture
    def vitest_project_dir(self):
        """Get the Vitest sample project directory."""
        repo_root = Path(__file__).parent.parent.parent
        sample_dir = repo_root / "code_to_optimize" / "js" / "code_to_optimize_vitest"
        if not sample_dir.exists():
            pytest.skip("code_to_optimize_vitest directory not found")
        return sample_dir

    def test_discover_vitest_tests(self, vitest_project_dir):
        """Test discovering Vitest tests for TypeScript functions."""
        from codeflash.languages import get_language_support
        from codeflash.languages.base import FunctionInfo, Language

        support = get_language_support(Language.TYPESCRIPT)
        tests_dir = vitest_project_dir / "tests"
        if not tests_dir.exists():
            pytest.skip("tests directory not found")

        # Describe the fibonacci function so discovery can match tests to it.
        target = FunctionInfo(
            name="fibonacci",
            file_path=vitest_project_dir / "fibonacci.ts",
            start_line=11,
            end_line=16,
            language=Language.TYPESCRIPT,
        )
        discovered = support.discover_tests(tests_dir, [target])
        # Either the qualified name keys the mapping, or "fibonacci" appears
        # somewhere in the discovered structure.
        assert target.qualified_name in discovered or "fibonacci" in str(discovered)
class TestVitestRunnerDispatch:
    """Tests for Vitest runner dispatch in support.py."""

    @pytest.fixture
    def vitest_project_dir(self):
        """Get the Vitest sample project directory."""
        # NOTE(review): no test in this class currently uses this fixture.
        repo_root = Path(__file__).parent.parent.parent
        sample_dir = repo_root / "code_to_optimize" / "js" / "code_to_optimize_vitest"
        if not sample_dir.exists():
            pytest.skip("code_to_optimize_vitest directory not found")
        return sample_dir

    @staticmethod
    def _assert_accepts_test_framework(method) -> None:
        """Assert that ``method`` declares a ``test_framework`` parameter."""
        import inspect

        assert "test_framework" in inspect.signature(method).parameters

    def test_language_support_has_test_framework_property(self):
        """Test that JavaScriptSupport has test_framework property."""
        from codeflash.languages import get_language_support
        from codeflash.languages.base import Language

        # Default test framework should be jest for both JS and TS.
        assert get_language_support(Language.JAVASCRIPT).test_framework == "jest"
        assert get_language_support(Language.TYPESCRIPT).test_framework == "jest"

    def test_behavioral_tests_accepts_test_framework(self):
        """Test that run_behavioral_tests accepts test_framework parameter."""
        from codeflash.languages import get_language_support
        from codeflash.languages.base import Language

        support = get_language_support(Language.TYPESCRIPT)
        self._assert_accepts_test_framework(support.run_behavioral_tests)

    def test_benchmarking_tests_accepts_test_framework(self):
        """Test that run_benchmarking_tests accepts test_framework parameter."""
        from codeflash.languages import get_language_support
        from codeflash.languages.base import Language

        support = get_language_support(Language.TYPESCRIPT)
        self._assert_accepts_test_framework(support.run_benchmarking_tests)

    def test_line_profile_tests_accepts_test_framework(self):
        """Test that run_line_profile_tests accepts test_framework parameter."""
        from codeflash.languages import get_language_support
        from codeflash.languages.base import Language

        support = get_language_support(Language.TYPESCRIPT)
        self._assert_accepts_test_framework(support.run_line_profile_tests)
class TestVitestVsJestDetection:
    """Tests comparing Vitest and Jest project detection."""

    @pytest.fixture
    def jest_project_dir(self):
        """Get the Jest sample project directory."""
        repo_root = Path(__file__).parent.parent.parent
        sample_dir = repo_root / "code_to_optimize" / "js" / "code_to_optimize_ts"
        if not sample_dir.exists():
            pytest.skip("code_to_optimize_ts directory not found")
        return sample_dir

    @pytest.fixture
    def vitest_project_dir(self):
        """Get the Vitest sample project directory."""
        repo_root = Path(__file__).parent.parent.parent
        sample_dir = repo_root / "code_to_optimize" / "js" / "code_to_optimize_vitest"
        if not sample_dir.exists():
            pytest.skip("code_to_optimize_vitest directory not found")
        return sample_dir

    def test_jest_detected_in_jest_project(self, jest_project_dir):
        """Test that Jest is detected in the Jest project."""
        pkg = get_package_json_data(jest_project_dir / "package.json")
        assert pkg is not None
        assert detect_test_runner(jest_project_dir, pkg) == "jest"

    def test_vitest_detected_in_vitest_project(self, vitest_project_dir):
        """Test that Vitest is detected in the Vitest project."""
        pkg = get_package_json_data(vitest_project_dir / "package.json")
        assert pkg is not None
        assert detect_test_runner(vitest_project_dir, pkg) == "vitest"

    def test_vitest_prioritized_over_jest(self, tmp_path):
        """Test that Vitest is prioritized when both are present."""
        import json

        # Manifest declaring BOTH runners; detection must prefer Vitest.
        manifest = {
            "name": "test",
            "devDependencies": {
                "vitest": "^2.0.0",
                "jest": "^29.0.0",
            },
        }
        (tmp_path / "package.json").write_text(json.dumps(manifest))
        pkg = get_package_json_data(tmp_path / "package.json")
        assert detect_test_runner(tmp_path, pkg) == "vitest"