Unify SQLite schema into single codeflash_results table

Merge async_results and test_results tables into one 15-column
codeflash_results table with a dedicated cpu_time_ns column.
Consolidate file pattern to codeflash_results_{N}.sqlite and
delete the now-unused _data_parsers.py module.
This commit is contained in:
Kevin Turcios 2026-04-24 07:12:34 -05:00
parent ba001950ee
commit 5b20981cd4
13 changed files with 257 additions and 414 deletions

View file

@ -55,7 +55,7 @@ async def collect_baseline_async_metrics( # noqa: PLR0913
calculate_async_throughput,
)
async_db = get_run_tmp_file(Path("async_results_0.sqlite"))
async_db = get_run_tmp_file(Path("codeflash_results_0.sqlite"))
async_throughput = calculate_async_throughput(
async_db,
func.function_name,
@ -165,7 +165,7 @@ async def run_concurrency_benchmark(
iteration = 0
async_db = get_run_tmp_file(
Path(f"async_results_{iteration}.sqlite"),
Path(f"codeflash_results_{iteration}.sqlite"),
)
return parse_async_concurrency_metrics(
async_db,
@ -206,7 +206,7 @@ async def evaluate_async_candidate( # noqa: PLR0913
func = fn_input.function
iteration = 0
async_db = get_run_tmp_file(
Path(f"async_results_{iteration}.sqlite"),
Path(f"codeflash_results_{iteration}.sqlite"),
)
candidate_throughput = calculate_async_throughput(
async_db,

View file

@ -81,7 +81,7 @@ _codeflash_call_site: contextvars.ContextVar[str] = contextvars.ContextVar(
)
CREATE_TABLE_SQL = (
"CREATE TABLE IF NOT EXISTS async_results ("
"CREATE TABLE IF NOT EXISTS codeflash_results ("
"test_module_path TEXT NOT NULL, "
"test_class_name TEXT, "
"test_function_name TEXT NOT NULL, "
@ -90,6 +90,7 @@ CREATE_TABLE_SQL = (
"invocation_id TEXT NOT NULL, "
"mode TEXT NOT NULL, "
"wall_time_ns INTEGER NOT NULL, "
"cpu_time_ns INTEGER, "
"return_value BLOB, "
"verification_type TEXT, "
"sequential_time_ns INTEGER, "
@ -222,7 +223,7 @@ def codeflash_behavior_sync(func: F) -> F:
"""
Capture sync return values, timing, and stdout for behavioral tests.
Results are written to the async_results SQLite table.
Results are written to the codeflash_results SQLite table.
"""
@wraps(func)
@ -252,7 +253,7 @@ def codeflash_behavior_sync(func: F) -> F:
invocation_id = f"{call_site}_{call_index}"
iteration = os.environ.get("CODEFLASH_TEST_ITERATION", "0")
db_path = get_run_tmp_file(Path(f"async_results_{iteration}.sqlite"))
db_path = get_run_tmp_file(Path(f"codeflash_results_{iteration}.sqlite"))
conn, cur = get_async_db(db_path)
exception = None
@ -284,8 +285,8 @@ def codeflash_behavior_sync(func: F) -> F:
else pickle.dumps((args, kwargs, return_value))
)
cur.execute(
"INSERT INTO async_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
test_module_name,
test_class_name,
@ -295,9 +296,10 @@ def codeflash_behavior_sync(func: F) -> F:
invocation_id,
"behavior",
wall_time,
cpu_time,
pickled,
VerificationType.FUNCTION_CALL.value,
cpu_time,
None,
None,
None,
stdout_text,
@ -316,7 +318,7 @@ def codeflash_performance_sync(func: F) -> F:
"""
Measure sync execution time for performance tests.
Results are written to the async_results SQLite table.
Results are written to the codeflash_results SQLite table.
"""
@wraps(func)
@ -346,7 +348,7 @@ def codeflash_performance_sync(func: F) -> F:
invocation_id = f"{call_site}_{call_index}"
iteration = os.environ.get("CODEFLASH_TEST_ITERATION", "0")
db_path = get_run_tmp_file(Path(f"async_results_{iteration}.sqlite"))
db_path = get_run_tmp_file(Path(f"codeflash_results_{iteration}.sqlite"))
conn, cur = get_async_db(db_path)
exception = None
@ -364,8 +366,8 @@ def codeflash_performance_sync(func: F) -> F:
gc.enable()
cur.execute(
"INSERT INTO async_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
test_module_name,
test_class_name,
@ -381,6 +383,7 @@ def codeflash_performance_sync(func: F) -> F:
None,
None,
None,
None,
),
)
conn.commit()
@ -396,7 +399,7 @@ def codeflash_behavior_async(func: F) -> F:
"""
Capture async return values and timing for behavioral tests.
Results are written to the async_results SQLite table.
Results are written to the codeflash_results SQLite table.
"""
@wraps(func)
@ -427,7 +430,7 @@ def codeflash_behavior_async(func: F) -> F:
invocation_id = f"{call_site}_{call_index}"
iteration = os.environ.get("CODEFLASH_TEST_ITERATION", "0")
db_path = get_run_tmp_file(Path(f"async_results_{iteration}.sqlite"))
db_path = get_run_tmp_file(Path(f"codeflash_results_{iteration}.sqlite"))
conn, cur = get_async_db(db_path)
exception = None
@ -456,8 +459,8 @@ def codeflash_behavior_async(func: F) -> F:
else pickle.dumps((args, kwargs, return_value))
)
cur.execute(
"INSERT INTO async_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
test_module_name,
test_class_name,
@ -467,6 +470,7 @@ def codeflash_behavior_async(func: F) -> F:
invocation_id,
"behavior",
wall_time,
None,
pickled,
VerificationType.FUNCTION_CALL.value,
None,
@ -488,7 +492,7 @@ def codeflash_performance_async(func: F) -> F:
"""
Measure async execution time for performance tests.
Results are written to the async_results SQLite table.
Results are written to the codeflash_results SQLite table.
"""
@wraps(func)
@ -518,7 +522,7 @@ def codeflash_performance_async(func: F) -> F:
invocation_id = f"{call_site}_{call_index}"
iteration = os.environ.get("CODEFLASH_TEST_ITERATION", "0")
db_path = get_run_tmp_file(Path(f"async_results_{iteration}.sqlite"))
db_path = get_run_tmp_file(Path(f"codeflash_results_{iteration}.sqlite"))
conn, cur = get_async_db(db_path)
exception = None
@ -536,8 +540,8 @@ def codeflash_performance_async(func: F) -> F:
gc.enable()
cur.execute(
"INSERT INTO async_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
test_module_name,
test_class_name,
@ -553,6 +557,7 @@ def codeflash_performance_async(func: F) -> F:
None,
None,
None,
None,
),
)
conn.commit()
@ -568,7 +573,7 @@ def codeflash_concurrency_async(func: F) -> F:
"""
Measure concurrent vs sequential execution for async functions.
Results are written to the async_results SQLite table.
Results are written to the codeflash_results SQLite table.
"""
@wraps(func)
@ -585,7 +590,7 @@ def codeflash_concurrency_async(func: F) -> F:
loop_index = int(os.environ.get("CODEFLASH_LOOP_INDEX", "0"))
iteration = os.environ.get("CODEFLASH_TEST_ITERATION", "0")
db_path = get_run_tmp_file(Path(f"async_results_{iteration}.sqlite"))
db_path = get_run_tmp_file(Path(f"codeflash_results_{iteration}.sqlite"))
conn, cur = get_async_db(db_path)
gc.disable()
@ -607,8 +612,8 @@ def codeflash_concurrency_async(func: F) -> F:
gc.enable()
cur.execute(
"INSERT INTO async_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
test_module_name,
test_class_name,
@ -620,6 +625,7 @@ def codeflash_concurrency_async(func: F) -> F:
0,
None,
None,
None,
sequential_time,
concurrent_time,
concurrency_factor,

View file

@ -179,7 +179,23 @@ def codeflash_capture(
and not callable(getattr(instance, attr, None))
}
codeflash_cur.execute(
"CREATE TABLE IF NOT EXISTS test_results (test_module_path TEXT, test_class_name TEXT, test_function_name TEXT, function_getting_tested TEXT, loop_index INTEGER, iteration_id TEXT, runtime INTEGER, return_value BLOB, verification_type TEXT, cpu_runtime INTEGER)"
"CREATE TABLE IF NOT EXISTS codeflash_results ("
"test_module_path TEXT NOT NULL, "
"test_class_name TEXT, "
"test_function_name TEXT NOT NULL, "
"function_getting_tested TEXT NOT NULL, "
"loop_index INTEGER NOT NULL, "
"invocation_id TEXT NOT NULL, "
"mode TEXT NOT NULL, "
"wall_time_ns INTEGER NOT NULL, "
"cpu_time_ns INTEGER, "
"return_value BLOB, "
"verification_type TEXT, "
"sequential_time_ns INTEGER, "
"concurrent_time_ns INTEGER, "
"concurrency_factor INTEGER, "
"stdout TEXT"
")"
)
# Write to sqlite
@ -189,7 +205,8 @@ def codeflash_capture(
else PicklePatcher.dumps(instance_state)
)
codeflash_cur.execute(
"INSERT INTO test_results VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
test_module_name,
test_class_name,
@ -197,12 +214,17 @@ def codeflash_capture(
function_name,
loop_index,
invocation_id,
"behavior",
codeflash_duration,
0,
pickled_return_value,
VerificationType.INIT_STATE_FTO
if is_fto
else VerificationType.INIT_STATE_HELPER,
0,
None,
None,
None,
None,
),
)
codeflash_con.commit()

View file

@ -23,13 +23,13 @@ _BEHAVIOR_QUERY = (
"SELECT test_module_path, test_class_name,"
" test_function_name, function_getting_tested,"
" loop_index, invocation_id, wall_time_ns,"
" return_value, verification_type, stdout"
" FROM async_results"
" cpu_time_ns, return_value, verification_type, stdout"
" FROM codeflash_results"
" WHERE mode = 'behavior'"
)
_THROUGHPUT_QUERY = (
"SELECT COUNT(*) FROM async_results"
"SELECT COUNT(*) FROM codeflash_results"
" WHERE function_getting_tested = ?"
" AND mode = 'performance'"
)
@ -37,7 +37,7 @@ _THROUGHPUT_QUERY = (
_CONCURRENCY_QUERY = (
"SELECT sequential_time_ns, concurrent_time_ns,"
" concurrency_factor"
" FROM async_results"
" FROM codeflash_results"
" WHERE function_getting_tested = ?"
" AND mode = 'concurrency'"
)
@ -108,8 +108,9 @@ def _process_behavior_row_inner(
loop_index = val[4]
invocation_id = val[5]
wall_time_ns = val[6]
verification_type = val[8]
stdout_text = val[9] if len(val) > 9 else None
cpu_time_ns = val[7]
verification_type = val[9]
stdout_text = val[10] if len(val) > 10 else None
test_file_path = file_path_from_module_name(
test_module_path, # type: ignore[arg-type]
@ -138,11 +139,11 @@ def _process_behavior_row_inner(
test_type = found
ret_val = None
if loop_index == 1 and val[7]:
if loop_index == 1 and val[8]:
import dill as pickle # noqa: PLC0415
try:
ret_val = (pickle.loads(val[7]),) # noqa: S301
ret_val = (pickle.loads(val[8]),) # noqa: S301
except Exception: # noqa: BLE001
log.debug(
"Failed to deserialize return value for %s",
@ -167,7 +168,7 @@ def _process_behavior_row_inner(
test_framework=test_config.test_framework,
test_type=test_type,
return_value=ret_val,
cpu_runtime=0,
cpu_runtime=cpu_time_ns or 0,
timed_out=False,
verification_type=(
VerificationType(verification_type)

View file

@ -1,169 +0,0 @@
"""SQLite test result parsing."""
from __future__ import annotations
import logging
import sqlite3
from typing import TYPE_CHECKING
from .._model import VerificationType
from ..test_discovery.models import TestType
from ._path_resolution import file_path_from_module_name
from .models import FunctionTestInvocation, InvocationId, TestResults
if TYPE_CHECKING:
from pathlib import Path
from .models import TestConfig, TestFiles
log = logging.getLogger(__name__)
def parse_sqlite_test_results(
    sqlite_file_path: Path,
    test_files: TestFiles,
    test_config: TestConfig,
) -> TestResults:
    """Parse test results from a SQLite database.

    Args:
        sqlite_file_path: Path to the results DB. A missing file logs a
            warning and yields an empty ``TestResults``.
        test_files: Used to resolve each row's test type from its file path.
        test_config: Supplies the test framework and project root dirs.

    Returns:
        ``TestResults`` populated from every parseable row of the
        ``test_results`` table; empty on any read or open failure.
    """
    test_results = TestResults()
    if not sqlite_file_path.exists():
        log.warning(
            "No test results for %s found.",
            sqlite_file_path,
        )
        return test_results
    db: sqlite3.Connection | None = None
    try:
        db = sqlite3.connect(sqlite_file_path)
        cur = db.cursor()
        data = cur.execute(
            "SELECT test_module_path, test_class_name,"
            " test_function_name,"
            " function_getting_tested, loop_index,"
            " iteration_id, runtime,"
            " return_value, verification_type,"
            " cpu_runtime"
            " FROM test_results"
        ).fetchall()
    except sqlite3.Error:
        log.warning(
            "Failed to read test results from %s.",
            sqlite_file_path,
            exc_info=True,
        )
        # The finally block below closes the connection; the original
        # closed it here as well, producing a redundant double close.
        return test_results
    except OSError:
        log.warning(
            "Failed to open %s.",
            sqlite_file_path,
            exc_info=True,
        )
        return test_results
    finally:
        if db is not None:
            db.close()
    for val in data:
        _process_sqlite_row(val, test_files, test_config, test_results)
    return test_results
def _process_sqlite_row(
    val: tuple[object, ...],
    test_files: TestFiles,
    test_config: TestConfig,
    test_results: TestResults,
) -> None:
    """Process a single row from the sqlite table.

    Any exception raised while parsing is logged (with traceback) and
    swallowed, so one malformed row cannot abort parsing of the rest.
    """
    try:
        _process_sqlite_row_inner(val, test_files, test_config, test_results)
    except Exception:
        # Deliberate best-effort: keep going after a bad row.
        log.exception("Failed to parse sqlite test result")
def _process_sqlite_row_inner(
    val: tuple[object, ...],
    test_files: TestFiles,
    test_config: TestConfig,
    test_results: TestResults,
) -> None:
    """Translate one raw sqlite row into a ``FunctionTestInvocation``.

    Row columns by position: test_module_path, test_class_name,
    test_function_name, function_getting_tested, loop_index, iteration_id,
    runtime, return_value, verification_type, cpu_runtime.
    """
    module_path = val[0]
    # Falsy class/function names (empty string) are normalized to None.
    class_name = val[1] or None
    function_name = val[2] or None
    tested_function = val[3]
    loop_index = val[4]
    iteration_id = val[5]
    runtime = val[6]
    verification_type = val[8]
    cpu_runtime = val[9]
    test_file_path = file_path_from_module_name(
        module_path,  # type: ignore[arg-type]
        test_config.tests_project_rootdir,
    )
    init_state_types = {
        VerificationType.INIT_STATE_FTO,
        VerificationType.INIT_STATE_HELPER,
    }
    if verification_type in init_state_types:
        test_type: TestType = TestType.INIT_STATE_TEST
    else:
        # Try the original file path first, then the instrumented one.
        found = test_files.get_test_type_by_original_file_path(
            test_file_path,
        )
        if found is None:
            found = test_files.get_test_type_by_instrumented_file_path(
                test_file_path,
            )
        if found is None:
            log.debug(
                "Skipping result for %s: could not determine test type",
                function_name,
            )
            return
        test_type = found
    ret_val = None
    # Only the first loop iteration carries a pickled return value.
    if loop_index == 1 and val[7]:
        import dill as pickle  # noqa: PLC0415

        try:
            ret_val = (pickle.loads(val[7]),)  # noqa: S301
        except (pickle.UnpicklingError, EOFError, ValueError, TypeError):
            log.warning(
                "Failed to deserialize return value for %s",
                function_name,
                exc_info=True,
            )
            return
    invocation = FunctionTestInvocation(
        loop_index=loop_index,  # type: ignore[arg-type]
        id=InvocationId(
            test_module_path=module_path,  # type: ignore[arg-type]
            test_class_name=class_name,  # type: ignore[arg-type]
            test_function_name=function_name,  # type: ignore[arg-type]
            function_getting_tested=tested_function,  # type: ignore[arg-type]
            iteration_id=iteration_id,  # type: ignore[arg-type]
        ),
        file_name=test_file_path,
        did_pass=True,
        runtime=runtime,  # type: ignore[arg-type]
        test_framework=test_config.test_framework,
        test_type=test_type,
        return_value=ret_val,
        cpu_runtime=cpu_runtime,  # type: ignore[arg-type]
        timed_out=False,
        verification_type=(
            VerificationType(verification_type)
            if verification_type
            else None
        ),
    )
    test_results.add(invocation)

View file

@ -72,7 +72,7 @@ def instrument_codeflash_capture(
modified_code = add_codeflash_capture_to_init(
target_classes={class_parent.name},
fto_name=function_to_optimize.function_name,
tmp_dir_path=get_run_tmp_file(Path("test_return_values")).as_posix(),
tmp_dir_path=get_run_tmp_file(Path("codeflash_results")).as_posix(),
code=original_code,
tests_root=tests_root,
is_fto=True,
@ -90,7 +90,7 @@ def instrument_codeflash_capture(
target_classes=helper_classes,
fto_name=function_to_optimize.function_name,
tmp_dir_path=get_run_tmp_file(
Path("test_return_values")
Path("codeflash_results")
).as_posix(),
code=original_code,
tests_root=tests_root,

View file

@ -12,7 +12,6 @@ from typing import TYPE_CHECKING
from ..runtime._codeflash_wrap_decorator import get_run_tmp_file
from ._async_data_parser import parse_async_behavior_results
from ._data_parsers import parse_sqlite_test_results
from ._result_merger import merge_test_results
from ._stdout_parsers import parse_test_failures_from_stdout
from ._xml_parser import parse_test_xml
@ -41,41 +40,17 @@ def parse_test_results(
run_result,
)
# Parse SQLite results
# Parse unified SQLite results
data_results = TestResults()
sql_file = get_run_tmp_file(
Path(f"test_return_values_{optimization_iteration}.sqlite"),
Path(f"codeflash_results_{optimization_iteration}.sqlite"),
)
if sql_file.exists():
data_results = parse_sqlite_test_results(
data_results = parse_async_behavior_results(
sql_file, test_files, test_config
)
sql_file.unlink(missing_ok=True)
# Parse async SQLite results
async_sql_file = get_run_tmp_file(
Path(f"async_results_{optimization_iteration}.sqlite"),
)
if async_sql_file.exists():
async_results = parse_async_behavior_results(
async_sql_file,
test_files,
test_config,
)
for inv in async_results:
data_results.test_results.append(inv)
async_sql_file.unlink(missing_ok=True)
# Clean up deprecated binary pickle file if present
bin_file = get_run_tmp_file(
Path(f"test_return_values_{optimization_iteration}.bin"),
)
if bin_file.exists():
log.debug(
"Found deprecated .bin result file %s, removing.",
bin_file,
)
bin_file.unlink(missing_ok=True)
sql_file.unlink(missing_ok=True)
get_run_tmp_file(Path("pytest_results.xml")).unlink(
missing_ok=True,
)

View file

@ -42,17 +42,17 @@ def _test_files(tmp_path):
return tf
def _create_async_db(
def _create_db(
db_path: Path,
rows: list[tuple],
) -> None:
"""Create an async_results SQLite DB with the given rows."""
"""Create a codeflash_results SQLite DB with the given rows."""
conn = sqlite3.connect(db_path)
conn.execute(CREATE_TABLE_SQL)
for row in rows:
conn.execute(
"INSERT INTO async_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
row,
)
conn.commit()
@ -79,11 +79,11 @@ class TestParseAsyncBehaviorResults:
"""Parses behavior rows into FunctionTestInvocation objects."""
db_path = tmp_path / "test_module.py"
db_path.write_text("", encoding="utf-8")
db_path = tmp_path / "async_results.sqlite"
db_path = tmp_path / "results.sqlite"
module_path = "test_module"
pickled = pickle.dumps(((1, 2), {}, 3))
_create_async_db(
_create_db(
db_path,
[
(
@ -95,6 +95,7 @@ class TestParseAsyncBehaviorResults:
"0_0",
"behavior",
1000,
None,
pickled,
VerificationType.FUNCTION_CALL.value,
None,
@ -121,9 +122,9 @@ class TestParseAsyncBehaviorResults:
"""Ignores performance and concurrency rows."""
db_path = tmp_path / "test_module.py"
db_path.write_text("", encoding="utf-8")
db_path = tmp_path / "async_results.sqlite"
db_path = tmp_path / "results.sqlite"
_create_async_db(
_create_db(
db_path,
[
(
@ -141,6 +142,7 @@ class TestParseAsyncBehaviorResults:
None,
None,
None,
None,
),
],
)
@ -175,8 +177,8 @@ class TestCalculateAsyncThroughput:
def test_counts_performance_rows(self, tmp_path) -> None:
"""Counts rows matching function name and performance mode."""
db_path = tmp_path / "async_results.sqlite"
_create_async_db(
db_path = tmp_path / "results.sqlite"
_create_db(
db_path,
[
(
@ -194,6 +196,7 @@ class TestCalculateAsyncThroughput:
None,
None,
None,
None,
),
(
"mod",
@ -210,6 +213,7 @@ class TestCalculateAsyncThroughput:
None,
None,
None,
None,
),
(
"mod",
@ -226,6 +230,7 @@ class TestCalculateAsyncThroughput:
None,
None,
None,
None,
),
(
"mod",
@ -242,6 +247,7 @@ class TestCalculateAsyncThroughput:
None,
None,
None,
None,
),
],
)
@ -250,8 +256,8 @@ class TestCalculateAsyncThroughput:
def test_returns_zero_for_no_matches(self, tmp_path) -> None:
"""Returns 0 when no rows match the function name."""
db_path = tmp_path / "async_results.sqlite"
_create_async_db(db_path, [])
db_path = tmp_path / "results.sqlite"
_create_db(db_path, [])
result = calculate_async_throughput(db_path, "nonexistent")
assert 0 == result
@ -269,8 +275,8 @@ class TestParseAsyncConcurrencyMetrics:
def test_parses_concurrency_metrics(self, tmp_path) -> None:
"""Computes averages from multiple concurrency rows."""
db_path = tmp_path / "async_results.sqlite"
_create_async_db(
db_path = tmp_path / "results.sqlite"
_create_db(
db_path,
[
(
@ -284,6 +290,7 @@ class TestParseAsyncConcurrencyMetrics:
0,
None,
None,
None,
100_000,
50_000,
10,
@ -300,6 +307,7 @@ class TestParseAsyncConcurrencyMetrics:
0,
None,
None,
None,
200_000,
100_000,
10,
@ -316,8 +324,8 @@ class TestParseAsyncConcurrencyMetrics:
def test_returns_none_for_wrong_function(self, tmp_path) -> None:
"""Returns None when no rows match the function name."""
db_path = tmp_path / "async_results.sqlite"
_create_async_db(
db_path = tmp_path / "results.sqlite"
_create_db(
db_path,
[
(
@ -331,6 +339,7 @@ class TestParseAsyncConcurrencyMetrics:
0,
None,
None,
None,
100_000,
50_000,
10,
@ -343,8 +352,8 @@ class TestParseAsyncConcurrencyMetrics:
def test_handles_zero_concurrent_time(self, tmp_path) -> None:
"""Returns ratio 1.0 when concurrent time is zero."""
db_path = tmp_path / "async_results.sqlite"
_create_async_db(
db_path = tmp_path / "results.sqlite"
_create_db(
db_path,
[
(
@ -358,6 +367,7 @@ class TestParseAsyncConcurrencyMetrics:
0,
None,
None,
None,
100_000,
0,
5,
@ -397,9 +407,9 @@ class TestParseAsyncBehaviorEdgeCases:
"""Rows with INIT_STATE_FTO get INIT_STATE_TEST type."""
db_path = tmp_path / "test_module.py"
db_path.write_text("", encoding="utf-8")
db_path = tmp_path / "async_results.sqlite"
db_path = tmp_path / "results.sqlite"
_create_async_db(
_create_db(
db_path,
[
(
@ -411,6 +421,7 @@ class TestParseAsyncBehaviorEdgeCases:
"0_0",
"behavior",
1000,
None,
pickle.dumps(((1,), {}, 2)),
VerificationType.INIT_STATE_FTO.value,
None,
@ -435,7 +446,7 @@ class TestParseAsyncBehaviorEdgeCases:
db_path = tmp_path / "test_module.py"
db_path.write_text("", encoding="utf-8")
db_path = tmp_path / "async_results.sqlite"
db_path = tmp_path / "results.sqlite"
tf = MagicMock()
tf.get_test_type_by_original_file_path.return_value = None
@ -443,7 +454,7 @@ class TestParseAsyncBehaviorEdgeCases:
TestType.EXISTING_UNIT_TEST
)
_create_async_db(
_create_db(
db_path,
[
(
@ -455,6 +466,7 @@ class TestParseAsyncBehaviorEdgeCases:
"0_0",
"behavior",
1000,
None,
pickle.dumps(((1,), {}, 2)),
VerificationType.FUNCTION_CALL.value,
None,
@ -474,13 +486,13 @@ class TestParseAsyncBehaviorEdgeCases:
"""Skips rows when test type cannot be determined."""
db_path = tmp_path / "test_module.py"
db_path.write_text("", encoding="utf-8")
db_path = tmp_path / "async_results.sqlite"
db_path = tmp_path / "results.sqlite"
tf = MagicMock()
tf.get_test_type_by_original_file_path.return_value = None
tf.get_test_type_by_instrumented_file_path.return_value = None
_create_async_db(
_create_db(
db_path,
[
(
@ -492,6 +504,7 @@ class TestParseAsyncBehaviorEdgeCases:
"0_0",
"behavior",
1000,
None,
pickle.dumps(((1,), {}, 2)),
VerificationType.FUNCTION_CALL.value,
None,
@ -511,9 +524,9 @@ class TestParseAsyncBehaviorEdgeCases:
"""Skips rows with unpicklable return value data."""
db_path = tmp_path / "test_module.py"
db_path.write_text("", encoding="utf-8")
db_path = tmp_path / "async_results.sqlite"
db_path = tmp_path / "results.sqlite"
_create_async_db(
_create_db(
db_path,
[
(
@ -525,6 +538,7 @@ class TestParseAsyncBehaviorEdgeCases:
"0_0",
"behavior",
1000,
None,
b"definitely not valid pickle",
VerificationType.FUNCTION_CALL.value,
None,
@ -544,14 +558,14 @@ class TestParseAsyncBehaviorEdgeCases:
"""Outer exception handler catches errors from inner processing."""
db_path = tmp_path / "test_module.py"
db_path.write_text("", encoding="utf-8")
db_path = tmp_path / "async_results.sqlite"
db_path = tmp_path / "results.sqlite"
tf = MagicMock()
tf.get_test_type_by_original_file_path.side_effect = RuntimeError(
"boom"
)
_create_async_db(
_create_db(
db_path,
[
(
@ -564,6 +578,7 @@ class TestParseAsyncBehaviorEdgeCases:
"behavior",
1000,
None,
None,
VerificationType.FUNCTION_CALL.value,
None,
None,

View file

@ -61,11 +61,11 @@ def _env_setup(request, tmp_path):
close_all_connections()
@pytest.fixture(name="async_db_path")
def _async_db_path(env_setup):
"""Return the path where the async results DB will be written."""
@pytest.fixture(name="results_db_path")
def _results_db_path(env_setup):
"""Return the path where the results DB will be written."""
iteration = env_setup["CODEFLASH_TEST_ITERATION"]
db_path = get_run_tmp_file(Path(f"async_results_{iteration}.sqlite"))
db_path = get_run_tmp_file(Path(f"codeflash_results_{iteration}.sqlite"))
yield db_path
if db_path.exists():
db_path.unlink()
@ -129,12 +129,12 @@ class TestGetAsyncDb:
"""get_async_db connection caching."""
def test_creates_table(self, tmp_path) -> None:
"""Creates the async_results table on first connect."""
"""Creates the codeflash_results table on first connect."""
db_path = tmp_path / "test.sqlite"
conn, cur = get_async_db(db_path)
cur.execute(
"SELECT name FROM sqlite_master "
"WHERE type='table' AND name='async_results'"
"WHERE type='table' AND name='codeflash_results'"
)
assert cur.fetchone() is not None
conn.close()
@ -167,7 +167,7 @@ class TestBehaviorAsync:
@pytest.mark.asyncio
async def test_returns_correct_value(
self, env_setup, async_db_path
self, env_setup, results_db_path
) -> None:
"""Decorated function returns the original return value."""
@ -180,7 +180,7 @@ class TestBehaviorAsync:
assert 7 == result
@pytest.mark.asyncio
async def test_writes_to_sqlite(self, env_setup, async_db_path) -> None:
async def test_writes_to_sqlite(self, env_setup, results_db_path) -> None:
"""Writes behavior result to async_results table."""
@codeflash_behavior_async
@ -191,26 +191,26 @@ class TestBehaviorAsync:
await multiply(5, 6)
close_all_connections()
assert async_db_path.exists()
con = sqlite3.connect(async_db_path)
assert results_db_path.exists()
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT * FROM async_results")
cur.execute("SELECT * FROM codeflash_results")
rows = cur.fetchall()
assert 1 == len(rows)
row = rows[0]
assert "behavior" == row[6]
assert 0 < row[7]
data = pickle.loads(row[8])
data = pickle.loads(row[9])
args, kwargs, ret = data
assert (5, 6) == args
assert {} == kwargs
assert 30 == ret
assert VerificationType.FUNCTION_CALL.value == row[9]
assert VerificationType.FUNCTION_CALL.value == row[10]
con.close()
@pytest.mark.asyncio
async def test_exception_handling(self, env_setup, async_db_path) -> None:
async def test_exception_handling(self, env_setup, results_db_path) -> None:
"""Re-raises exceptions and stores them pickled."""
@codeflash_behavior_async
@ -222,9 +222,9 @@ class TestBehaviorAsync:
await fail()
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT return_value FROM async_results")
cur.execute("SELECT return_value FROM codeflash_results")
row = cur.fetchone()
exc = pickle.loads(row[0])
assert isinstance(exc, ValueError)
@ -233,7 +233,7 @@ class TestBehaviorAsync:
@pytest.mark.asyncio
async def test_no_stdout_output(
self, env_setup, async_db_path, capsys
self, env_setup, results_db_path, capsys
) -> None:
"""Behavior decorator does not leak stdout to outer scope."""
@ -248,7 +248,7 @@ class TestBehaviorAsync:
@pytest.mark.asyncio
async def test_captures_stdout_in_sqlite(
self, env_setup, async_db_path
self, env_setup, results_db_path
) -> None:
"""Behavior decorator captures print output into the stdout column."""
@ -261,9 +261,9 @@ class TestBehaviorAsync:
await greeter("world")
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT stdout FROM async_results")
cur.execute("SELECT stdout FROM codeflash_results")
row = cur.fetchone()
assert "hello world\n" == row[0]
con.close()
@ -272,7 +272,7 @@ class TestBehaviorAsync:
class TestBehaviorSync:
"""codeflash_behavior_sync decorator."""
def test_returns_correct_value(self, env_setup, async_db_path) -> None:
def test_returns_correct_value(self, env_setup, results_db_path) -> None:
"""Decorated function returns the original return value."""
@codeflash_behavior_sync
@ -283,7 +283,7 @@ class TestBehaviorSync:
result = add(3, 4)
assert 7 == result
def test_writes_to_sqlite(self, env_setup, async_db_path) -> None:
def test_writes_to_sqlite(self, env_setup, results_db_path) -> None:
"""Writes behavior result to async_results table."""
@codeflash_behavior_sync
@ -294,25 +294,25 @@ class TestBehaviorSync:
multiply(5, 6)
close_all_connections()
assert async_db_path.exists()
con = sqlite3.connect(async_db_path)
assert results_db_path.exists()
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT * FROM async_results")
cur.execute("SELECT * FROM codeflash_results")
rows = cur.fetchall()
assert 1 == len(rows)
row = rows[0]
assert "behavior" == row[6]
assert 0 < row[7]
data = pickle.loads(row[8])
data = pickle.loads(row[9])
args, kwargs, ret = data
assert (5, 6) == args
assert {} == kwargs
assert 30 == ret
assert VerificationType.FUNCTION_CALL.value == row[9]
assert VerificationType.FUNCTION_CALL.value == row[10]
con.close()
def test_exception_handling(self, env_setup, async_db_path) -> None:
def test_exception_handling(self, env_setup, results_db_path) -> None:
"""Re-raises exceptions and stores them pickled."""
@codeflash_behavior_sync
@ -324,9 +324,9 @@ class TestBehaviorSync:
fail()
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT return_value FROM async_results")
cur.execute("SELECT return_value FROM codeflash_results")
row = cur.fetchone()
exc = pickle.loads(row[0])
assert isinstance(exc, ValueError)
@ -334,7 +334,7 @@ class TestBehaviorSync:
con.close()
def test_captures_stdout_in_sqlite(
self, env_setup, async_db_path
self, env_setup, results_db_path
) -> None:
"""Captures print output into the stdout column."""
@ -347,15 +347,15 @@ class TestBehaviorSync:
greeter("world")
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT stdout FROM async_results")
cur.execute("SELECT stdout FROM codeflash_results")
row = cur.fetchone()
assert "hello world\n" == row[0]
con.close()
def test_no_stdout_leak(
self, env_setup, async_db_path, capsys
self, env_setup, results_db_path, capsys
) -> None:
"""Sync decorator does not leak stdout to outer scope."""
@ -369,8 +369,8 @@ class TestBehaviorSync:
captured = capsys.readouterr()
assert "" == captured.out
def test_records_cpu_time(self, env_setup, async_db_path) -> None:
"""Records cpu_time in the sequential_time_ns column."""
def test_records_cpu_time(self, env_setup, results_db_path) -> None:
"""Records cpu_time_ns in its own column."""
@codeflash_behavior_sync
def work() -> int:
@ -380,10 +380,10 @@ class TestBehaviorSync:
work()
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute(
"SELECT sequential_time_ns FROM async_results"
"SELECT cpu_time_ns FROM codeflash_results"
)
row = cur.fetchone()
assert row[0] is not None
@ -394,7 +394,7 @@ class TestBehaviorSync:
class TestPerformanceSync:
"""codeflash_performance_sync decorator."""
def test_returns_correct_value(self, env_setup, async_db_path) -> None:
def test_returns_correct_value(self, env_setup, results_db_path) -> None:
"""Decorated function returns the original return value."""
@codeflash_performance_sync
@ -405,7 +405,7 @@ class TestPerformanceSync:
result = add(3, 4)
assert 7 == result
def test_writes_to_sqlite(self, env_setup, async_db_path) -> None:
def test_writes_to_sqlite(self, env_setup, results_db_path) -> None:
"""Writes performance result with mode='performance' and null return_value."""
@codeflash_performance_sync
@ -416,20 +416,20 @@ class TestPerformanceSync:
work()
close_all_connections()
assert async_db_path.exists()
con = sqlite3.connect(async_db_path)
assert results_db_path.exists()
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT * FROM async_results")
cur.execute("SELECT * FROM codeflash_results")
rows = cur.fetchall()
assert 1 == len(rows)
row = rows[0]
assert "performance" == row[6]
assert 0 < row[7]
assert row[8] is None
assert row[9] is None
assert row[10] is None
con.close()
def test_exception_handling(self, env_setup, async_db_path) -> None:
def test_exception_handling(self, env_setup, results_db_path) -> None:
"""Re-raises exceptions from the wrapped function."""
@codeflash_performance_sync
@ -441,7 +441,7 @@ class TestPerformanceSync:
fail()
def test_no_stdout_capture(
self, env_setup, async_db_path, capsys
self, env_setup, results_db_path, capsys
) -> None:
"""Performance decorator does not redirect stdout."""
@ -455,7 +455,7 @@ class TestPerformanceSync:
captured = capsys.readouterr()
assert "visible" in captured.out
def test_records_wall_time(self, env_setup, async_db_path) -> None:
def test_records_wall_time(self, env_setup, results_db_path) -> None:
"""Records a positive wall_time_ns value."""
@codeflash_performance_sync
@ -466,9 +466,9 @@ class TestPerformanceSync:
work()
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT wall_time_ns FROM async_results")
cur.execute("SELECT wall_time_ns FROM codeflash_results")
row = cur.fetchone()
assert row[0] is not None
assert 0 < row[0]
@ -484,7 +484,7 @@ class TestPerformanceAsync:
@pytest.mark.asyncio
async def test_returns_correct_value(
self, env_setup, async_db_path
self, env_setup, results_db_path
) -> None:
"""Returns the original return value."""
@ -497,7 +497,7 @@ class TestPerformanceAsync:
assert 7 == result
@pytest.mark.asyncio
async def test_writes_to_sqlite(self, env_setup, async_db_path) -> None:
async def test_writes_to_sqlite(self, env_setup, results_db_path) -> None:
"""Writes performance result with null return_value."""
@codeflash_performance_async
@ -508,21 +508,21 @@ class TestPerformanceAsync:
await work()
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT * FROM async_results")
cur.execute("SELECT * FROM codeflash_results")
rows = cur.fetchall()
assert 1 == len(rows)
row = rows[0]
assert "performance" == row[6]
assert 0 < row[7]
assert row[8] is None
assert row[9] is None
assert row[10] is None
con.close()
@pytest.mark.asyncio
async def test_no_stdout_output(
self, env_setup, async_db_path, capsys
self, env_setup, results_db_path, capsys
) -> None:
"""Performance decorator emits no stdout."""
@ -545,7 +545,7 @@ class TestConcurrencyAsync:
@pytest.mark.asyncio
async def test_returns_correct_value(
self, env_setup, async_db_path
self, env_setup, results_db_path
) -> None:
"""Returns the result from sequential execution."""
@ -558,7 +558,7 @@ class TestConcurrencyAsync:
@pytest.mark.asyncio
async def test_writes_concurrency_metrics(
self, env_setup, async_db_path
self, env_setup, results_db_path
) -> None:
"""Writes sequential/concurrent timing to async_results."""
os.environ["CODEFLASH_CONCURRENCY_FACTOR"] = "3"
@ -571,24 +571,24 @@ class TestConcurrencyAsync:
await work()
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute("SELECT * FROM async_results")
cur.execute("SELECT * FROM codeflash_results")
rows = cur.fetchall()
assert 1 == len(rows)
row = rows[0]
assert "concurrency" == row[6]
assert 0 == row[7]
assert 0 < row[10]
assert 0 < row[11]
assert 3 == row[12]
assert 0 < row[12]
assert 3 == row[13]
con.close()
os.environ.pop("CODEFLASH_CONCURRENCY_FACTOR", None)
@pytest.mark.asyncio
async def test_no_stdout_output(
self, env_setup, async_db_path, capsys
self, env_setup, results_db_path, capsys
) -> None:
"""Concurrency decorator emits no stdout."""
os.environ["CODEFLASH_CONCURRENCY_FACTOR"] = "2"
@ -613,7 +613,7 @@ class TestBehaviorAsyncEdgeCases:
@pytest.mark.asyncio
async def test_multiple_calls_increment_index(
self, env_setup, async_db_path
self, env_setup, results_db_path
) -> None:
"""Second call to same decorator increments the invocation counter."""
@ -626,10 +626,10 @@ class TestBehaviorAsyncEdgeCases:
await inc(2)
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute(
"SELECT invocation_id FROM async_results ORDER BY invocation_id"
"SELECT invocation_id FROM codeflash_results ORDER BY invocation_id"
)
ids = [row[0] for row in cur.fetchall()]
assert 2 == len(ids)
@ -646,7 +646,7 @@ class TestPerformanceAsyncEdgeCases:
"""Edge cases for codeflash_performance_async."""
@pytest.mark.asyncio
async def test_exception_handling(self, env_setup, async_db_path) -> None:
async def test_exception_handling(self, env_setup, results_db_path) -> None:
"""Re-raises exceptions from the wrapped function."""
@codeflash_performance_async
@ -659,7 +659,7 @@ class TestPerformanceAsyncEdgeCases:
@pytest.mark.asyncio
async def test_multiple_calls_increment_index(
self, env_setup, async_db_path
self, env_setup, results_db_path
) -> None:
"""Second call increments the invocation counter."""
@ -672,10 +672,10 @@ class TestPerformanceAsyncEdgeCases:
await work()
close_all_connections()
con = sqlite3.connect(async_db_path)
con = sqlite3.connect(results_db_path)
cur = con.cursor()
cur.execute(
"SELECT invocation_id FROM async_results ORDER BY invocation_id"
"SELECT invocation_id FROM codeflash_results ORDER BY invocation_id"
)
ids = [row[0] for row in cur.fetchall()]
assert "0_0" == ids[0]
@ -715,13 +715,13 @@ class TestExtractTestContextEdgeCases:
class TestSchemaValidation:
"""Validate the async_results SQLite schema."""
"""Validate the codeflash_results SQLite schema."""
def test_table_columns(self, tmp_path) -> None:
"""async_results table has exactly 14 columns."""
"""codeflash_results table has exactly 15 columns."""
db_path = tmp_path / "schema_test.sqlite"
conn, cur = get_async_db(db_path)
cur.execute("PRAGMA table_info(async_results)")
cur.execute("PRAGMA table_info(codeflash_results)")
columns = cur.fetchall()
expected_names = [
"test_module_path",
@ -732,6 +732,7 @@ class TestSchemaValidation:
"invocation_id",
"mode",
"wall_time_ns",
"cpu_time_ns",
"return_value",
"verification_type",
"sequential_time_ns",

View file

@ -528,7 +528,7 @@ class TestUnittestExample(unittest.TestCase):
test_dir = (
Path(__file__).parent / "code_to_optimize" / "tests" / "pytest"
).resolve()
tmp_dir_path = get_run_tmp_file(Path("test_return_values"))
tmp_dir_path = get_run_tmp_file(Path("codeflash_results"))
sample_code = f"""
from codeflash_python.runtime._codeflash_capture import codeflash_capture
class MyClass:
@ -668,7 +668,7 @@ class TestUnittestExample(unittest.TestCase):
test_dir = (
Path(__file__).parent / "code_to_optimize" / "tests" / "pytest"
).resolve()
tmp_dir_path = get_run_tmp_file(Path("test_return_values"))
tmp_dir_path = get_run_tmp_file(Path("codeflash_results"))
# MyClass did not have an init function, we created the init function with the codeflash_capture decorator using instrumentation
sample_code = f"""
from codeflash_python.runtime._codeflash_capture import codeflash_capture
@ -810,7 +810,7 @@ def test_example_test():
test_dir = (
Path(__file__).parent / "code_to_optimize" / "tests" / "pytest"
).resolve()
tmp_dir_path = get_run_tmp_file(Path("test_return_values"))
tmp_dir_path = get_run_tmp_file(Path("codeflash_results"))
sample_code = f"""
from codeflash_python.runtime._codeflash_capture import codeflash_capture
@ -949,7 +949,7 @@ def test_helper_classes():
test_dir = (
Path(__file__).parent / "code_to_optimize" / "tests" / "pytest"
).resolve()
tmp_dir_path = get_run_tmp_file(Path("test_return_values"))
tmp_dir_path = get_run_tmp_file(Path("codeflash_results"))
original_code = f"""
from codeflash_python.runtime._codeflash_capture import codeflash_capture
from code_to_optimize.tests.pytest.helper_file_1 import HelperClass1
@ -1976,7 +1976,7 @@ def test_slots_class():
test_dir = (
Path(__file__).parent / "code_to_optimize" / "tests" / "pytest"
).resolve()
tmp_dir_path = get_run_tmp_file(Path("test_return_values"))
tmp_dir_path = get_run_tmp_file(Path("codeflash_results"))
sample_code = f"""
from codeflash_python.runtime._codeflash_capture import codeflash_capture

View file

@ -196,11 +196,14 @@ class TestMergeByIndexCpuRuntime:
class TestSqliteParserCpuRuntime:
"""parse_sqlite_test_results extracts cpu_runtime from the database."""
"""parse_async_behavior_results extracts cpu_runtime from the database."""
def test_cpu_runtime_from_sqlite(self, tmp_path: Path) -> None:
from codeflash_python.testing._data_parsers import (
parse_sqlite_test_results,
from codeflash_python.runtime._codeflash_async_decorators import (
CREATE_TABLE_SQL,
)
from codeflash_python.testing._async_data_parser import (
parse_async_behavior_results,
)
test_file = tmp_path / "tests" / "test_example.py"
@ -209,22 +212,10 @@ class TestSqliteParserCpuRuntime:
db_path = tmp_path / "results.sqlite"
db = sqlite3.connect(db_path)
db.execute(CREATE_TABLE_SQL)
db.execute(
"CREATE TABLE test_results ("
" test_module_path TEXT,"
" test_class_name TEXT,"
" test_function_name TEXT,"
" function_getting_tested TEXT,"
" loop_index INTEGER,"
" iteration_id TEXT,"
" runtime INTEGER,"
" return_value BLOB,"
" verification_type TEXT,"
" cpu_runtime INTEGER"
")"
)
db.execute(
"INSERT INTO test_results VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
"tests.test_example",
None,
@ -232,10 +223,15 @@ class TestSqliteParserCpuRuntime:
"run",
1,
"0",
"behavior",
500,
98765,
None,
"function_call",
98765,
None,
None,
None,
None,
),
)
db.commit()
@ -251,7 +247,9 @@ class TestSqliteParserCpuRuntime:
)
test_config = TestConfig(tests_project_rootdir=tmp_path)
results = parse_sqlite_test_results(db_path, test_files, test_config)
results = parse_async_behavior_results(
db_path, test_files, test_config
)
assert 1 == len(results)
assert 98765 == results[0].cpu_runtime
@ -264,8 +262,11 @@ class TestEndToEndCpuRuntime:
self,
tmp_path: Path,
) -> None:
from codeflash_python.testing._data_parsers import (
parse_sqlite_test_results,
from codeflash_python.runtime._codeflash_async_decorators import (
CREATE_TABLE_SQL,
)
from codeflash_python.testing._async_data_parser import (
parse_async_behavior_results,
)
test_file = tmp_path / "tests" / "test_example.py"
@ -274,22 +275,10 @@ class TestEndToEndCpuRuntime:
db_path = tmp_path / "results.sqlite"
db = sqlite3.connect(db_path)
db.execute(CREATE_TABLE_SQL)
db.execute(
"CREATE TABLE test_results ("
" test_module_path TEXT,"
" test_class_name TEXT,"
" test_function_name TEXT,"
" function_getting_tested TEXT,"
" loop_index INTEGER,"
" iteration_id TEXT,"
" runtime INTEGER,"
" return_value BLOB,"
" verification_type TEXT,"
" cpu_runtime INTEGER"
")"
)
db.execute(
"INSERT INTO test_results VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
"tests.test_example",
None,
@ -297,10 +286,15 @@ class TestEndToEndCpuRuntime:
"run",
1,
"0",
"behavior",
750,
42000,
None,
"function_call",
42000,
None,
None,
None,
None,
),
)
db.commit()
@ -316,7 +310,7 @@ class TestEndToEndCpuRuntime:
)
test_config = TestConfig(tests_project_rootdir=tmp_path)
data_results = parse_sqlite_test_results(
data_results = parse_async_behavior_results(
db_path,
test_files,
test_config,

View file

@ -27,7 +27,7 @@ from codeflash_python.runtime._codeflash_capture import codeflash_capture
class MyClass:
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
def __init__(self):
self.x = 1
@ -101,7 +101,7 @@ from codeflash_python.runtime._codeflash_capture import codeflash_capture
class MyClass(ParentClass):
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@ -148,7 +148,7 @@ from codeflash_python.runtime._codeflash_capture import codeflash_capture
class MyClass:
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
def __init__(self):
self.x = 1
@ -209,7 +209,7 @@ from codeflash_python.runtime._codeflash_capture import codeflash_capture
class MyClass:
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
def __init__(self):
self.x = 1
@ -222,7 +222,7 @@ from codeflash_python.runtime._codeflash_capture import codeflash_capture
class HelperClass:
@codeflash_capture(function_name='HelperClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=False)
@codeflash_capture(function_name='HelperClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=False)
def __init__(self):
self.y = 1
@ -306,7 +306,7 @@ from codeflash_python.runtime._codeflash_capture import codeflash_capture
class MyClass:
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
@codeflash_capture(function_name='MyClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)
def __init__(self):
self.x = 1
@ -324,7 +324,7 @@ from codeflash_python.runtime._codeflash_capture import codeflash_capture
class HelperClass1:
@codeflash_capture(function_name='HelperClass1.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=False)
@codeflash_capture(function_name='HelperClass1.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=False)
def __init__(self):
self.y = 1
@ -339,7 +339,7 @@ from codeflash_python.runtime._codeflash_capture import codeflash_capture
class HelperClass2:
@codeflash_capture(function_name='HelperClass2.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=False)
@codeflash_capture(function_name='HelperClass2.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=False)
def __init__(self):
self.z = 2
@ -348,7 +348,7 @@ class HelperClass2:
class AnotherHelperClass:
@codeflash_capture(function_name='AnotherHelperClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=False)
@codeflash_capture(function_name='AnotherHelperClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=False)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@ -610,7 +610,7 @@ _codeflash_orig_MyAttrsClass_init = MyAttrsClass.__init__
def _codeflash_patched_MyAttrsClass_init(self, *args, **kwargs):
return _codeflash_orig_MyAttrsClass_init(self, *args, **kwargs)
MyAttrsClass.__init__ = codeflash_capture(function_name='MyAttrsClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)(_codeflash_patched_MyAttrsClass_init)
MyAttrsClass.__init__ = codeflash_capture(function_name='MyAttrsClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)(_codeflash_patched_MyAttrsClass_init)
"""
test_path.write_text(original_code)
@ -661,7 +661,7 @@ _codeflash_orig_FrozenPoint_init = FrozenPoint.__init__
def _codeflash_patched_FrozenPoint_init(self, *args, **kwargs):
return _codeflash_orig_FrozenPoint_init(self, *args, **kwargs)
FrozenPoint.__init__ = codeflash_capture(function_name='FrozenPoint.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)(_codeflash_patched_FrozenPoint_init)
FrozenPoint.__init__ = codeflash_capture(function_name='FrozenPoint.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)(_codeflash_patched_FrozenPoint_init)
"""
test_path.write_text(original_code)
@ -710,7 +710,7 @@ _codeflash_orig_MyAttrClass_init = MyAttrClass.__init__
def _codeflash_patched_MyAttrClass_init(self, *args, **kwargs):
return _codeflash_orig_MyAttrClass_init(self, *args, **kwargs)
MyAttrClass.__init__ = codeflash_capture(function_name='MyAttrClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("test_return_values")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)(_codeflash_patched_MyAttrClass_init)
MyAttrClass.__init__ = codeflash_capture(function_name='MyAttrClass.__init__', tmp_dir_path='{get_run_tmp_file(Path("codeflash_results")).as_posix()}', tests_root='{test_path.parent.as_posix()}', is_fto=True)(_codeflash_patched_MyAttrClass_init)
"""
test_path.write_text(original_code)

View file

@ -5,11 +5,16 @@ import sqlite3
from pathlib import Path
from codeflash_python._model import VerificationType
from codeflash_python.runtime._codeflash_async_decorators import (
CREATE_TABLE_SQL,
)
from codeflash_python.test_discovery.linking import (
module_name_from_file_path,
)
from codeflash_python.test_discovery.models import TestType
from codeflash_python.testing._data_parsers import parse_sqlite_test_results
from codeflash_python.testing._async_data_parser import (
parse_async_behavior_results,
)
from codeflash_python.testing._path_resolution import (
file_name_from_test_module_name,
file_path_from_module_name,
@ -243,13 +248,13 @@ class TestParseTestXml:
class TestParseSqliteTestResults:
"""parse_sqlite_test_results SQLite parsing."""
"""parse_async_behavior_results SQLite parsing."""
def test_missing_file(self, tmp_path: Path) -> None:
"""Returns empty TestResults for nonexistent file."""
config = TestConfig(tests_project_rootdir=tmp_path)
files = TestFiles()
result = parse_sqlite_test_results(
result = parse_async_behavior_results(
tmp_path / "missing.sqlite",
files,
config,
@ -257,30 +262,18 @@ class TestParseSqliteTestResults:
assert 0 == len(result)
def test_parses_basic_sqlite(self, tmp_path: Path) -> None:
"""Create sqlite with test_results table, parse it."""
"""Create sqlite with codeflash_results table, parse it."""
test_file = tmp_path / "tests" / "test_foo.py"
test_file.parent.mkdir(parents=True)
test_file.touch()
db_path = tmp_path / "results.sqlite"
conn = sqlite3.connect(db_path)
conn.execute(
"CREATE TABLE test_results ("
" test_module_path TEXT,"
" test_class_name TEXT,"
" test_function_name TEXT,"
" function_getting_tested TEXT,"
" loop_index INTEGER,"
" iteration_id TEXT,"
" runtime INTEGER,"
" return_value BLOB,"
" verification_type TEXT,"
" cpu_runtime INTEGER"
")"
)
conn.execute(CREATE_TABLE_SQL)
module_name = f"tests{os.sep}test_foo".replace(os.sep, ".")
conn.execute(
"INSERT INTO test_results VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO codeflash_results VALUES "
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
module_name,
None,
@ -288,10 +281,15 @@ class TestParseSqliteTestResults:
"bar",
1,
"0",
"behavior",
100,
0,
None,
VerificationType.FUNCTION_CALL.value,
0,
None,
None,
None,
None,
),
)
conn.commit()
@ -304,7 +302,7 @@ class TestParseSqliteTestResults:
)
files = TestFiles(test_files=[tf])
result = parse_sqlite_test_results(db_path, files, config)
result = parse_async_behavior_results(db_path, files, config)
assert 1 == len(result)
assert result[0].did_pass is True
assert 100 == result[0].runtime