Shifted PR to new repo; refactors optimizer code into a FunctionOptimizer class

This commit is contained in:
Alvin Ryanputra 2025-02-13 16:10:53 +08:00
parent 5851ec2b84
commit 8e8258a99f
10 changed files with 1564 additions and 1519 deletions
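Every file in this diff applies the same migration: the tests stop building an Optimizer from an argparse Namespace and instead build a FunctionOptimizer from a TestConfig, then call get_code_optimization_context() or run_and_parse_tests() on it. Below is a minimal before/after sketch of that pattern, assembled from the hunks that follow; the project-root and function names are placeholders, not any specific test in the diff.

    # Before: configuration passed through Optimizer + argparse Namespace
    opt = Optimizer(
        Namespace(
            project_root=project_root,
            disable_telemetry=True,
            tests_root="tests",
            test_framework="pytest",
            pytest_cmd="pytest",
            experiment_id=None,
            test_project_root=project_root,
        )
    )
    code_context = opt.get_code_optimization_context(
        function_to_optimize=fto, project_root=project_root, original_source_code=original_code
    ).unwrap()

    # After: configuration captured in TestConfig, work done by FunctionOptimizer
    test_config = TestConfig(
        tests_root="tests",
        tests_project_rootdir=project_root,
        project_root_path=project_root,
        test_framework="pytest",
        pytest_cmd="pytest",
    )
    func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config)
    code_context = func_optimizer.get_code_optimization_context().unwrap()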

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -2,7 +2,6 @@ from __future__ import annotations
import dataclasses
import os
-from argparse import Namespace
from collections import defaultdict
from pathlib import Path
@@ -14,7 +13,8 @@ from codeflash.code_utils.code_replacer import (
)
from codeflash.discovery.functions_to_optimize import FunctionToOptimize
from codeflash.models.models import FunctionParent
-from codeflash.optimization.optimizer import Optimizer
+from codeflash.optimization.function_optimizer import FunctionOptimizer
+from codeflash.verification.verification_utils import TestConfig
os.environ["CODEFLASH_API_KEY"] = "cf-test-key"
@@ -766,24 +766,18 @@ class MainClass:
return HelperClass(self.name).helper_method()
"""
file_path = Path(__file__).resolve()
-opt = Optimizer(
-Namespace(
-project_root=file_path.parent.resolve(),
-disable_telemetry=True,
-tests_root="tests",
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=file_path.parent.resolve(),
-)
-)
func_top_optimize = FunctionToOptimize(
function_name="main_method", file_path=file_path, parents=[FunctionParent("MainClass", "ClassDef")]
)
-original_code = file_path.read_text()
-code_context = opt.get_code_optimization_context(
-function_to_optimize=func_top_optimize, project_root=file_path.parent, original_source_code=original_code
-).unwrap()
+test_config = TestConfig(
+tests_root=file_path.parent,
+tests_project_rootdir=file_path.parent,
+project_root_path=file_path.parent,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=func_top_optimize, test_cfg=test_config)
+code_context = func_optimizer.get_code_optimization_context().unwrap()
assert code_context.code_to_optimize_with_helpers == get_code_output
@@ -1013,35 +1007,35 @@ class TestType(Enum):
class TestResults(BaseModel):
def __iter__(self) -> Iterator[FunctionTestInvocation]:
return iter(self.test_results)
def __len__(self) -> int:
return len(self.test_results)
def __getitem__(self, index: int) -> FunctionTestInvocation:
return self.test_results[index]
def __setitem__(self, index: int, value: FunctionTestInvocation) -> None:
self.test_results[index] = value
def __delitem__(self, index: int) -> None:
del self.test_results[index]
def __contains__(self, value: FunctionTestInvocation) -> bool:
return value in self.test_results
def __bool__(self) -> bool:
return bool(self.test_results)
def __eq__(self, other: object) -> bool:
# Unordered comparison
if not isinstance(other, TestResults) or len(self) != len(other):
return False
# Increase recursion limit only if necessary
original_recursion_limit = sys.getrecursionlimit()
if original_recursion_limit < 5000:
sys.setrecursionlimit(5000)
for test_result in self:
other_test_result = other.get_by_id(test_result.id)
if other_test_result is None or not (
@@ -1054,10 +1048,10 @@ class TestResults(BaseModel):
):
sys.setrecursionlimit(original_recursion_limit)
return False
sys.setrecursionlimit(original_recursion_limit)
return True
def get_test_pass_fail_report_by_type(self) -> dict[TestType, dict[str, int]]:
report = {test_type: {"passed": 0, "failed": 0} for test_type in TestType}
for test_result in self.test_results:
@@ -1105,8 +1099,8 @@ class TestResults(BaseModel):
)
assert (
new_code
== """from __future__ import annotations
import sys
from codeflash.verification.comparator import comparator
from enum import Enum
@@ -1245,21 +1239,21 @@ def cosine_similarity(X: Matrix, Y: Matrix) -> np.ndarray:
"""Row-wise cosine similarity between two equal-width matrices."""
if len(X.data) == 0 or len(Y.data) == 0:
return np.array([])
X_np, Y_np = np.asarray(X.data), np.asarray(Y.data)
if X_np.shape[1] != Y_np.shape[1]:
raise ValueError(f"Number of columns in X and Y must be the same. X has shape {X_np.shape} and Y has shape {Y_np.shape}.")
X_norm = np.linalg.norm(X_np, axis=1, keepdims=True)
Y_norm = np.linalg.norm(Y_np, axis=1, keepdims=True)
norm_product = X_norm * Y_norm.T
norm_product[norm_product == 0] = np.inf # Prevent division by zero
dot_product = np.dot(X_np, Y_np.T)
similarity = dot_product / norm_product
# Any NaN or Inf values are set to 0.0
np.nan_to_num(similarity, copy=False)
return similarity
def cosine_similarity_top_k(
X: Matrix,
@@ -1270,15 +1264,15 @@ def cosine_similarity_top_k(
"""Row-wise cosine similarity with optional top-k and score threshold filtering."""
if len(X.data) == 0 or len(Y.data) == 0:
return [], []
score_array = cosine_similarity(X, Y)
sorted_idxs = np.argpartition(-score_array.flatten(), range(top_k or len(score_array.flatten())))[:(top_k or len(score_array.flatten()))]
sorted_idxs = sorted_idxs[score_array.flatten()[sorted_idxs] > (score_threshold if score_threshold is not None else -1)]
ret_idxs = [(x // score_array.shape[1], x % score_array.shape[1]) for x in sorted_idxs]
scores = score_array.flatten()[sorted_idxs].tolist()
return ret_idxs, scores
'''
preexisting_objects: list[tuple[str, list[FunctionParent]]] = find_preexisting_objects(original_code)
@@ -1311,8 +1305,8 @@ def cosine_similarity_top_k(
project_root_path=Path(__file__).parent.parent.resolve(),
)
assert (
new_code
== '''import numpy as np
from pydantic.dataclasses import dataclass
from typing import List, Optional, Tuple, Union
@dataclass(config=dict(arbitrary_types_allowed=True))
@@ -1343,15 +1337,15 @@ def cosine_similarity_top_k(
"""Row-wise cosine similarity with optional top-k and score threshold filtering."""
if len(X.data) == 0 or len(Y.data) == 0:
return [], []
score_array = cosine_similarity(X, Y)
sorted_idxs = np.argpartition(-score_array.flatten(), range(top_k or len(score_array.flatten())))[:(top_k or len(score_array.flatten()))]
sorted_idxs = sorted_idxs[score_array.flatten()[sorted_idxs] > (score_threshold if score_threshold is not None else -1)]
ret_idxs = [(x // score_array.shape[1], x % score_array.shape[1]) for x in sorted_idxs]
scores = score_array.flatten()[sorted_idxs].tolist()
return ret_idxs, scores
'''
)
@@ -1370,8 +1364,8 @@ def cosine_similarity_top_k(
)
assert (
new_helper_code
== '''import numpy as np
from pydantic.dataclasses import dataclass
from typing import List, Optional, Tuple, Union
@dataclass(config=dict(arbitrary_types_allowed=True))
@@ -1381,21 +1375,21 @@ def cosine_similarity(X: Matrix, Y: Matrix) -> np.ndarray:
"""Row-wise cosine similarity between two equal-width matrices."""
if len(X.data) == 0 or len(Y.data) == 0:
return np.array([])
X_np, Y_np = np.asarray(X.data), np.asarray(Y.data)
if X_np.shape[1] != Y_np.shape[1]:
raise ValueError(f"Number of columns in X and Y must be the same. X has shape {X_np.shape} and Y has shape {Y_np.shape}.")
X_norm = np.linalg.norm(X_np, axis=1, keepdims=True)
Y_norm = np.linalg.norm(Y_np, axis=1, keepdims=True)
norm_product = X_norm * Y_norm.T
norm_product[norm_product == 0] = np.inf # Prevent division by zero
dot_product = np.dot(X_np, Y_np.T)
similarity = dot_product / norm_product
# Any NaN or Inf values are set to 0.0
np.nan_to_num(similarity, copy=False)
return similarity
def cosine_similarity_top_k(
X: Matrix,
@@ -1406,15 +1400,15 @@ def cosine_similarity_top_k(
"""Row-wise cosine similarity with optional top-k and score threshold filtering."""
if len(X.data) == 0 or len(Y.data) == 0:
return [], []
score_array = cosine_similarity(X, Y)
sorted_idxs = np.argpartition(-score_array.flatten(), range(top_k or len(score_array.flatten())))[:(top_k or len(score_array.flatten()))]
sorted_idxs = sorted_idxs[score_array.flatten()[sorted_idxs] > (score_threshold if score_threshold is not None else -1)]
ret_idxs = [(x // score_array.shape[1], x % score_array.shape[1]) for x in sorted_idxs]
scores = score_array.flatten()[sorted_idxs].tolist()
return ret_idxs, scores
'''
)
@@ -1481,7 +1475,7 @@ from __future__ import annotations as _annotations
def test_0_diff_code_replacement():
original_code = """from __future__ import annotations
import numpy as np
def functionA():
return np.array([1, 2, 3])

View file

@@ -2,18 +2,18 @@ from __future__ import annotations
import os
import re
-from argparse import Namespace
from pathlib import Path
from codeflash.code_utils.code_utils import get_run_tmp_file
from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE
from codeflash.discovery.functions_to_optimize import FunctionToOptimize
from codeflash.models.models import FunctionParent, TestFile, TestFiles, TestingMode
-from codeflash.optimization.optimizer import Optimizer
+from codeflash.optimization.function_optimizer import FunctionOptimizer
from codeflash.verification.equivalence import compare_test_results
from codeflash.verification.instrument_codeflash_capture import instrument_codeflash_capture
from codeflash.verification.test_results import TestType, VerificationType
from codeflash.verification.test_runner import execute_test_subprocess
+from codeflash.verification.verification_utils import TestConfig
# Tests for get_stack_info. Ensures that when a test is run via pytest, the correct test information is extracted
@@ -184,6 +184,7 @@ class MyClass:
self.x = 2
print(f"TEST_INFO_START|{{get_test_info_from_stack('{test_dir!s}')}}|TEST_INFO_END")
"""
+test_dir = (Path(__file__).parent.parent / "code_to_optimize" / "tests" / "pytest").resolve()
test_file_name = "test_stack_info_temp.py"
test_path = test_dir / test_file_name
@@ -319,7 +320,6 @@ class MyClass:
assert results[5][3] == "23"
finally:
-# Clean up files
test_path.unlink(missing_ok=True)
sample_code_path.unlink(missing_ok=True)
@@ -346,6 +346,7 @@ class MyClass:
self.x = 2
print(f"TEST_INFO_START|{{get_test_info_from_stack('{test_dir!s}')}}|TEST_INFO_END")
"""
+test_dir = (Path(__file__).parent.parent / "code_to_optimize" / "tests" / "pytest").resolve()
test_file_name = "test_stack_info_temp.py"
test_path = test_dir / test_file_name
@@ -431,23 +432,25 @@ class MyClass:
f.write(test_code)
with sample_code_path.open("w") as f:
f.write(sample_code)
-opt = Optimizer(
-Namespace(
-project_root=project_root_path,
-disable_telemetry=True,
-tests_root=tests_root,
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=project_root_path,
-)
-)
test_env = os.environ.copy()
test_env["CODEFLASH_TEST_ITERATION"] = "0"
test_env["CODEFLASH_LOOP_INDEX"] = "1"
test_type = TestType.EXISTING_UNIT_TEST
-test_files = TestFiles(
+test_config = TestConfig(
+tests_root=tests_root,
+tests_project_rootdir=project_root_path,
+project_root_path=project_root_path,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+fto = FunctionToOptimize(
+function_name="some_function",
+file_path=sample_code_path,
+parents=[FunctionParent(name="MyClass", type="ClassDef")],
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config)
+func_optimizer.test_files = TestFiles(
test_files=[
TestFile(
instrumented_behavior_file_path=test_path,
@@ -457,10 +460,10 @@ class MyClass:
)
]
)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
@@ -541,23 +544,24 @@ class MyClass(ParentClass):
f.write(test_code)
with sample_code_path.open("w") as f:
f.write(sample_code)
-opt = Optimizer(
-Namespace(
-project_root=project_root_path,
-disable_telemetry=True,
-tests_root=tests_root,
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=project_root_path,
-)
-)
test_env = os.environ.copy()
test_env["CODEFLASH_TEST_ITERATION"] = "0"
test_env["CODEFLASH_LOOP_INDEX"] = "1"
test_type = TestType.EXISTING_UNIT_TEST
-test_files = TestFiles(
+test_config = TestConfig(
+tests_root=tests_root,
+tests_project_rootdir=project_root_path,
+project_root_path=project_root_path,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+fto = FunctionToOptimize(
+function_name="some_function",
+file_path=sample_code_path,
+parents=[FunctionParent(name="MyClass", type="ClassDef")],
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config)
+func_optimizer.test_files = TestFiles(
test_files=[
TestFile(
instrumented_behavior_file_path=test_path,
@@ -567,10 +571,10 @@ class MyClass(ParentClass):
)
]
)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
@@ -653,25 +657,24 @@ class MyClass:
with sample_code_path.open("w") as f:
f.write(sample_code)
-opt = Optimizer(
-Namespace(
-project_root=project_root_path,
-disable_telemetry=True,
-tests_root=tests_root,
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=project_root_path,
-)
-)
-# Environment variables for codeflash
test_env = os.environ.copy()
test_env["CODEFLASH_TEST_ITERATION"] = "0"
test_env["CODEFLASH_LOOP_INDEX"] = "1"
test_type = TestType.EXISTING_UNIT_TEST
-test_files = TestFiles(
+test_config = TestConfig(
+tests_root=tests_root,
+tests_project_rootdir=project_root_path,
+project_root_path=project_root_path,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+fto = FunctionToOptimize(
+function_name="some_function",
+file_path=sample_code_path,
+parents=[FunctionParent(name="MyClass", type="ClassDef")],
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config)
+func_optimizer.test_files = TestFiles(
test_files=[
TestFile(
instrumented_behavior_file_path=test_path,
@@ -681,12 +684,10 @@ class MyClass:
)
]
)
-# Run the tests and parse results
-test_results, coverage_data = opt.run_and_parse_tests(
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
@@ -804,24 +805,26 @@ class AnotherHelperClass:
f.write(original_code)
with test_path.open("w") as f:
f.write(test_code)
-opt = Optimizer(
-Namespace(
-project_root=project_root_path,
-disable_telemetry=True,
-tests_root=tests_root,
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=project_root_path,
-)
-)
test_env = os.environ.copy()
test_env["CODEFLASH_TEST_ITERATION"] = "0"
test_env["CODEFLASH_LOOP_INDEX"] = "1"
test_type = TestType.EXISTING_UNIT_TEST
-test_files = TestFiles(
+test_config = TestConfig(
+tests_root=tests_root,
+tests_project_rootdir=project_root_path,
+project_root_path=project_root_path,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+fto = FunctionToOptimize(
+function_name="target_function",
+file_path=fto_file_path,
+parents=[FunctionParent(name="MyClass", type="ClassDef")],
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config)
+func_optimizer.test_files = TestFiles(
test_files=[
TestFile(
instrumented_behavior_file_path=test_path,
@@ -832,10 +835,10 @@ class AnotherHelperClass:
]
)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
@@ -925,7 +928,7 @@ class AnotherHelperClass:
helper_path_2 = test_dir / helper_file_2
fto_file_path = test_dir / fto_file_name
-tests_root = (Path(__file__).parent.resolve() / "../code_to_optimize/tests/pytest/").resolve()
+tests_root = Path(__file__).parent.resolve() / "../code_to_optimize/tests/pytest/"
project_root_path = (Path(__file__).parent / "..").resolve()
try:
@@ -938,30 +941,26 @@ class AnotherHelperClass:
with test_path.open("w") as f:
f.write(test_code)
-fto = FunctionToOptimize("target_function", str(fto_file_path), parents=[FunctionParent("MyClass", "ClassDef")])
+fto = FunctionToOptimize("target_function", fto_file_path, parents=[FunctionParent("MyClass", "ClassDef")])
file_path_to_helper_class = {
helper_path_1: {"HelperClass1"},
helper_path_2: {"HelperClass2", "AnotherHelperClass"},
}
instrument_codeflash_capture(fto, file_path_to_helper_class, tests_root)
-opt = Optimizer(
-Namespace(
-project_root=project_root_path,
-disable_telemetry=True,
-tests_root=tests_root,
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=project_root_path,
-)
-)
test_env = os.environ.copy()
test_env["CODEFLASH_TEST_ITERATION"] = "0"
test_env["CODEFLASH_LOOP_INDEX"] = "1"
test_type = TestType.EXISTING_UNIT_TEST
-test_files = TestFiles(
+test_config = TestConfig(
+tests_root=tests_root,
+tests_project_rootdir=project_root_path,
+project_root_path=project_root_path,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config)
+func_optimizer.test_files = TestFiles(
test_files=[
TestFile(
instrumented_behavior_file_path=test_path,
@@ -983,10 +982,10 @@ class AnotherHelperClass:
}
instrument_codeflash_capture(fto, file_path_to_helper_classes, tests_root)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
@@ -994,7 +993,7 @@ class AnotherHelperClass:
)
# Remove instrumentation
-opt.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path)
+FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path)
assert len(test_results.test_results) == 4
assert test_results[0].id.test_function_name == "test_helper_classes"
@@ -1035,17 +1034,17 @@ class MyClass:
Path(helper_path_2): {"HelperClass2", "AnotherHelperClass"},
}
instrument_codeflash_capture(fto, file_path_to_helper_classes, tests_root)
-modified_test_results, coverage_data = opt.run_and_parse_tests(
+modified_test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
testing_time=0.1,
)
# Remove instrumentation
-opt.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path)
+FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path)
# Now, this fto_code mutates the instance so it should fail
mutated_fto_code = """
@@ -1074,17 +1073,17 @@ class MyClass:
Path(helper_path_2): {"HelperClass2", "AnotherHelperClass"},
}
instrument_codeflash_capture(fto, file_path_to_helper_classes, tests_root)
-mutated_test_results, coverage_data = opt.run_and_parse_tests(
+mutated_test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
testing_time=0.1,
)
# Remove instrumentation
-opt.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path)
+FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path)
assert not compare_test_results(test_results, mutated_test_results)
# This fto code stopped using a helper class. it should still pass
@@ -1112,17 +1111,17 @@ class MyClass:
Path(helper_path_2): {"HelperClass2", "AnotherHelperClass"},
}
instrument_codeflash_capture(fto, file_path_to_helper_classes, tests_root)
-no_helper1_test_results, coverage_data = opt.run_and_parse_tests(
+no_helper1_test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
testing_time=0.1,
)
# Remove instrumentation
-opt.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path)
+FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path)
assert compare_test_results(test_results, no_helper1_test_results)
finally:

View file

@@ -1,5 +1,4 @@
import pathlib
-from argparse import Namespace
from dataclasses import dataclass
import pytest
@@ -7,7 +6,8 @@ from codeflash.discovery.functions_to_optimize import FunctionToOptimize
from codeflash.either import is_successful
from codeflash.models.models import FunctionParent
from codeflash.optimization.function_context import get_function_variables_definitions
-from codeflash.optimization.optimizer import Optimizer
+from codeflash.optimization.function_optimizer import FunctionOptimizer
+from codeflash.verification.verification_utils import TestConfig
def calculate_something(data):
@@ -184,17 +184,7 @@ class Graph:
def test_class_method_dependencies() -> None:
file_path = pathlib.Path(__file__).resolve()
-opt = Optimizer(
-Namespace(
-project_root=file_path.parent.resolve(),
-disable_telemetry=True,
-tests_root="tests",
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=file_path.parent.resolve(),
-)
-)
function_to_optimize = FunctionToOptimize(
function_name="topologicalSort",
file_path=str(file_path),
@@ -202,9 +192,19 @@ def test_class_method_dependencies() -> None:
starting_line=None,
ending_line=None,
)
+func_optimizer = FunctionOptimizer(
+function_to_optimize=function_to_optimize,
+test_cfg=TestConfig(
+tests_root=file_path,
+tests_project_rootdir=file_path.parent,
+project_root_path=file_path.parent,
+test_framework="pytest",
+pytest_cmd="pytest",
+),
+)
with open(file_path) as f:
original_code = f.read()
-ctx_result = opt.get_code_optimization_context(function_to_optimize, opt.args.project_root, original_code)
+ctx_result = func_optimizer.get_code_optimization_context()
if not is_successful(ctx_result):
pytest.fail()
code_context = ctx_result.unwrap()
@@ -280,17 +280,7 @@ def test_decorator_dependencies() -> None:
def test_recursive_function_context() -> None:
file_path = pathlib.Path(__file__).resolve()
-opt = Optimizer(
-Namespace(
-project_root=file_path.parent.resolve(),
-disable_telemetry=True,
-tests_root="tests",
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=file_path.parent.resolve(),
-)
-)
function_to_optimize = FunctionToOptimize(
function_name="recursive",
file_path=str(file_path),
@@ -298,9 +288,20 @@ def test_recursive_function_context() -> None:
starting_line=None,
ending_line=None,
)
+func_optimizer = FunctionOptimizer(
+function_to_optimize=function_to_optimize,
+test_cfg=TestConfig(
+tests_root=file_path,
+tests_project_rootdir=file_path.parent,
+project_root_path=file_path.parent,
+test_framework="pytest",
+pytest_cmd="pytest",
+),
+)
with open(file_path) as f:
original_code = f.read()
-ctx_result = opt.get_code_optimization_context(function_to_optimize, opt.args.project_root, original_code)
+ctx_result = func_optimizer.get_code_optimization_context()
if not is_successful(ctx_result):
pytest.fail()
code_context = ctx_result.unwrap()

View file

@@ -3,10 +3,13 @@ from argparse import Namespace
from pathlib import Path
import pytest
from codeflash.discovery.functions_to_optimize import FunctionToOptimize
from codeflash.either import is_successful
from codeflash.models.models import FunctionParent
+from codeflash.optimization.function_optimizer import FunctionOptimizer
from codeflash.optimization.optimizer import Optimizer
+from codeflash.verification.verification_utils import TestConfig
class HelperClass:
@@ -31,6 +34,7 @@ def test_get_outside_method_helper() -> None:
experiment_id=None,
)
)
function_to_optimize = FunctionToOptimize(
function_name="OptimizeMe", file_path=file_path, parents=[], starting_line=None, ending_line=None
)
@@ -51,7 +55,7 @@ _KEY_T = TypeVar("_KEY_T")
_STORE_T = TypeVar("_STORE_T")
class AbstractCacheBackend(CacheBackend, Protocol[_KEY_T, _STORE_T]):
"""Interface for cache backends used by the persistent cache decorator."""
def __init__(self) -> None: ...
def hash_key(
@@ -213,17 +217,6 @@ class _PersistentCache(Generic[_P, _R, _CacheBackendT]):
f.write(code)
f.flush()
file_path = Path(f.name).resolve()
-opt = Optimizer(
-Namespace(
-project_root=file_path.parent.resolve(),
-disable_telemetry=True,
-tests_root="tests",
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=Path().resolve(),
-)
-)
function_to_optimize = FunctionToOptimize(
function_name="__call__",
file_path=file_path,
@@ -231,17 +224,25 @@ class _PersistentCache(Generic[_P, _R, _CacheBackendT]):
starting_line=None,
ending_line=None,
)
+test_config = TestConfig(
+tests_root="tests",
+tests_project_rootdir=Path.cwd(),
+project_root_path=file_path.parent.resolve(),
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=function_to_optimize, test_cfg=test_config)
with open(file_path) as f:
original_code = f.read()
-ctx_result = opt.get_code_optimization_context(function_to_optimize, opt.args.project_root, original_code)
+ctx_result = func_optimizer.get_code_optimization_context()
if not is_successful(ctx_result):
pytest.fail()
code_context = ctx_result.unwrap()
assert code_context.helper_functions[0].qualified_name == "AbstractCacheBackend.get_cache_or_call"
assert (
code_context.code_to_optimize_with_helpers
== '''_R = TypeVar("_R")
class AbstractCacheBackend(CacheBackend, Protocol[_KEY_T, _STORE_T]):
def __init__(self) -> None: ...
@@ -338,29 +339,27 @@ class _PersistentCache(Generic[_P, _R, _CacheBackendT]):
def test_bubble_sort_deps() -> None:
file_path = (Path(__file__) / ".." / ".." / "code_to_optimize" / "bubble_sort_deps.py").resolve()
-opt = Optimizer(
-Namespace(
-project_root=file_path.parent.parent.resolve(),
-disable_telemetry=True,
-tests_root=str(file_path.parent / "tests"),
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=file_path.parent.resolve(),
-)
-)
function_to_optimize = FunctionToOptimize(
function_name="sorter_deps", file_path=file_path, parents=[], starting_line=None, ending_line=None
)
+test_config = TestConfig(
+tests_root=str(file_path.parent / "tests"),
+tests_project_rootdir=file_path.parent.resolve(),
+project_root_path=file_path.parent.parent.resolve(),
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=function_to_optimize, test_cfg=test_config)
with open(file_path) as f:
original_code = f.read()
-ctx_result = opt.get_code_optimization_context(function_to_optimize, opt.args.project_root, original_code)
+ctx_result = func_optimizer.get_code_optimization_context()
if not is_successful(ctx_result):
pytest.fail()
code_context = ctx_result.unwrap()
assert (
code_context.code_to_optimize_with_helpers
== """def dep1_comparer(arr, j: int) -> bool:
return arr[j] > arr[j + 1]
def dep2_swap(arr, j):
@@ -378,7 +377,7 @@ def sorter_deps(arr):
)
assert len(code_context.helper_functions) == 2
assert (
code_context.helper_functions[0].fully_qualified_name
== "code_to_optimize.bubble_sort_dep1_helper.dep1_comparer"
)
assert code_context.helper_functions[1].fully_qualified_name == "code_to_optimize.bubble_sort_dep2_swap.dep2_swap"

View file

@@ -1,6 +1,7 @@
from textwrap import dedent
import pytest
from codeflash.context.code_context_extractor import get_read_only_code
@@ -66,7 +67,7 @@ def test_dunder_methods_remove_docstring() -> None:
expected = """
class TestClass:
def __str__(self):
return f"Value: {self.x}"
"""
@@ -91,7 +92,7 @@ def test_class_remove_docstring() -> None:
expected = """
class TestClass:
def __str__(self):
return f"Value: {self.x}"
"""
@@ -118,7 +119,7 @@ def test_mixed_remove_docstring() -> None:
expected = """
class TestClass:
def __str__(self):
return f"Value: {self.x}"
"""
@@ -655,7 +656,7 @@ def test_simplified_complete_implementation() -> None:
processor = DataProcessor(sample_data)
class ResultHandler:
def __str__(self) -> str:
return f"ResultHandler(cache_size={len(self.cache)})"

View file

@@ -46,7 +46,7 @@ codeflash_wrap_string = """def codeflash_wrap(wrapped, test_module_name, test_cl
"""
-def test_function_full_instrumentation() -> None:
+def test_bubble_sort_behavior_results() -> None:
code = """from code_to_optimize.bubble_sort import sorter
@@ -147,7 +149,9 @@ def test_sort():
test_env["CODEFLASH_TEST_ITERATION"] = "0"
test_env["CODEFLASH_LOOP_INDEX"] = "1"
test_type = TestType.EXISTING_UNIT_TEST
-test_files = TestFiles(
+func_optimizer = opt.create_function_optimizer(func)
+func_optimizer.test_files = TestFiles(
test_files=[
TestFile(
instrumented_behavior_file_path=test_path,
@@ -157,10 +159,10 @@ def test_sort():
)
]
)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
@@ -318,7 +320,8 @@ def test_sort():
test_env["CODEFLASH_TEST_ITERATION"] = "0"
test_env["CODEFLASH_LOOP_INDEX"] = "1"
test_type = TestType.EXISTING_UNIT_TEST
-test_files = TestFiles(
+func_optimizer = opt.create_function_optimizer(fto)
+func_optimizer.test_files = TestFiles(
test_files=[
TestFile(
instrumented_behavior_file_path=test_path,
@@ -328,10 +331,10 @@ def test_sort():
)
]
)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,
@@ -412,11 +415,21 @@ class BubbleSorter:
test_project_root=project_root_path,
)
)
-new_test_results, coverage_data = opt.run_and_parse_tests(
+func_optimizer = opt.create_function_optimizer(fto)
+func_optimizer.test_files = TestFiles(
+test_files=[
+TestFile(
+instrumented_behavior_file_path=test_path,
+test_type=test_type,
+original_file_path=test_path,
+benchmarking_file_path=test_path_perf,
+)
+]
+)
+new_test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
-test_files=test_files,
+test_files=func_optimizer.test_files,
optimization_iteration=0,
pytest_min_loops=1,
pytest_max_loops=1,

View file

@@ -5,7 +5,6 @@ import math
import os
import sys
import tempfile
-from argparse import Namespace
from pathlib import Path
from codeflash.code_utils.code_utils import get_run_tmp_file
@@ -15,8 +14,9 @@ from codeflash.code_utils.instrument_existing_tests import (
)
from codeflash.discovery.functions_to_optimize import FunctionToOptimize
from codeflash.models.models import CodePosition, FunctionParent, TestFile, TestFiles, TestingMode, TestsInFile
-from codeflash.optimization.optimizer import Optimizer
+from codeflash.optimization.function_optimizer import FunctionOptimizer
from codeflash.verification.test_results import TestType
+from codeflash.verification.verification_utils import TestConfig
codeflash_wrap_string = """def codeflash_wrap(wrapped, test_module_name, test_class_name, test_name, function_name, line_id, loop_index, codeflash_cur, codeflash_con, *args, **kwargs):
test_id = f'{{test_module_name}}:{{test_class_name}}:{{test_name}}:{{line_id}}:{{loop_index}}'
@@ -160,7 +160,7 @@ class TestPigLatin(unittest.TestCase):
with tempfile.NamedTemporaryFile(mode="w") as f:
f.write(code)
f.flush()
-func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path("module.py"))
+func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path(f.name))
original_cwd = Path.cwd()
run_cwd = Path(__file__).parent.parent.resolve()
os.chdir(run_cwd)
@@ -351,12 +351,12 @@ def test_sort():
try:
with test_path.open("w") as f:
f.write(code)
+code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/bubble_sort.py").resolve()
tests_root = Path(__file__).parent.resolve() / "../code_to_optimize/tests/pytest/"
project_root_path = (Path(__file__).parent / "..").resolve()
original_cwd = Path.cwd()
run_cwd = Path(__file__).parent.parent.resolve()
-func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path("module.py"))
+func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_path)
os.chdir(run_cwd)
success, new_test = inject_profiling_into_existing_test(
test_path,
@@ -394,18 +394,14 @@ def test_sort():
# Overwrite old test with new instrumented test
-opt = Optimizer(
-Namespace(
-project_root=project_root_path,
-disable_telemetry=True,
-tests_root=tests_root,
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=project_root_path,
-)
-)
+test_config = TestConfig(
+tests_root=tests_root,
+tests_project_rootdir=project_root_path,
+project_root_path=project_root_path,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
test_env = os.environ.copy()
test_env["CODEFLASH_TEST_ITERATION"] = "0"
test_env["CODEFLASH_LOOP_INDEX"] = "1"
@@ -420,7 +416,7 @@ def test_sort():
)
]
)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
test_files=test_files,
@@ -455,7 +451,7 @@ def test_sort():
with test_path_perf.open("w") as f:
f.write(new_perf_test)
-test_results_perf, _ = opt.run_and_parse_tests(
+test_results_perf, _ = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE,
test_env=test_env,
test_files=test_files,
@@ -556,6 +552,7 @@ def test_sort_parametrized(input, expected_output):
assert output == expected_output
"""
)
+code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/bubble_sort.py").resolve()
test_path = (
Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/pytest/test_perfinjector_bubble_sort_parametrized_results_temp.py"
@@ -573,7 +570,7 @@ def test_sort_parametrized(input, expected_output):
original_cwd = Path.cwd()
run_cwd = Path(__file__).parent.parent.resolve()
-func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path("module.py"))
+func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_path)
os.chdir(run_cwd)
success, new_test = inject_profiling_into_existing_test(
test_path, [CodePosition(14, 13)], func, project_root_path, "pytest", mode=TestingMode.BEHAVIOR
@@ -614,19 +611,15 @@ def test_sort_parametrized(input, expected_output):
)
]
)
-opt = Optimizer(
-Namespace(
-project_root=project_root_path,
-disable_telemetry=True,
-tests_root=tests_root,
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=project_root_path,
-)
-)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_config = TestConfig(
+tests_root=tests_root,
+tests_project_rootdir=project_root_path,
+project_root_path=project_root_path,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
test_files=test_files,
@@ -668,7 +661,7 @@ def test_sort_parametrized(input, expected_output):
assert test_results[2].runtime > 0
assert test_results[2].did_pass
-test_results_perf, coverage_data = opt.run_and_parse_tests(
+test_results_perf, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE,
test_env=test_env,
test_files=test_files,
@@ -783,7 +776,7 @@ def test_sort_parametrized_loop(input, expected_output):
assert output == expected_output
"""
)
+code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/bubble_sort.py").resolve()
test_path = (
Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/pytest/test_perfinjector_bubble_sort_parametrized_loop_results_temp.py"
@@ -805,7 +798,7 @@ def test_sort_parametrized_loop(input, expected_output):
original_cwd = Path.cwd()
run_cwd = Path(__file__).parent.parent.resolve()
-func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path("module.py"))
+func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_path)
os.chdir(run_cwd)
success, new_test = inject_profiling_into_existing_test(
test_path, [CodePosition(15, 17)], func, project_root_path, "pytest", mode=TestingMode.BEHAVIOR
@@ -857,18 +850,16 @@ def test_sort_parametrized_loop(input, expected_output):
)
]
)
-opt = Optimizer(
-Namespace(
-project_root=project_root_path,
-disable_telemetry=True,
-tests_root=tests_root,
-test_framework="pytest",
-pytest_cmd="pytest",
-experiment_id=None,
-test_project_root=project_root_path,
-)
-)
-test_results, coverage_data = opt.run_and_parse_tests(
+test_config = TestConfig(
+tests_root=tests_root,
+tests_project_rootdir=project_root_path,
+project_root_path=project_root_path,
+test_framework="pytest",
+pytest_cmd="pytest",
+)
+func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
+test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR,
test_env=test_env,
test_files=test_files,
@@ -944,7 +935,7 @@ def test_sort_parametrized_loop(input, expected_output):
assert test_results[5].runtime > 0
assert test_results[5].did_pass
-test_results, _ = opt.run_and_parse_tests(
+test_results, _ = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE,
test_env=test_env,
test_files=test_files,
@@ -1097,7 +1088,7 @@ def test_sort():
assert output == expected_output
"""
)
+code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/bubble_sort.py").resolve()
test_path = (
Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/pytest/test_perfinjector_bubble_sort_loop_results_temp.py"
@@ -1119,7 +1110,7 @@ def test_sort():
run_cwd = Path(__file__).parent.parent.resolve()
original_cwd = Path.cwd()
-func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path("module.py"))
+func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_path)
os.chdir(str(run_cwd))
success, new_test_behavior = inject_profiling_into_existing_test(
test_path, [CodePosition(11, 17)], func, project_root_path, "pytest", mode=TestingMode.BEHAVIOR
@@ -1169,18 +1160,15 @@ def test_sort():
] ]
) )
opt = Optimizer( test_config = TestConfig(
Namespace( tests_root=tests_root,
project_root=project_root_path, tests_project_rootdir=project_root_path,
disable_telemetry=True, project_root_path=project_root_path,
tests_root=tests_root, test_framework="pytest",
test_framework="pytest", pytest_cmd="pytest",
pytest_cmd="pytest",
experiment_id=None,
test_project_root=project_root_path,
)
) )
test_results, coverage_data = opt.run_and_parse_tests( func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -1222,7 +1210,7 @@ def test_sort():
) )
assert test_results[2].runtime > 0 assert test_results[2].runtime > 0
assert test_results[2].did_pass assert test_results[2].did_pass
test_results, coverage_data = opt.run_and_parse_tests( test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE, testing_type=TestingMode.PERFORMANCE,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -1358,7 +1346,7 @@ class TestPigLatin(unittest.TestCase):
self.assertEqual(output, list(range(50))) self.assertEqual(output, list(range(50)))
""" """
) )
code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/bubble_sort.py").resolve()
test_path = ( test_path = (
Path(__file__).parent.resolve() Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/unittest/test_perfinjector_bubble_sort_unittest_results_temp.py" / "../code_to_optimize/tests/unittest/test_perfinjector_bubble_sort_unittest_results_temp.py"
@@ -1380,7 +1368,7 @@ class TestPigLatin(unittest.TestCase):
run_cwd = Path(__file__).parent.parent.resolve() run_cwd = Path(__file__).parent.parent.resolve()
original_cwd = Path.cwd() original_cwd = Path.cwd()
func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path("module.py")) func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_path)
os.chdir(run_cwd) os.chdir(run_cwd)
success, new_test_behavior = inject_profiling_into_existing_test( success, new_test_behavior = inject_profiling_into_existing_test(
test_path, test_path,
@@ -1440,18 +1428,15 @@ class TestPigLatin(unittest.TestCase):
) )
] ]
) )
opt = Optimizer( test_config = TestConfig(
Namespace( tests_root=tests_root,
project_root=project_root_path, tests_project_rootdir=project_root_path,
disable_telemetry=True, project_root_path=project_root_path,
tests_root=tests_root, test_framework="unittest",
test_framework="unittest", pytest_cmd="pytest",
pytest_cmd="pytest",
experiment_id=None,
test_project_root=project_root_path,
)
) )
test_results, coverage_data = opt.run_and_parse_tests( func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -1493,7 +1478,7 @@ class TestPigLatin(unittest.TestCase):
) )
assert test_results[2].runtime > 0 assert test_results[2].runtime > 0
assert test_results[2].did_pass assert test_results[2].did_pass
test_results, coverage_data = opt.run_and_parse_tests( test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE, testing_type=TestingMode.PERFORMANCE,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -1617,7 +1602,7 @@ class TestPigLatin(unittest.TestCase):
self.assertEqual(output, expected_output) self.assertEqual(output, expected_output)
""" """
) )
code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/bubble_sort.py").resolve()
test_path = ( test_path = (
Path(__file__).parent.resolve() Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/unittest/test_perfinjector_bubble_sort_unittest_parametrized_results_temp.py" / "../code_to_optimize/tests/unittest/test_perfinjector_bubble_sort_unittest_parametrized_results_temp.py"
@@ -1638,7 +1623,7 @@ class TestPigLatin(unittest.TestCase):
run_cwd = Path(__file__).parent.parent.resolve() run_cwd = Path(__file__).parent.parent.resolve()
original_cwd = Path.cwd() original_cwd = Path.cwd()
func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path("module.py")) func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_path)
os.chdir(run_cwd) os.chdir(run_cwd)
success, new_test_behavior = inject_profiling_into_existing_test( success, new_test_behavior = inject_profiling_into_existing_test(
test_path, [CodePosition(16, 17)], func, project_root_path, "unittest", mode=TestingMode.BEHAVIOR test_path, [CodePosition(16, 17)], func, project_root_path, "unittest", mode=TestingMode.BEHAVIOR
@@ -1690,18 +1675,15 @@ class TestPigLatin(unittest.TestCase):
) )
] ]
) )
opt = Optimizer( test_config = TestConfig(
Namespace( tests_root=tests_root,
project_root=project_root_path, tests_project_rootdir=project_root_path,
disable_telemetry=True, project_root_path=project_root_path,
tests_root=tests_root, test_framework="unittest",
test_framework="unittest", pytest_cmd="pytest",
pytest_cmd="pytest",
experiment_id=None,
test_project_root=project_root_path,
)
) )
test_results, coverage_data = opt.run_and_parse_tests( func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -1744,7 +1726,7 @@ class TestPigLatin(unittest.TestCase):
assert test_results[2].runtime > 0 assert test_results[2].runtime > 0
assert test_results[2].did_pass assert test_results[2].did_pass
test_results, coverage_data = opt.run_and_parse_tests( test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE, testing_type=TestingMode.PERFORMANCE,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -1874,7 +1856,7 @@ class TestPigLatin(unittest.TestCase):
self.assertEqual(output, expected_output) self.assertEqual(output, expected_output)
""" """
) )
code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/bubble_sort.py").resolve()
test_path = ( test_path = (
Path(__file__).parent.resolve() Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/unittest/test_perfinjector_bubble_sort_unittest_loop_results_temp.py" / "../code_to_optimize/tests/unittest/test_perfinjector_bubble_sort_unittest_loop_results_temp.py"
@@ -1896,7 +1878,7 @@ class TestPigLatin(unittest.TestCase):
run_cwd = Path(__file__).parent.parent.resolve() run_cwd = Path(__file__).parent.parent.resolve()
original_cwd = Path.cwd() original_cwd = Path.cwd()
func = FunctionToOptimize(function_name="sorter", parents=[], file_path=Path("module.py")) func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_path)
os.chdir(run_cwd) os.chdir(run_cwd)
success, new_test_behavior = inject_profiling_into_existing_test( success, new_test_behavior = inject_profiling_into_existing_test(
test_path, [CodePosition(14, 21)], func, project_root_path, "unittest", mode=TestingMode.BEHAVIOR test_path, [CodePosition(14, 21)], func, project_root_path, "unittest", mode=TestingMode.BEHAVIOR
@@ -1944,19 +1926,15 @@ class TestPigLatin(unittest.TestCase):
) )
] ]
) )
test_config = TestConfig(
opt = Optimizer( tests_root=tests_root,
Namespace( tests_project_rootdir=project_root_path,
project_root=project_root_path, project_root_path=project_root_path,
disable_telemetry=True, test_framework="unittest",
tests_root=tests_root, pytest_cmd="pytest",
test_framework="unittest",
pytest_cmd="pytest",
experiment_id=None,
test_project_root=project_root_path,
)
) )
test_results, coverage_data = opt.run_and_parse_tests( func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
test_results, coverage_data = func_optimizer.run_and_parse_tests(
test_env=test_env, test_env=test_env,
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_files=test_files, test_files=test_files,
@@ -1999,7 +1977,7 @@ class TestPigLatin(unittest.TestCase):
assert test_results[2].runtime > 0 assert test_results[2].runtime > 0
assert test_results[2].did_pass assert test_results[2].did_pass
test_results, coverage_data = opt.run_and_parse_tests( test_results, coverage_data = func_optimizer.run_and_parse_tests(
test_env=test_env, test_env=test_env,
testing_type=TestingMode.PERFORMANCE, testing_type=TestingMode.PERFORMANCE,
test_files=test_files, test_files=test_files,
@@ -2127,7 +2105,7 @@ class TestPigLatin(unittest.TestCase):
self.assertEqual(output, expected_output) self.assertEqual(output, expected_output)
""" """
) )
code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/bubble_sort.py").resolve()
test_path = ( test_path = (
Path(__file__).parent.resolve() Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/unittest/test_perfinjector_bubble_sort_unittest_parametrized_loop_results_temp.py" / "../code_to_optimize/tests/unittest/test_perfinjector_bubble_sort_unittest_parametrized_loop_results_temp.py"
@@ -2148,7 +2126,7 @@ class TestPigLatin(unittest.TestCase):
run_cwd = Path(__file__).parent.parent.resolve() run_cwd = Path(__file__).parent.parent.resolve()
original_cwd = Path.cwd() original_cwd = Path.cwd()
f = FunctionToOptimize(function_name="sorter", file_path=Path("module.py"), parents=[]) f = FunctionToOptimize(function_name="sorter", file_path=code_path, parents=[])
os.chdir(run_cwd) os.chdir(run_cwd)
success, new_test_behavior = inject_profiling_into_existing_test( success, new_test_behavior = inject_profiling_into_existing_test(
test_path, [CodePosition(17, 21)], f, project_root_path, "unittest", mode=TestingMode.BEHAVIOR test_path, [CodePosition(17, 21)], f, project_root_path, "unittest", mode=TestingMode.BEHAVIOR
@@ -2197,18 +2175,15 @@ class TestPigLatin(unittest.TestCase):
) )
] ]
) )
opt = Optimizer( test_config = TestConfig(
Namespace( tests_root=tests_root,
project_root=project_root_path, tests_project_rootdir=project_root_path,
disable_telemetry=True, project_root_path=project_root_path,
tests_root=tests_root, test_framework="unittest",
test_framework="unittest", pytest_cmd="pytest",
pytest_cmd="pytest",
experiment_id=None,
test_project_root=project_root_path,
)
) )
test_results, coverage_data = opt.run_and_parse_tests( func_optimizer = FunctionOptimizer(function_to_optimize=f, test_cfg=test_config)
test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -2283,7 +2258,7 @@ class TestPigLatin(unittest.TestCase):
) )
assert test_results[5].runtime > 0 assert test_results[5].runtime > 0
assert test_results[5].did_pass assert test_results[5].did_pass
test_results, coverage_data = opt.run_and_parse_tests( test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE, testing_type=TestingMode.PERFORMANCE,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -2776,7 +2751,7 @@ def test_code_replacement10() -> None:
func = FunctionToOptimize( func = FunctionToOptimize(
function_name="get_code_optimization_context", function_name="get_code_optimization_context",
parents=[FunctionParent("Optimizer", "ClassDef")], parents=[FunctionParent("Optimizer", "ClassDef")],
file_path=Path("module.py"), file_path=Path(f.name),
) )
original_cwd = Path.cwd() original_cwd = Path.cwd()
run_cwd = Path(__file__).parent.parent.resolve() run_cwd = Path(__file__).parent.parent.resolve()
@@ -2824,7 +2799,7 @@ def test_sleepfunc_sequence_short(n, expected_total_sleep_time):
assert output == expected_total_sleep_time assert output == expected_total_sleep_time
""" """
) )
code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/sleeptime.py").resolve()
test_path = ( test_path = (
Path(__file__).parent.resolve() Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/pytest/test_time_correction_instrumentation_temp.py" / "../code_to_optimize/tests/pytest/test_time_correction_instrumentation_temp.py"
@@ -2837,7 +2812,7 @@ def test_sleepfunc_sequence_short(n, expected_total_sleep_time):
project_root_path = (Path(__file__).parent.resolve() / "../").resolve() project_root_path = (Path(__file__).parent.resolve() / "../").resolve()
original_cwd = Path.cwd() original_cwd = Path.cwd()
run_cwd = Path(__file__).parent.parent.resolve() run_cwd = Path(__file__).parent.parent.resolve()
func = FunctionToOptimize(function_name="accurate_sleepfunc", parents=[], file_path=Path("module.py")) func = FunctionToOptimize(function_name="accurate_sleepfunc", parents=[], file_path=code_path)
os.chdir(run_cwd) os.chdir(run_cwd)
success, new_test = inject_profiling_into_existing_test( success, new_test = inject_profiling_into_existing_test(
test_path, [CodePosition(8, 13)], func, project_root_path, "pytest", mode=TestingMode.PERFORMANCE test_path, [CodePosition(8, 13)], func, project_root_path, "pytest", mode=TestingMode.PERFORMANCE
@@ -2858,17 +2833,14 @@ def test_sleepfunc_sequence_short(n, expected_total_sleep_time):
with test_path.open("w") as f: with test_path.open("w") as f:
f.write(new_test) f.write(new_test)
opt = Optimizer( test_config = TestConfig(
Namespace( tests_root=tests_root,
project_root=project_root_path, tests_project_rootdir=project_root_path,
disable_telemetry=True, project_root_path=project_root_path,
tests_root=tests_root, test_framework="pytest",
test_framework="pytest", pytest_cmd="pytest",
pytest_cmd="pytest",
experiment_id=None,
test_project_root=project_root_path,
)
) )
func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
test_files = TestFiles( test_files = TestFiles(
test_files=[ test_files=[
TestFile( TestFile(
@@ -2879,7 +2851,7 @@ def test_sleepfunc_sequence_short(n, expected_total_sleep_time):
) )
] ]
) )
test_results, coverage_data = opt.run_and_parse_tests( test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE, testing_type=TestingMode.PERFORMANCE,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -2946,7 +2918,7 @@ class TestPigLatin(unittest.TestCase):
output = codeflash_wrap(accurate_sleepfunc, '{module_path}', 'TestPigLatin', 'test_sleepfunc_sequence_short', 'accurate_sleepfunc', '0', codeflash_loop_index, n) output = codeflash_wrap(accurate_sleepfunc, '{module_path}', 'TestPigLatin', 'test_sleepfunc_sequence_short', 'accurate_sleepfunc', '0', codeflash_loop_index, n)
""" """
) )
code_path = (Path(__file__).parent.resolve() / "../code_to_optimize/sleeptime.py").resolve()
test_path = ( test_path = (
Path(__file__).parent.resolve() Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/unittest/test_time_correction_instrumentation_unittest_temp.py" / "../code_to_optimize/tests/unittest/test_time_correction_instrumentation_unittest_temp.py"
@@ -2959,7 +2931,7 @@ class TestPigLatin(unittest.TestCase):
project_root_path = (Path(__file__).parent.resolve() / "../").resolve() project_root_path = (Path(__file__).parent.resolve() / "../").resolve()
original_cwd = Path.cwd() original_cwd = Path.cwd()
run_cwd = Path(__file__).parent.parent.resolve() run_cwd = Path(__file__).parent.parent.resolve()
func = FunctionToOptimize(function_name="accurate_sleepfunc", parents=[], file_path=Path("module.py")) func = FunctionToOptimize(function_name="accurate_sleepfunc", parents=[], file_path=code_path)
os.chdir(run_cwd) os.chdir(run_cwd)
success, new_test = inject_profiling_into_existing_test( success, new_test = inject_profiling_into_existing_test(
test_path, [CodePosition(12, 17)], func, project_root_path, "unittest", mode=TestingMode.PERFORMANCE test_path, [CodePosition(12, 17)], func, project_root_path, "unittest", mode=TestingMode.PERFORMANCE
@@ -2980,17 +2952,6 @@ class TestPigLatin(unittest.TestCase):
with test_path.open("w") as f: with test_path.open("w") as f:
f.write(new_test) f.write(new_test)
opt = Optimizer(
Namespace(
project_root=project_root_path,
disable_telemetry=True,
tests_root=tests_root,
test_framework="unittest",
pytest_cmd="pytest",
experiment_id=None,
test_project_root=project_root_path,
)
)
test_files = TestFiles( test_files = TestFiles(
test_files=[ test_files=[
TestFile( TestFile(
@@ -3009,7 +2970,15 @@ class TestPigLatin(unittest.TestCase):
) )
] ]
) )
test_results, coverage_data = opt.run_and_parse_tests( test_config = TestConfig(
tests_root=tests_root,
tests_project_rootdir=project_root_path,
project_root_path=project_root_path,
test_framework="unittest",
pytest_cmd="pytest",
)
func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config)
test_results, coverage_data = func_optimizer.run_and_parse_tests(
testing_type=TestingMode.PERFORMANCE, testing_type=TestingMode.PERFORMANCE,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,

View file

@@ -118,9 +118,14 @@ def test_single_element_list():
) )
# Init paths # Init paths
tests_root = (Path(__file__).parent.resolve() / "../code_to_optimize/tests/pytest/").resolve() test_path = (
test_path = tests_root / "test_aiservice_behavior_results_temp.py" Path(__file__).parent.resolve() / "../code_to_optimize/tests/pytest/test_aiservice_behavior_results_temp.py"
test_path_perf = tests_root / "test_aiservice_behavior_results_perf_temp.py" ).resolve()
test_path_perf = (
Path(__file__).parent.resolve()
/ "../code_to_optimize/tests/pytest/test_aiservice_behavior_results_perf_temp.py"
).resolve()
tests_root = Path(__file__).parent.resolve() / "../code_to_optimize/tests/pytest/"
project_root_path = (Path(__file__).parent / "..").resolve() project_root_path = (Path(__file__).parent / "..").resolve()
run_cwd = Path(__file__).parent.parent.resolve() run_cwd = Path(__file__).parent.parent.resolve()
os.chdir(run_cwd) os.chdir(run_cwd)
@@ -161,7 +166,9 @@ def test_single_element_list():
] ]
) )
a = BubbleSorter() a = BubbleSorter()
test_results, coverage_data = opt.run_and_parse_tests( function_to_optimize = FunctionToOptimize("sorter", fto_path, [FunctionParent("BubbleSorter", "ClassDef")])
func_opt = opt.create_function_optimizer(function_to_optimize)
test_results, coverage_data = func_opt.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -194,7 +201,8 @@ class BubbleSorter:
return arr return arr
""" """
fto_path.write_text(optimized_code_mutated_attr, "utf-8") fto_path.write_text(optimized_code_mutated_attr, "utf-8")
test_results_mutated_attr, coverage_data = opt.run_and_parse_tests( func_opt = opt.create_function_optimizer(function_to_optimize)
test_results_mutated_attr, coverage_data = func_opt.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -293,7 +301,8 @@ def test_single_element_list():
) )
] ]
) )
test_results, coverage_data = opt.run_and_parse_tests( func_opt = opt.create_function_optimizer(function_to_optimize)
test_results, coverage_data = func_opt.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -357,7 +366,8 @@ class BubbleSorter:
test_project_root=project_root_path, test_project_root=project_root_path,
) )
) )
test_results_mutated_attr, coverage_data = opt.run_and_parse_tests( func_opt = opt.create_function_optimizer(function_to_optimize)
test_results_mutated_attr, coverage_data = func_opt.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,
@@ -403,7 +413,8 @@ class BubbleSorter:
test_project_root=project_root_path, test_project_root=project_root_path,
) )
) )
test_results_new_attr, coverage_data = opt.run_and_parse_tests( func_opt = opt.create_function_optimizer(function_to_optimize)
test_results_new_attr, coverage_data = func_opt.run_and_parse_tests(
testing_type=TestingMode.BEHAVIOR, testing_type=TestingMode.BEHAVIOR,
test_env=test_env, test_env=test_env,
test_files=test_files, test_files=test_files,