mirror of
https://github.com/codeflash-ai/codeflash.git
synced 2026-05-04 18:25:17 +00:00
WIP JS support
This commit is contained in:
parent
4e1b5cf57a
commit
74c4035595
27 changed files with 5500 additions and 19 deletions
50
code_to_optimize_js/bubble_sort.js
Normal file
50
code_to_optimize_js/bubble_sort.js
Normal file
|
|
@@ -0,0 +1,50 @@
|
|||
/**
|
||||
* Bubble sort implementation - intentionally inefficient for optimization testing.
|
||||
*/
|
||||
|
||||
/**
 * Sort an array using bubble sort algorithm.
 * @param {number[]} arr - The array to sort
 * @returns {number[]} - The sorted array
 */
function bubbleSort(arr) {
  // Work on a copy so the caller's array is left untouched.
  const sorted = [...arr];
  const len = sorted.length;

  for (let pass = 0; pass < len - 1; pass++) {
    // After each pass the largest unsorted value has bubbled to the end.
    for (let idx = 0; idx < len - pass - 1; idx++) {
      if (sorted[idx] > sorted[idx + 1]) {
        [sorted[idx], sorted[idx + 1]] = [sorted[idx + 1], sorted[idx]];
      }
    }
  }

  return sorted;
}
|
||||
|
||||
/**
 * Sort an array in descending order.
 * @param {number[]} arr - The array to sort
 * @returns {number[]} - The sorted array in descending order
 */
function bubbleSortDescending(arr) {
  // Copy first: bubble sort is in-place, but callers keep their array intact.
  const sorted = [...arr];
  const len = sorted.length;

  for (let pass = 0; pass < len - 1; pass++) {
    for (let idx = 0; idx < len - pass - 1; idx++) {
      // Swap when the left value is smaller, pushing small values to the end.
      if (sorted[idx] < sorted[idx + 1]) {
        [sorted[idx], sorted[idx + 1]] = [sorted[idx + 1], sorted[idx]];
      }
    }
  }

  return sorted;
}
|
||||
|
||||
module.exports = { bubbleSort, bubbleSortDescending };
|
||||
54
code_to_optimize_js/fibonacci.js
Normal file
54
code_to_optimize_js/fibonacci.js
Normal file
|
|
@@ -0,0 +1,54 @@
|
|||
/**
|
||||
* Fibonacci implementations - intentionally inefficient for optimization testing.
|
||||
*/
|
||||
|
||||
/**
 * Calculate the nth Fibonacci number using naive recursion.
 * This is intentionally slow to demonstrate optimization potential.
 * @param {number} n - The index of the Fibonacci number to calculate
 * @returns {number} - The nth Fibonacci number
 */
function fibonacci(n) {
  // Base cases fib(0) = 0 and fib(1) = 1; everything else recurses twice.
  return n <= 1 ? n : fibonacci(n - 1) + fibonacci(n - 2);
}
|
||||
|
||||
/**
 * Check if a number is a Fibonacci number.
 * @param {number} num - The number to check
 * @returns {boolean} - True if num is a Fibonacci number
 */
function isFibonacci(num) {
  // num is a Fibonacci number iff 5*num^2 + 4 or 5*num^2 - 4 is a perfect square.
  const fiveSquared = 5 * num * num;
  return isPerfectSquare(fiveSquared + 4) || isPerfectSquare(fiveSquared - 4);
}
|
||||
|
||||
/**
 * Check if a number is a perfect square.
 * @param {number} n - The number to check
 * @returns {boolean} - True if n is a perfect square
 */
function isPerfectSquare(n) {
  if (n < 0) {
    return false; // Math.sqrt of a negative is NaN; never a perfect square.
  }
  // Round-and-square is robust against floating-point error in Math.sqrt for
  // large integers (exact for safe integers), unlike comparing sqrt directly
  // to its floor, which can misclassify values near 2^52 and above.
  const root = Math.round(Math.sqrt(n));
  return root * root === n;
}
|
||||
|
||||
/**
 * Generate an array of Fibonacci numbers up to n.
 * @param {number} n - The number of Fibonacci numbers to generate
 * @returns {number[]} - Array of Fibonacci numbers
 */
function fibonacciSequence(n) {
  // Recomputes each term from scratch via the naive recursive fibonacci —
  // deliberately wasteful, matching this fixture file's purpose.
  const sequence = [];
  let index = 0;
  while (index < n) {
    sequence.push(fibonacci(index));
    index += 1;
  }
  return sequence;
}
|
||||
|
||||
module.exports = { fibonacci, isFibonacci, isPerfectSquare, fibonacciSequence };
|
||||
29
code_to_optimize_js/package.json
Normal file
29
code_to_optimize_js/package.json
Normal file
|
|
@@ -0,0 +1,29 @@
|
|||
{
|
||||
"name": "codeflash-js-test",
|
||||
"version": "1.0.0",
|
||||
"description": "Sample JavaScript project for codeflash optimization testing",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "jest",
|
||||
"test:coverage": "jest --coverage"
|
||||
},
|
||||
"keywords": ["codeflash", "optimization", "testing"],
|
||||
"author": "CodeFlash Inc.",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"jest": "^29.7.0",
|
||||
"jest-junit": "^16.0.0"
|
||||
},
|
||||
"jest": {
|
||||
"testEnvironment": "node",
|
||||
"testMatch": ["**/tests/**/*.test.js"],
|
||||
"collectCoverageFrom": ["*.js", "!jest.config.js"],
|
||||
"reporters": [
|
||||
"default",
|
||||
["jest-junit", {
|
||||
"outputDirectory": ".codeflash",
|
||||
"outputName": "jest-results.xml"
|
||||
}]
|
||||
]
|
||||
}
|
||||
}
|
||||
88
code_to_optimize_js/string_utils.js
Normal file
88
code_to_optimize_js/string_utils.js
Normal file
|
|
@@ -0,0 +1,88 @@
|
|||
/**
|
||||
* String utility functions - some intentionally inefficient for optimization testing.
|
||||
*/
|
||||
|
||||
/**
 * Reverse a string character by character.
 * @param {string} str - The string to reverse
 * @returns {string} - The reversed string
 */
function reverseString(str) {
  // Builds the result one UTF-16 code unit at a time via repeated
  // concatenation — intentionally naive, kept for optimization testing.
  let reversed = '';
  let pos = str.length;
  while (pos > 0) {
    pos -= 1;
    reversed += str[pos];
  }
  return reversed;
}
|
||||
|
||||
/**
 * Check if a string is a palindrome.
 * @param {string} str - The string to check
 * @returns {boolean} - True if str is a palindrome
 */
function isPalindrome(str) {
  // Normalize: lowercase, then drop everything except letters and digits so
  // punctuation and spacing don't affect the comparison.
  const normalized = str.toLowerCase().replace(/[^a-z0-9]/g, '');
  return normalized === reverseString(normalized);
}
|
||||
|
||||
/**
 * Count occurrences of a substring in a string.
 * Overlapping matches are counted (e.g. 'aa' occurs twice in 'aaa').
 * @param {string} str - The string to search in
 * @param {string} sub - The substring to count
 * @returns {number} - Number of occurrences
 */
function countOccurrences(str, sub) {
  // Guard: an empty needle matches at every position, including past the last
  // character. Without this, indexOf('', pos) clamps to str.length and never
  // returns -1, so the scan below loops forever.
  if (sub === '') {
    return str.length + 1;
  }

  let count = 0;
  let pos = str.indexOf(sub);
  while (pos !== -1) {
    count++;
    pos = str.indexOf(sub, pos + 1); // Advance one char so overlaps are found
  }
  return count;
}
|
||||
|
||||
/**
 * Find the longest common prefix of an array of strings.
 * @param {string[]} strs - Array of strings
 * @returns {string} - The longest common prefix
 */
function longestCommonPrefix(strs) {
  if (strs.length === 0) return '';
  if (strs.length === 1) return strs[0];

  // Start with the first string and trim it until every string starts with it.
  let candidate = strs[0];
  for (const current of strs.slice(1)) {
    while (!current.startsWith(candidate)) {
      candidate = candidate.slice(0, -1);
      if (candidate === '') return ''; // Nothing shared at all
    }
  }
  return candidate;
}
|
||||
|
||||
/**
 * Convert a string to title case.
 * @param {string} str - The string to convert
 * @returns {string} - The title-cased string
 */
function toTitleCase(str) {
  const words = str.toLowerCase().split(' ');
  const titled = words.map((word) => {
    // Empty segments (from consecutive spaces) pass through unchanged.
    if (word === '') return word;
    return word[0].toUpperCase() + word.slice(1);
  });
  return titled.join(' ');
}
|
||||
|
||||
module.exports = {
|
||||
reverseString,
|
||||
isPalindrome,
|
||||
countOccurrences,
|
||||
longestCommonPrefix,
|
||||
toTitleCase
|
||||
};
|
||||
47
code_to_optimize_js/tests/bubble_sort.test.js
Normal file
47
code_to_optimize_js/tests/bubble_sort.test.js
Normal file
|
|
@@ -0,0 +1,47 @@
|
|||
// Jest suite for the bubble sort fixtures.
const { bubbleSort, bubbleSortDescending } = require('../bubble_sort');

describe('bubbleSort', () => {
  it('sorts an empty array', () => {
    expect(bubbleSort([])).toEqual([]);
  });

  it('sorts a single element array', () => {
    expect(bubbleSort([1])).toEqual([1]);
  });

  it('sorts an already sorted array', () => {
    expect(bubbleSort([1, 2, 3, 4, 5])).toEqual([1, 2, 3, 4, 5]);
  });

  it('sorts a reverse sorted array', () => {
    expect(bubbleSort([5, 4, 3, 2, 1])).toEqual([1, 2, 3, 4, 5]);
  });

  it('sorts an array with duplicates', () => {
    expect(bubbleSort([3, 1, 4, 1, 5, 9, 2, 6])).toEqual([1, 1, 2, 3, 4, 5, 6, 9]);
  });

  it('sorts negative numbers', () => {
    expect(bubbleSort([-3, -1, -4, -1, -5])).toEqual([-5, -4, -3, -1, -1]);
  });

  it('does not mutate original array', () => {
    const input = [3, 1, 2];
    bubbleSort(input);
    expect(input).toEqual([3, 1, 2]);
  });
});

describe('bubbleSortDescending', () => {
  it('sorts in descending order', () => {
    expect(bubbleSortDescending([1, 3, 2, 5, 4])).toEqual([5, 4, 3, 2, 1]);
  });

  it('handles empty array', () => {
    expect(bubbleSortDescending([])).toEqual([]);
  });

  it('handles single element', () => {
    expect(bubbleSortDescending([42])).toEqual([42]);
  });
});
|
||||
97
code_to_optimize_js/tests/fibonacci.test.js
Normal file
97
code_to_optimize_js/tests/fibonacci.test.js
Normal file
|
|
@@ -0,0 +1,97 @@
|
|||
// Jest suite for the Fibonacci fixtures.
const { fibonacci, isFibonacci, isPerfectSquare, fibonacciSequence } = require('../fibonacci');

describe('fibonacci', () => {
  it('returns 0 for n=0', () => {
    expect(fibonacci(0)).toBe(0);
  });

  it('returns 1 for n=1', () => {
    expect(fibonacci(1)).toBe(1);
  });

  it('returns 1 for n=2', () => {
    expect(fibonacci(2)).toBe(1);
  });

  it('returns 5 for n=5', () => {
    expect(fibonacci(5)).toBe(5);
  });

  it('returns 55 for n=10', () => {
    expect(fibonacci(10)).toBe(55);
  });

  it('returns 233 for n=13', () => {
    expect(fibonacci(13)).toBe(233);
  });
});

describe('isFibonacci', () => {
  it('returns true for 0', () => {
    expect(isFibonacci(0)).toBe(true);
  });

  it('returns true for 1', () => {
    expect(isFibonacci(1)).toBe(true);
  });

  it('returns true for 8', () => {
    expect(isFibonacci(8)).toBe(true);
  });

  it('returns true for 13', () => {
    expect(isFibonacci(13)).toBe(true);
  });

  it('returns false for 4', () => {
    expect(isFibonacci(4)).toBe(false);
  });

  it('returns false for 6', () => {
    expect(isFibonacci(6)).toBe(false);
  });
});

describe('isPerfectSquare', () => {
  it('returns true for 0', () => {
    expect(isPerfectSquare(0)).toBe(true);
  });

  it('returns true for 1', () => {
    expect(isPerfectSquare(1)).toBe(true);
  });

  it('returns true for 4', () => {
    expect(isPerfectSquare(4)).toBe(true);
  });

  it('returns true for 16', () => {
    expect(isPerfectSquare(16)).toBe(true);
  });

  it('returns false for 2', () => {
    expect(isPerfectSquare(2)).toBe(false);
  });

  it('returns false for 3', () => {
    expect(isPerfectSquare(3)).toBe(false);
  });
});

describe('fibonacciSequence', () => {
  it('returns empty array for n=0', () => {
    expect(fibonacciSequence(0)).toEqual([]);
  });

  it('returns [0] for n=1', () => {
    expect(fibonacciSequence(1)).toEqual([0]);
  });

  it('returns first 5 Fibonacci numbers', () => {
    expect(fibonacciSequence(5)).toEqual([0, 1, 1, 2, 3]);
  });

  it('returns first 10 Fibonacci numbers', () => {
    expect(fibonacciSequence(10)).toEqual([0, 1, 1, 2, 3, 5, 8, 13, 21, 34]);
  });
});
|
||||
121
code_to_optimize_js/tests/string_utils.test.js
Normal file
121
code_to_optimize_js/tests/string_utils.test.js
Normal file
|
|
@@ -0,0 +1,121 @@
|
|||
// Jest suite for the string utility fixtures.
const {
  reverseString,
  isPalindrome,
  countOccurrences,
  longestCommonPrefix,
  toTitleCase
} = require('../string_utils');

describe('reverseString', () => {
  it('reverses a simple string', () => {
    expect(reverseString('hello')).toBe('olleh');
  });

  it('returns empty string for empty input', () => {
    expect(reverseString('')).toBe('');
  });

  it('handles single character', () => {
    expect(reverseString('a')).toBe('a');
  });

  it('handles palindrome', () => {
    expect(reverseString('radar')).toBe('radar');
  });

  it('handles spaces', () => {
    expect(reverseString('hello world')).toBe('dlrow olleh');
  });
});

describe('isPalindrome', () => {
  it('returns true for simple palindrome', () => {
    expect(isPalindrome('radar')).toBe(true);
  });

  it('returns true for palindrome with mixed case', () => {
    expect(isPalindrome('RaceCar')).toBe(true);
  });

  it('returns true for palindrome with spaces and punctuation', () => {
    expect(isPalindrome('A man, a plan, a canal: Panama')).toBe(true);
  });

  it('returns false for non-palindrome', () => {
    expect(isPalindrome('hello')).toBe(false);
  });

  it('returns true for empty string', () => {
    expect(isPalindrome('')).toBe(true);
  });

  it('returns true for single character', () => {
    expect(isPalindrome('a')).toBe(true);
  });
});

describe('countOccurrences', () => {
  it('counts single occurrence', () => {
    expect(countOccurrences('hello', 'ell')).toBe(1);
  });

  it('counts multiple occurrences', () => {
    expect(countOccurrences('abababab', 'ab')).toBe(4);
  });

  it('returns 0 for no occurrences', () => {
    expect(countOccurrences('hello', 'xyz')).toBe(0);
  });

  it('handles overlapping matches', () => {
    expect(countOccurrences('aaa', 'aa')).toBe(2);
  });

  it('handles empty substring', () => {
    expect(countOccurrences('hello', '')).toBe(6);
  });
});

describe('longestCommonPrefix', () => {
  it('finds common prefix', () => {
    expect(longestCommonPrefix(['flower', 'flow', 'flight'])).toBe('fl');
  });

  it('returns empty for no common prefix', () => {
    expect(longestCommonPrefix(['dog', 'racecar', 'car'])).toBe('');
  });

  it('returns empty for empty array', () => {
    expect(longestCommonPrefix([])).toBe('');
  });

  it('returns the string for single element array', () => {
    expect(longestCommonPrefix(['hello'])).toBe('hello');
  });

  it('handles identical strings', () => {
    expect(longestCommonPrefix(['test', 'test', 'test'])).toBe('test');
  });
});

describe('toTitleCase', () => {
  it('converts simple string', () => {
    expect(toTitleCase('hello world')).toBe('Hello World');
  });

  it('handles already title case', () => {
    expect(toTitleCase('Hello World')).toBe('Hello World');
  });

  it('handles uppercase input', () => {
    expect(toTitleCase('HELLO WORLD')).toBe('Hello World');
  });

  it('handles single word', () => {
    expect(toTitleCase('hello')).toBe('Hello');
  });

  it('handles empty string', () => {
    expect(toTitleCase('')).toBe('');
  });
});
|
||||
|
|
@@ -441,7 +441,18 @@ def replace_function_definitions_in_module(
|
|||
preexisting_objects: set[tuple[str, tuple[FunctionParent, ...]]],
|
||||
project_root_path: Path,
|
||||
should_add_global_assignments: bool = True, # noqa: FBT001, FBT002
|
||||
function_to_optimize: Optional["FunctionToOptimize"] = None,
|
||||
) -> bool:
|
||||
# Route to language-specific implementation for non-Python languages
|
||||
if optimized_code.language and optimized_code.language != "python":
|
||||
return replace_function_definitions_for_language(
|
||||
function_names,
|
||||
optimized_code,
|
||||
module_abspath,
|
||||
project_root_path,
|
||||
function_to_optimize,
|
||||
)
|
||||
|
||||
source_code: str = module_abspath.read_text(encoding="utf8")
|
||||
code_to_apply = get_optimized_code_for_module(module_abspath.relative_to(project_root_path), optimized_code)
|
||||
|
||||
|
|
@@ -463,6 +474,78 @@ def replace_function_definitions_in_module(
|
|||
return True
|
||||
|
||||
|
||||
def replace_function_definitions_for_language(
    function_names: list[str],
    optimized_code: CodeStringsMarkdown,
    module_abspath: Path,
    project_root_path: Path,
    function_to_optimize: Optional["FunctionToOptimize"] = None,
) -> bool:
    """Replace function definitions for non-Python languages.

    Uses the language support abstraction to perform code replacement.

    Args:
        function_names: List of qualified function names to replace.
        optimized_code: The optimized code to apply.
        module_abspath: Path to the module file.
        project_root_path: Root of the project.
        function_to_optimize: The function being optimized (needed for line info).

    Returns:
        True if the code was modified, False if no changes.

    """
    from codeflash.languages import get_language_support
    from codeflash.languages.base import FunctionInfo, Language, ParentInfo

    original_source: str = module_abspath.read_text(encoding="utf8")
    code_to_apply = get_optimized_code_for_module(module_abspath.relative_to(project_root_path), optimized_code)

    if not code_to_apply.strip():
        return False

    # Get language support
    language = Language(optimized_code.language)
    lang_support = get_language_support(language)

    # If we have function_to_optimize with line info, use it for precise replacement
    if function_to_optimize and function_to_optimize.starting_line and function_to_optimize.ending_line:
        func_info = FunctionInfo(
            name=function_to_optimize.function_name,
            file_path=module_abspath,
            start_line=function_to_optimize.starting_line,
            end_line=function_to_optimize.ending_line,
            parents=tuple(ParentInfo(name=p.name, type=p.type) for p in function_to_optimize.parents),
            is_async=function_to_optimize.is_async,
            language=language,
        )
        new_code = lang_support.replace_function(original_source, func_info, code_to_apply)
    else:
        # Fallback: discover functions and replace the first one that matches.
        # BUG FIX: the previous version assigned the replacement result back to
        # the source variable before the "did anything change?" comparison
        # below, so the comparison always matched and the file was never
        # written. We now keep the original source untouched for comparison.
        new_code = None
        for func in lang_support.discover_functions(module_abspath):
            if func.qualified_name in function_names or func.name in function_names:
                new_code = lang_support.replace_function(original_source, func, code_to_apply)
                break  # line positions from discover_functions go stale after an edit
        if new_code is None:
            # No matching function found
            logger.warning(f"Could not find function {function_names} in {module_abspath}")
            return False

    # Only write when the replacement actually changed the module text
    if original_source.strip() == new_code.strip():
        return False

    module_abspath.write_text(new_code, encoding="utf8")
    return True
|
||||
|
||||
|
||||
def get_optimized_code_for_module(relative_path: Path, optimized_code: CodeStringsMarkdown) -> str:
|
||||
file_to_code_context = optimized_code.file_to_path()
|
||||
module_optimized_code = file_to_code_context.get(str(relative_path))
|
||||
|
|
|
|||
|
|
@@ -28,6 +28,9 @@ from codeflash.models.models import (
|
|||
)
|
||||
from codeflash.optimization.function_context import belongs_to_function_qualified
|
||||
|
||||
# Language support imports for multi-language code context extraction
|
||||
from codeflash.languages.base import Language
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
|
||||
|
|
@@ -43,6 +46,12 @@ def get_code_optimization_context(
|
|||
optim_token_limit: int = OPTIMIZATION_CONTEXT_TOKEN_LIMIT,
|
||||
testgen_token_limit: int = TESTGEN_CONTEXT_TOKEN_LIMIT,
|
||||
) -> CodeOptimizationContext:
|
||||
# Route to language-specific implementation for non-Python languages
|
||||
if function_to_optimize.language and function_to_optimize.language != "python":
|
||||
return get_code_optimization_context_for_language(
|
||||
function_to_optimize, project_root_path, optim_token_limit, testgen_token_limit
|
||||
)
|
||||
|
||||
# Get FunctionSource representation of helpers of FTO
|
||||
helpers_of_fto_dict, helpers_of_fto_list = get_function_sources_from_jedi(
|
||||
{function_to_optimize.file_path: {function_to_optimize.qualified_name}}, project_root_path
|
||||
|
|
@@ -184,6 +193,130 @@ def get_code_optimization_context(
|
|||
)
|
||||
|
||||
|
||||
def get_code_optimization_context_for_language(
    function_to_optimize: FunctionToOptimize,
    project_root_path: Path,
    optim_token_limit: int = OPTIMIZATION_CONTEXT_TOKEN_LIMIT,
    testgen_token_limit: int = TESTGEN_CONTEXT_TOKEN_LIMIT,
) -> CodeOptimizationContext:
    """Extract code optimization context for non-Python languages.

    Uses the language support abstraction to extract code context and converts
    it to the CodeOptimizationContext format expected by the pipeline.

    Args:
        function_to_optimize: The function to extract context for.
        project_root_path: Root of the project.
        optim_token_limit: Token limit for optimization context.
        testgen_token_limit: Token limit for testgen context.

    Returns:
        CodeOptimizationContext with target code and dependencies.

    """
    from codeflash.languages import get_language_support
    from codeflash.languages.base import FunctionInfo, ParentInfo

    lang = Language(function_to_optimize.language)
    support = get_language_support(lang)

    def relative_to_project(path: Path) -> Path:
        # Fall back to the absolute path when it lives outside the project root.
        try:
            return path.resolve().relative_to(project_root_path.resolve())
        except ValueError:
            return path

    # Bridge FunctionToOptimize into the language-support FunctionInfo shape.
    func_info = FunctionInfo(
        name=function_to_optimize.function_name,
        file_path=function_to_optimize.file_path,
        start_line=function_to_optimize.starting_line or 1,
        end_line=function_to_optimize.ending_line or 1,
        parents=tuple(ParentInfo(name=p.name, type=p.type) for p in function_to_optimize.parents),
        is_async=function_to_optimize.is_async,
        is_method=len(function_to_optimize.parents) > 0,
        language=lang,
    )

    code_context = support.extract_code_context(func_info, project_root_path, project_root_path)

    # Prepend the module imports (when any) so the target code is self-contained.
    imports_block = "\n".join(code_context.imports) if code_context.imports else ""
    target_code = code_context.target_code
    if imports_block:
        target_code = imports_block + "\n\n" + target_code

    target_code_string = CodeString(
        code=target_code,
        file_path=relative_to_project(function_to_optimize.file_path),
        language=function_to_optimize.language,
    )

    read_writable_code = CodeStringsMarkdown(
        code_strings=[target_code_string],
        language=function_to_optimize.language,
    )

    helper_code_strings: list = []
    helper_function_sources: list = []
    for helper in code_context.helper_functions:
        helper_code_strings.append(
            CodeString(
                code=helper.source_code,
                file_path=relative_to_project(helper.file_path),
                language=function_to_optimize.language,
            )
        )
        # FunctionSource keeps the rest of the pipeline working without jedi.
        helper_function_sources.append(
            FunctionSource(
                file_path=helper.file_path,
                qualified_name=helper.qualified_name,
                fully_qualified_name=helper.qualified_name,
                only_function_name=helper.name,
                source_code=helper.source_code,
                jedi_definition=None,
            )
        )

    # Testgen context bundles the target plus every helper.
    testgen_context = CodeStringsMarkdown(
        code_strings=[target_code_string, *helper_code_strings],
        language=function_to_optimize.language,
    )

    # Enforce token budgets before handing context to the pipeline.
    if encoded_tokens_len(read_writable_code.markdown) > optim_token_limit:
        raise ValueError("Read-writable code has exceeded token limit, cannot proceed")
    if encoded_tokens_len(testgen_context.markdown) > testgen_token_limit:
        raise ValueError("Testgen code context has exceeded token limit, cannot proceed")

    return CodeOptimizationContext(
        testgen_context=testgen_context,
        read_writable_code=read_writable_code,
        read_only_context_code=code_context.read_only_context,
        hashing_code_context=target_code,
        hashing_code_context_hash=hashlib.sha256(target_code.encode("utf-8")).hexdigest(),
        helper_functions=helper_function_sources,
        preexisting_objects=set(),  # Not implemented for non-Python yet
    )
|
||||
|
||||
|
||||
def extract_code_string_context_from_files(
|
||||
helpers_of_fto: dict[Path, set[FunctionSource]],
|
||||
helpers_of_helpers: dict[Path, set[FunctionSource]],
|
||||
|
|
|
|||
|
|
@@ -554,11 +554,112 @@ def filter_test_files_by_imports(
|
|||
return filtered_map
|
||||
|
||||
|
||||
def _detect_language_from_functions(
|
||||
file_to_funcs: dict[Path, list[FunctionToOptimize]] | None,
|
||||
) -> str | None:
|
||||
"""Detect language from the functions to optimize.
|
||||
|
||||
Args:
|
||||
file_to_funcs: Dictionary mapping file paths to functions.
|
||||
|
||||
Returns:
|
||||
Language string (e.g., "python", "javascript") or None if not determinable.
|
||||
|
||||
"""
|
||||
if not file_to_funcs:
|
||||
return None
|
||||
|
||||
for funcs in file_to_funcs.values():
|
||||
if funcs:
|
||||
return funcs[0].language
|
||||
return None
|
||||
|
||||
|
||||
def discover_tests_for_language(
    cfg: TestConfig,
    language: str,
    file_to_funcs_to_optimize: dict[Path, list[FunctionToOptimize]] | None,
) -> tuple[dict[str, set[FunctionCalledInTest]], int, int]:
    """Discover tests using language-specific support.

    Args:
        cfg: Test configuration.
        language: Language identifier (e.g., "javascript").
        file_to_funcs_to_optimize: Dictionary mapping file paths to functions.

    Returns:
        Tuple of (function_to_tests_map, num_tests, num_replay_tests).

    """
    from codeflash.languages import get_language_support
    from codeflash.languages.base import FunctionInfo, Language, ParentInfo

    try:
        support = get_language_support(Language(language))
    except Exception:
        logger.warning(f"Unsupported language {language}, returning empty test map")
        return {}, 0, 0

    # Bridge FunctionToOptimize objects into the language-support FunctionInfo shape.
    infos: list[FunctionInfo] = []
    for funcs in (file_to_funcs_to_optimize or {}).values():
        for fto in funcs:
            has_class_parent = bool(fto.parents and any(p.type == "ClassDef" for p in fto.parents))
            enclosing_class = fto.parents[0].name if fto.parents and fto.parents[0].type == "ClassDef" else None
            infos.append(
                FunctionInfo(
                    name=fto.function_name,
                    file_path=fto.file_path,
                    start_line=fto.starting_line or 0,
                    end_line=fto.ending_line or 0,
                    start_col=fto.starting_col,
                    end_col=fto.ending_col,
                    is_async=fto.is_async,
                    is_method=has_class_parent,
                    class_name=enclosing_class,
                    parents=tuple(ParentInfo(p.name, p.type) for p in fto.parents),
                    language=Language(language),
                )
            )

    test_map = support.discover_tests(cfg.tests_root, infos)

    # Re-shape TestInfo results into the pipeline's FunctionCalledInTest format.
    function_to_tests: dict[str, set[FunctionCalledInTest]] = defaultdict(set)
    total_tests = 0
    for qualified_name, test_infos in test_map.items():
        for info in test_infos:
            total_tests += 1
            function_to_tests[qualified_name].add(
                FunctionCalledInTest(
                    tests_in_file=TestsInFile(
                        test_file=info.test_file,
                        test_class=info.test_class,
                        test_function=info.test_name,
                        test_type=TestType.EXISTING_UNIT_TEST,
                    ),
                    # Language discovery does not report call positions yet.
                    position=CodePosition(line_no=0, col_no=0),
                )
            )

    # Replay-test count is always zero for language-based discovery.
    return dict(function_to_tests), total_tests, 0
|
||||
|
||||
|
||||
def discover_unit_tests(
|
||||
cfg: TestConfig,
|
||||
discover_only_these_tests: list[Path] | None = None,
|
||||
file_to_funcs_to_optimize: dict[Path, list[FunctionToOptimize]] | None = None,
|
||||
) -> tuple[dict[str, set[FunctionCalledInTest]], int, int]:
|
||||
# Detect language from functions being optimized
|
||||
language = _detect_language_from_functions(file_to_funcs_to_optimize)
|
||||
|
||||
# Route to language-specific test discovery for non-Python languages
|
||||
if language and language != "python":
|
||||
return discover_tests_for_language(cfg, language, file_to_funcs_to_optimize)
|
||||
|
||||
# Existing Python logic
|
||||
framework_strategies: dict[str, Callable] = {"pytest": discover_tests_pytest, "unittest": discover_tests_unittest}
|
||||
strategy = framework_strategies.get(cfg.test_framework, None)
|
||||
if not strategy:
|
||||
|
|
|
|||
|
|
@@ -26,6 +26,9 @@ from codeflash.code_utils.code_utils import (
|
|||
from codeflash.code_utils.env_utils import get_pr_number
|
||||
from codeflash.code_utils.git_utils import get_git_diff, get_repo_owner_and_name
|
||||
from codeflash.discovery.discover_unit_tests import discover_unit_tests
|
||||
from codeflash.languages import get_language_support, get_supported_extensions
|
||||
from codeflash.languages.base import Language
|
||||
from codeflash.languages.registry import is_language_supported
|
||||
from codeflash.lsp.helpers import is_LSP_enabled
|
||||
from codeflash.models.models import FunctionParent
|
||||
from codeflash.telemetry.posthog_cf import ph
|
||||
|
|
@@ -178,6 +181,92 @@ class FunctionToOptimize:
|
|||
return f"{module_name_from_file_path(self.file_path, project_root_path)}.{self.qualified_name}"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Multi-language support helpers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def get_files_for_language(module_root_path: Path, language: Language | None = None) -> list[Path]:
|
||||
"""Get all source files for supported languages.
|
||||
|
||||
Args:
|
||||
module_root_path: Root path to search for source files.
|
||||
language: Optional specific language to filter for. If None, includes all supported languages.
|
||||
|
||||
Returns:
|
||||
List of file paths matching supported extensions.
|
||||
|
||||
"""
|
||||
if language is not None:
|
||||
support = get_language_support(language)
|
||||
extensions = support.file_extensions
|
||||
else:
|
||||
extensions = tuple(get_supported_extensions())
|
||||
|
||||
files = []
|
||||
for ext in extensions:
|
||||
pattern = f"*{ext}"
|
||||
files.extend(module_root_path.rglob(pattern))
|
||||
return files
|
||||
|
||||
|
||||
def _find_all_functions_in_python_file(file_path: Path) -> dict[Path, list[FunctionToOptimize]]:
|
||||
"""Find all optimizable functions in a Python file using AST parsing.
|
||||
|
||||
This is the original Python implementation preserved for backward compatibility.
|
||||
"""
|
||||
functions: dict[Path, list[FunctionToOptimize]] = {}
|
||||
with file_path.open(encoding="utf8") as f:
|
||||
try:
|
||||
ast_module = ast.parse(f.read())
|
||||
except Exception as e:
|
||||
if DEBUG_MODE:
|
||||
logger.exception(e)
|
||||
return functions
|
||||
function_name_visitor = FunctionWithReturnStatement(file_path)
|
||||
function_name_visitor.visit(ast_module)
|
||||
functions[file_path] = function_name_visitor.functions
|
||||
return functions
|
||||
|
||||
|
||||
def _find_all_functions_via_language_support(file_path: Path) -> dict[Path, list[FunctionToOptimize]]:
|
||||
"""Find all optimizable functions using the language support abstraction.
|
||||
|
||||
This function uses the registered language support for the file's language
|
||||
to discover functions, then converts them to FunctionToOptimize instances.
|
||||
"""
|
||||
from codeflash.languages.base import FunctionFilterCriteria
|
||||
|
||||
functions: dict[Path, list[FunctionToOptimize]] = {}
|
||||
|
||||
try:
|
||||
lang_support = get_language_support(file_path)
|
||||
criteria = FunctionFilterCriteria(require_return=True)
|
||||
function_infos = lang_support.discover_functions(file_path, criteria)
|
||||
|
||||
ftos = []
|
||||
for func_info in function_infos:
|
||||
parents = [FunctionParent(p.name, p.type) for p in func_info.parents]
|
||||
ftos.append(
|
||||
FunctionToOptimize(
|
||||
function_name=func_info.name,
|
||||
file_path=func_info.file_path,
|
||||
parents=parents,
|
||||
starting_line=func_info.start_line,
|
||||
ending_line=func_info.end_line,
|
||||
starting_col=func_info.start_col,
|
||||
ending_col=func_info.end_col,
|
||||
is_async=func_info.is_async,
|
||||
language=func_info.language.value,
|
||||
)
|
||||
)
|
||||
functions[file_path] = ftos
|
||||
except Exception as e:
|
||||
logger.debug(f"Failed to discover functions in {file_path}: {e}")
|
||||
|
||||
return functions
|
||||
|
||||
|
||||
def get_functions_to_optimize(
|
||||
optimize_all: str | None,
|
||||
replay_test: list[Path] | None,
|
||||
|
|
@ -362,9 +451,21 @@ def get_functions_within_lines(modified_lines: dict[str, list[int]]) -> dict[str
|
|||
return functions
|
||||
|
||||
|
||||
def get_all_files_and_functions(module_root_path: Path) -> dict[str, list[FunctionToOptimize]]:
|
||||
def get_all_files_and_functions(
|
||||
module_root_path: Path, language: Language | None = None
|
||||
) -> dict[str, list[FunctionToOptimize]]:
|
||||
"""Get all optimizable functions from files in the module root.
|
||||
|
||||
Args:
|
||||
module_root_path: Root path to search for source files.
|
||||
language: Optional specific language to filter for. If None, includes all supported languages.
|
||||
|
||||
Returns:
|
||||
Dictionary mapping file paths to lists of FunctionToOptimize.
|
||||
|
||||
"""
|
||||
functions: dict[str, list[FunctionToOptimize]] = {}
|
||||
for file_path in module_root_path.rglob("*.py"):
|
||||
for file_path in get_files_for_language(module_root_path, language):
|
||||
# Find all the functions in the file
|
||||
functions.update(find_all_functions_in_file(file_path).items())
|
||||
# Randomize the order of the files to optimize to avoid optimizing the same file in the same order every time.
|
||||
|
|
@ -375,18 +476,34 @@ def get_all_files_and_functions(module_root_path: Path) -> dict[str, list[Functi
|
|||
|
||||
|
||||
def find_all_functions_in_file(file_path: Path) -> dict[Path, list[FunctionToOptimize]]:
|
||||
functions: dict[Path, list[FunctionToOptimize]] = {}
|
||||
with file_path.open(encoding="utf8") as f:
|
||||
try:
|
||||
ast_module = ast.parse(f.read())
|
||||
except Exception as e:
|
||||
if DEBUG_MODE:
|
||||
logger.exception(e)
|
||||
return functions
|
||||
function_name_visitor = FunctionWithReturnStatement(file_path)
|
||||
function_name_visitor.visit(ast_module)
|
||||
functions[file_path] = function_name_visitor.functions
|
||||
return functions
|
||||
"""Find all optimizable functions in a file, routing to the appropriate language handler.
|
||||
|
||||
This function checks if the file extension is supported and routes to either
|
||||
the Python-specific implementation (for backward compatibility) or the
|
||||
language support abstraction for other languages.
|
||||
|
||||
Args:
|
||||
file_path: Path to the source file.
|
||||
|
||||
Returns:
|
||||
Dictionary mapping file path to list of FunctionToOptimize.
|
||||
|
||||
"""
|
||||
# Check if the file extension is supported
|
||||
if not is_language_supported(file_path):
|
||||
return {}
|
||||
|
||||
try:
|
||||
lang_support = get_language_support(file_path)
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
# Route to Python-specific implementation for backward compatibility
|
||||
if lang_support.language == Language.PYTHON:
|
||||
return _find_all_functions_in_python_file(file_path)
|
||||
|
||||
# Use language support abstraction for other languages
|
||||
return _find_all_functions_via_language_support(file_path)
|
||||
|
||||
|
||||
def get_all_replay_test_functions(
|
||||
|
|
|
|||
|
|
@ -36,6 +36,11 @@ from codeflash.languages.registry import (
|
|||
register_language,
|
||||
)
|
||||
|
||||
# Import language support modules to trigger auto-registration
|
||||
# This ensures all supported languages are available when this package is imported
|
||||
from codeflash.languages.python import PythonSupport # noqa: F401
|
||||
from codeflash.languages.javascript import JavaScriptSupport # noqa: F401
|
||||
|
||||
__all__ = [
|
||||
# Base types
|
||||
"Language",
|
||||
|
|
|
|||
5
codeflash/languages/javascript/__init__.py
Normal file
5
codeflash/languages/javascript/__init__.py
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
"""JavaScript language support for codeflash."""
|
||||
|
||||
from codeflash.languages.javascript.support import JavaScriptSupport
|
||||
|
||||
__all__ = ["JavaScriptSupport"]
|
||||
701
codeflash/languages/javascript/support.py
Normal file
701
codeflash/languages/javascript/support.py
Normal file
|
|
@ -0,0 +1,701 @@
|
|||
"""
|
||||
JavaScript language support implementation.
|
||||
|
||||
This module implements the LanguageSupport protocol for JavaScript,
|
||||
using tree-sitter for code analysis and Jest for test execution.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import xml.etree.ElementTree as ET
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from codeflash.languages.base import (
|
||||
CodeContext,
|
||||
FunctionFilterCriteria,
|
||||
FunctionInfo,
|
||||
HelperFunction,
|
||||
Language,
|
||||
ParentInfo,
|
||||
TestInfo,
|
||||
TestResult,
|
||||
)
|
||||
from codeflash.languages.registry import register_language
|
||||
from codeflash.languages.treesitter_utils import (
|
||||
FunctionNode,
|
||||
TreeSitterAnalyzer,
|
||||
TreeSitterLanguage,
|
||||
get_analyzer_for_file,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@register_language
|
||||
class JavaScriptSupport:
|
||||
"""
|
||||
JavaScript language support implementation.
|
||||
|
||||
This class implements the LanguageSupport protocol for JavaScript/JSX files,
|
||||
using tree-sitter for code analysis and Jest for test execution.
|
||||
"""
|
||||
|
||||
# === Properties ===
|
||||
|
||||
@property
|
||||
def language(self) -> Language:
|
||||
"""The language this implementation supports."""
|
||||
return Language.JAVASCRIPT
|
||||
|
||||
@property
|
||||
def file_extensions(self) -> tuple[str, ...]:
|
||||
"""File extensions supported by JavaScript."""
|
||||
return (".js", ".jsx", ".mjs", ".cjs")
|
||||
|
||||
@property
|
||||
def test_framework(self) -> str:
|
||||
"""Primary test framework for JavaScript."""
|
||||
return "jest"
|
||||
|
||||
# === Discovery ===
|
||||
|
||||
def discover_functions(
|
||||
self,
|
||||
file_path: Path,
|
||||
filter_criteria: FunctionFilterCriteria | None = None,
|
||||
) -> list[FunctionInfo]:
|
||||
"""
|
||||
Find all optimizable functions in a JavaScript file.
|
||||
|
||||
Uses tree-sitter to parse the file and find functions.
|
||||
|
||||
Args:
|
||||
file_path: Path to the JavaScript file to analyze.
|
||||
filter_criteria: Optional criteria to filter functions.
|
||||
|
||||
Returns:
|
||||
List of FunctionInfo objects for discovered functions.
|
||||
"""
|
||||
criteria = filter_criteria or FunctionFilterCriteria()
|
||||
|
||||
try:
|
||||
source = file_path.read_text()
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to read {file_path}: {e}")
|
||||
return []
|
||||
|
||||
try:
|
||||
analyzer = get_analyzer_for_file(file_path)
|
||||
tree_functions = analyzer.find_functions(
|
||||
source,
|
||||
include_methods=criteria.include_methods,
|
||||
include_arrow_functions=True,
|
||||
require_name=True,
|
||||
)
|
||||
|
||||
functions: list[FunctionInfo] = []
|
||||
for func in tree_functions:
|
||||
# Check for return statement if required
|
||||
if criteria.require_return and not analyzer.has_return_statement(func, source):
|
||||
continue
|
||||
|
||||
# Check async filter
|
||||
if not criteria.include_async and func.is_async:
|
||||
continue
|
||||
|
||||
# Build parents list
|
||||
parents: list[ParentInfo] = []
|
||||
if func.class_name:
|
||||
parents.append(ParentInfo(name=func.class_name, type="ClassDef"))
|
||||
if func.parent_function:
|
||||
parents.append(ParentInfo(name=func.parent_function, type="FunctionDef"))
|
||||
|
||||
functions.append(
|
||||
FunctionInfo(
|
||||
name=func.name,
|
||||
file_path=file_path,
|
||||
start_line=func.start_line,
|
||||
end_line=func.end_line,
|
||||
start_col=func.start_col,
|
||||
end_col=func.end_col,
|
||||
parents=tuple(parents),
|
||||
is_async=func.is_async,
|
||||
is_method=func.is_method,
|
||||
language=Language.JAVASCRIPT,
|
||||
)
|
||||
)
|
||||
|
||||
return functions
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to parse {file_path}: {e}")
|
||||
return []
|
||||
|
||||
def discover_tests(
|
||||
self,
|
||||
test_root: Path,
|
||||
source_functions: Sequence[FunctionInfo],
|
||||
) -> dict[str, list[TestInfo]]:
|
||||
"""
|
||||
Map source functions to their tests via static analysis.
|
||||
|
||||
For JavaScript, this uses static analysis to find test files
|
||||
and match them to source functions based on imports and function calls.
|
||||
|
||||
Args:
|
||||
test_root: Root directory containing tests.
|
||||
source_functions: Functions to find tests for.
|
||||
|
||||
Returns:
|
||||
Dict mapping qualified function names to lists of TestInfo.
|
||||
"""
|
||||
result: dict[str, list[TestInfo]] = {}
|
||||
|
||||
# Find all test files (Jest conventions)
|
||||
test_patterns = [
|
||||
"*.test.js",
|
||||
"*.test.jsx",
|
||||
"*.spec.js",
|
||||
"*.spec.jsx",
|
||||
"__tests__/**/*.js",
|
||||
"__tests__/**/*.jsx",
|
||||
]
|
||||
|
||||
test_files: list[Path] = []
|
||||
for pattern in test_patterns:
|
||||
test_files.extend(test_root.rglob(pattern))
|
||||
|
||||
for test_file in test_files:
|
||||
try:
|
||||
source = test_file.read_text()
|
||||
analyzer = get_analyzer_for_file(test_file)
|
||||
imports = analyzer.find_imports(source)
|
||||
|
||||
# Build a set of imported function names
|
||||
imported_names: set[str] = set()
|
||||
for imp in imports:
|
||||
if imp.default_import:
|
||||
imported_names.add(imp.default_import)
|
||||
for name, alias in imp.named_imports:
|
||||
imported_names.add(alias or name)
|
||||
|
||||
# Find test functions (describe/it/test blocks)
|
||||
test_functions = self._find_jest_tests(source, analyzer)
|
||||
|
||||
# Match source functions to tests
|
||||
for func in source_functions:
|
||||
if func.name in imported_names or func.name in source:
|
||||
if func.qualified_name not in result:
|
||||
result[func.qualified_name] = []
|
||||
for test_name in test_functions:
|
||||
result[func.qualified_name].append(
|
||||
TestInfo(
|
||||
test_name=test_name,
|
||||
test_file=test_file,
|
||||
test_class=None,
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.debug(f"Failed to analyze test file {test_file}: {e}")
|
||||
|
||||
return result
|
||||
|
||||
def _find_jest_tests(self, source: str, analyzer: TreeSitterAnalyzer) -> list[str]:
|
||||
"""Find Jest test function names in source code."""
|
||||
test_names: list[str] = []
|
||||
source_bytes = source.encode("utf8")
|
||||
tree = analyzer.parse(source_bytes)
|
||||
|
||||
self._walk_for_jest_tests(tree.root_node, source_bytes, test_names)
|
||||
return test_names
|
||||
|
||||
def _walk_for_jest_tests(
|
||||
self, node: Any, source_bytes: bytes, test_names: list[str]
|
||||
) -> None:
|
||||
"""Walk tree to find Jest test/it/describe calls."""
|
||||
if node.type == "call_expression":
|
||||
func_node = node.child_by_field_name("function")
|
||||
if func_node:
|
||||
func_name = source_bytes[func_node.start_byte : func_node.end_byte].decode(
|
||||
"utf8"
|
||||
)
|
||||
if func_name in ("test", "it", "describe"):
|
||||
# Get the first string argument as the test name
|
||||
args_node = node.child_by_field_name("arguments")
|
||||
if args_node:
|
||||
for child in args_node.children:
|
||||
if child.type == "string":
|
||||
test_name = source_bytes[
|
||||
child.start_byte : child.end_byte
|
||||
].decode("utf8")
|
||||
test_names.append(test_name.strip("'\""))
|
||||
break
|
||||
|
||||
for child in node.children:
|
||||
self._walk_for_jest_tests(child, source_bytes, test_names)
|
||||
|
||||
# === Code Analysis ===
|
||||
|
||||
def extract_code_context(
|
||||
self,
|
||||
function: FunctionInfo,
|
||||
project_root: Path,
|
||||
module_root: Path,
|
||||
) -> CodeContext:
|
||||
"""
|
||||
Extract function code and its dependencies.
|
||||
|
||||
Uses tree-sitter to analyze imports and find helper functions.
|
||||
|
||||
Args:
|
||||
function: The function to extract context for.
|
||||
project_root: Root of the project.
|
||||
module_root: Root of the module containing the function.
|
||||
|
||||
Returns:
|
||||
CodeContext with target code and dependencies.
|
||||
"""
|
||||
try:
|
||||
source = function.file_path.read_text()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to read {function.file_path}: {e}")
|
||||
return CodeContext(
|
||||
target_code="",
|
||||
target_file=function.file_path,
|
||||
language=Language.JAVASCRIPT,
|
||||
)
|
||||
|
||||
# Extract the function source
|
||||
lines = source.splitlines(keepends=True)
|
||||
if function.start_line and function.end_line:
|
||||
target_lines = lines[function.start_line - 1 : function.end_line]
|
||||
target_code = "".join(target_lines)
|
||||
else:
|
||||
target_code = ""
|
||||
|
||||
# Find imports and helper functions
|
||||
analyzer = get_analyzer_for_file(function.file_path)
|
||||
imports = analyzer.find_imports(source)
|
||||
|
||||
# Find helper functions called by target
|
||||
helpers = self._find_helper_functions(
|
||||
function, source, analyzer, imports, module_root
|
||||
)
|
||||
|
||||
# Extract import statements as strings
|
||||
import_lines = []
|
||||
for imp in imports:
|
||||
imp_lines = lines[imp.start_line - 1 : imp.end_line]
|
||||
import_lines.append("".join(imp_lines).strip())
|
||||
|
||||
return CodeContext(
|
||||
target_code=target_code,
|
||||
target_file=function.file_path,
|
||||
helper_functions=helpers,
|
||||
read_only_context="",
|
||||
imports=import_lines,
|
||||
language=Language.JAVASCRIPT,
|
||||
)
|
||||
|
||||
def _find_helper_functions(
|
||||
self,
|
||||
function: FunctionInfo,
|
||||
source: str,
|
||||
analyzer: TreeSitterAnalyzer,
|
||||
imports: list[Any],
|
||||
module_root: Path,
|
||||
) -> list[HelperFunction]:
|
||||
"""Find helper functions called by the target function."""
|
||||
helpers: list[HelperFunction] = []
|
||||
|
||||
# Get all functions in the same file
|
||||
all_functions = analyzer.find_functions(source, include_methods=True)
|
||||
|
||||
# Find the target function's tree-sitter node
|
||||
target_func = None
|
||||
for func in all_functions:
|
||||
if (
|
||||
func.name == function.name
|
||||
and func.start_line == function.start_line
|
||||
):
|
||||
target_func = func
|
||||
break
|
||||
|
||||
if not target_func:
|
||||
return helpers
|
||||
|
||||
# Find function calls within target
|
||||
calls = analyzer.find_function_calls(source, target_func)
|
||||
|
||||
# Match calls to functions in the same file
|
||||
for func in all_functions:
|
||||
if func.name in calls and func.name != function.name:
|
||||
helpers.append(
|
||||
HelperFunction(
|
||||
name=func.name,
|
||||
qualified_name=func.name,
|
||||
file_path=function.file_path,
|
||||
source_code=func.source_text,
|
||||
start_line=func.start_line,
|
||||
end_line=func.end_line,
|
||||
)
|
||||
)
|
||||
|
||||
# TODO: Follow imports to find helpers in other files
|
||||
|
||||
return helpers
|
||||
|
||||
def find_helper_functions(
|
||||
self,
|
||||
function: FunctionInfo,
|
||||
project_root: Path,
|
||||
) -> list[HelperFunction]:
|
||||
"""
|
||||
Find helper functions called by the target function.
|
||||
|
||||
Args:
|
||||
function: The target function to analyze.
|
||||
project_root: Root of the project.
|
||||
|
||||
Returns:
|
||||
List of HelperFunction objects.
|
||||
"""
|
||||
try:
|
||||
source = function.file_path.read_text()
|
||||
analyzer = get_analyzer_for_file(function.file_path)
|
||||
imports = analyzer.find_imports(source)
|
||||
return self._find_helper_functions(
|
||||
function, source, analyzer, imports, project_root
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to find helpers for {function.name}: {e}")
|
||||
return []
|
||||
|
||||
# === Code Transformation ===
|
||||
|
||||
def replace_function(
|
||||
self,
|
||||
source: str,
|
||||
function: FunctionInfo,
|
||||
new_source: str,
|
||||
) -> str:
|
||||
"""
|
||||
Replace a function in source code with new implementation.
|
||||
|
||||
Uses text-based replacement with line numbers.
|
||||
|
||||
Args:
|
||||
source: Original source code.
|
||||
function: FunctionInfo identifying the function to replace.
|
||||
new_source: New function source code.
|
||||
|
||||
Returns:
|
||||
Modified source code with function replaced.
|
||||
"""
|
||||
if function.start_line is None or function.end_line is None:
|
||||
logger.error(f"Function {function.name} has no line information")
|
||||
return source
|
||||
|
||||
lines = source.splitlines(keepends=True)
|
||||
|
||||
# Handle case where source doesn't end with newline
|
||||
if lines and not lines[-1].endswith("\n"):
|
||||
lines[-1] += "\n"
|
||||
|
||||
# Get indentation from original function's first line
|
||||
if function.start_line <= len(lines):
|
||||
original_first_line = lines[function.start_line - 1]
|
||||
original_indent = len(original_first_line) - len(original_first_line.lstrip())
|
||||
else:
|
||||
original_indent = 0
|
||||
|
||||
# Get indentation from new function's first line
|
||||
new_lines = new_source.splitlines(keepends=True)
|
||||
if new_lines:
|
||||
new_first_line = new_lines[0]
|
||||
new_indent = len(new_first_line) - len(new_first_line.lstrip())
|
||||
else:
|
||||
new_indent = 0
|
||||
|
||||
# Calculate indent adjustment needed
|
||||
indent_diff = original_indent - new_indent
|
||||
|
||||
# Adjust indentation of new function if needed
|
||||
if indent_diff != 0:
|
||||
adjusted_new_lines = []
|
||||
for line in new_lines:
|
||||
if line.strip(): # Non-empty line
|
||||
if indent_diff > 0:
|
||||
adjusted_new_lines.append(" " * indent_diff + line)
|
||||
else:
|
||||
current_indent = len(line) - len(line.lstrip())
|
||||
remove_amount = min(current_indent, abs(indent_diff))
|
||||
adjusted_new_lines.append(line[remove_amount:])
|
||||
else:
|
||||
adjusted_new_lines.append(line)
|
||||
new_lines = adjusted_new_lines
|
||||
|
||||
# Ensure new function ends with newline
|
||||
if new_lines and not new_lines[-1].endswith("\n"):
|
||||
new_lines[-1] += "\n"
|
||||
|
||||
# Build result
|
||||
before = lines[: function.start_line - 1]
|
||||
after = lines[function.end_line :]
|
||||
|
||||
result_lines = before + new_lines + after
|
||||
return "".join(result_lines)
|
||||
|
||||
def format_code(
|
||||
self,
|
||||
source: str,
|
||||
file_path: Path | None = None,
|
||||
) -> str:
|
||||
"""
|
||||
Format JavaScript code using prettier (if available).
|
||||
|
||||
Args:
|
||||
source: Source code to format.
|
||||
file_path: Optional file path for context.
|
||||
|
||||
Returns:
|
||||
Formatted source code.
|
||||
"""
|
||||
try:
|
||||
# Try to use prettier via npx
|
||||
result = subprocess.run(
|
||||
["npx", "prettier", "--stdin-filepath", "file.js"],
|
||||
input=source,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=30,
|
||||
)
|
||||
if result.returncode == 0:
|
||||
return result.stdout
|
||||
except (subprocess.TimeoutExpired, FileNotFoundError):
|
||||
pass
|
||||
except Exception as e:
|
||||
logger.debug(f"Prettier formatting failed: {e}")
|
||||
|
||||
return source
|
||||
|
||||
# === Test Execution ===
|
||||
|
||||
def run_tests(
|
||||
self,
|
||||
test_files: Sequence[Path],
|
||||
cwd: Path,
|
||||
env: dict[str, str],
|
||||
timeout: int,
|
||||
) -> tuple[list[TestResult], Path]:
|
||||
"""
|
||||
Run Jest tests and return results.
|
||||
|
||||
Args:
|
||||
test_files: Paths to test files to run.
|
||||
cwd: Working directory for test execution.
|
||||
env: Environment variables.
|
||||
timeout: Maximum execution time in seconds.
|
||||
|
||||
Returns:
|
||||
Tuple of (list of TestResults, path to JUnit XML).
|
||||
"""
|
||||
# Create output directory for results
|
||||
output_dir = cwd / ".codeflash"
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
junit_xml = output_dir / "jest-results.xml"
|
||||
|
||||
# Build Jest command
|
||||
test_pattern = "|".join(str(f) for f in test_files)
|
||||
cmd = [
|
||||
"npx",
|
||||
"jest",
|
||||
"--reporters=default",
|
||||
"--reporters=jest-junit",
|
||||
f"--testPathPattern={test_pattern}",
|
||||
"--runInBand", # Sequential for deterministic timing
|
||||
"--forceExit",
|
||||
]
|
||||
|
||||
test_env = env.copy()
|
||||
test_env["JEST_JUNIT_OUTPUT_FILE"] = str(junit_xml)
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
cwd=cwd,
|
||||
env=test_env,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
results = self.parse_test_results(junit_xml, result.stdout)
|
||||
return results, junit_xml
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
logger.warning(f"Test execution timed out after {timeout}s")
|
||||
return [], junit_xml
|
||||
except Exception as e:
|
||||
logger.error(f"Test execution failed: {e}")
|
||||
return [], junit_xml
|
||||
|
||||
def parse_test_results(
|
||||
self,
|
||||
junit_xml_path: Path,
|
||||
stdout: str,
|
||||
) -> list[TestResult]:
|
||||
"""
|
||||
Parse test results from JUnit XML.
|
||||
|
||||
Args:
|
||||
junit_xml_path: Path to JUnit XML results file.
|
||||
stdout: Standard output from test execution.
|
||||
|
||||
Returns:
|
||||
List of TestResult objects.
|
||||
"""
|
||||
results: list[TestResult] = []
|
||||
|
||||
if not junit_xml_path.exists():
|
||||
return results
|
||||
|
||||
try:
|
||||
tree = ET.parse(junit_xml_path)
|
||||
root = tree.getroot()
|
||||
|
||||
for testcase in root.iter("testcase"):
|
||||
name = testcase.get("name", "unknown")
|
||||
classname = testcase.get("classname", "")
|
||||
time_str = testcase.get("time", "0")
|
||||
|
||||
# Convert time to nanoseconds
|
||||
try:
|
||||
runtime_ns = int(float(time_str) * 1_000_000_000)
|
||||
except ValueError:
|
||||
runtime_ns = None
|
||||
|
||||
# Check for failure/error
|
||||
failure = testcase.find("failure")
|
||||
error = testcase.find("error")
|
||||
passed = failure is None and error is None
|
||||
|
||||
error_message = None
|
||||
if failure is not None:
|
||||
error_message = failure.get("message", failure.text)
|
||||
elif error is not None:
|
||||
error_message = error.get("message", error.text)
|
||||
|
||||
# Determine test file from classname
|
||||
# Jest typically uses the file path as classname
|
||||
test_file = Path(classname) if classname else Path("unknown")
|
||||
|
||||
results.append(
|
||||
TestResult(
|
||||
test_name=name,
|
||||
test_file=test_file,
|
||||
passed=passed,
|
||||
runtime_ns=runtime_ns,
|
||||
error_message=error_message,
|
||||
stdout=stdout,
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to parse JUnit XML: {e}")
|
||||
|
||||
return results
|
||||
|
||||
# === Instrumentation ===
|
||||
|
||||
def instrument_for_tracing(
|
||||
self,
|
||||
source: str,
|
||||
functions: Sequence[FunctionInfo],
|
||||
) -> str:
|
||||
"""
|
||||
Add tracing instrumentation to capture inputs/outputs.
|
||||
|
||||
For JavaScript, this wraps functions to capture their arguments
|
||||
and return values.
|
||||
|
||||
Args:
|
||||
source: Source code to instrument.
|
||||
functions: Functions to add tracing to.
|
||||
|
||||
Returns:
|
||||
Instrumented source code.
|
||||
"""
|
||||
# For now, return source unchanged
|
||||
# Full implementation would add wrapper code
|
||||
return source
|
||||
|
||||
def instrument_for_benchmarking(
|
||||
self,
|
||||
test_source: str,
|
||||
target_function: FunctionInfo,
|
||||
) -> str:
|
||||
"""
|
||||
Add timing instrumentation to test code.
|
||||
|
||||
Args:
|
||||
test_source: Test source code to instrument.
|
||||
target_function: Function being benchmarked.
|
||||
|
||||
Returns:
|
||||
Instrumented test source code.
|
||||
"""
|
||||
# For now, return source unchanged
|
||||
# Full implementation would add timing wrappers
|
||||
return test_source
|
||||
|
||||
# === Validation ===
|
||||
|
||||
def validate_syntax(self, source: str) -> bool:
|
||||
"""
|
||||
Check if JavaScript source code is syntactically valid.
|
||||
|
||||
Uses tree-sitter to parse and check for errors.
|
||||
|
||||
Args:
|
||||
source: Source code to validate.
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise.
|
||||
"""
|
||||
try:
|
||||
analyzer = TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT)
|
||||
tree = analyzer.parse(source)
|
||||
# Check if tree has errors
|
||||
return not tree.root_node.has_error
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def normalize_code(self, source: str) -> str:
|
||||
"""
|
||||
Normalize JavaScript code for deduplication.
|
||||
|
||||
Removes comments and normalizes whitespace.
|
||||
|
||||
Args:
|
||||
source: Source code to normalize.
|
||||
|
||||
Returns:
|
||||
Normalized source code.
|
||||
"""
|
||||
# Simple normalization: remove extra whitespace
|
||||
# A full implementation would use tree-sitter to strip comments
|
||||
lines = source.splitlines()
|
||||
normalized_lines = []
|
||||
for line in lines:
|
||||
stripped = line.strip()
|
||||
if stripped and not stripped.startswith("//"):
|
||||
normalized_lines.append(stripped)
|
||||
return "\n".join(normalized_lines)
|
||||
627
codeflash/languages/treesitter_utils.py
Normal file
627
codeflash/languages/treesitter_utils.py
Normal file
|
|
@ -0,0 +1,627 @@
|
|||
"""
|
||||
Tree-sitter utilities for cross-language code analysis.
|
||||
|
||||
This module provides a unified interface for parsing and analyzing code
|
||||
across multiple languages using tree-sitter.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from tree_sitter import Language, Node, Parser
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from tree_sitter import Tree
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TreeSitterLanguage(Enum):
|
||||
"""Supported tree-sitter languages."""
|
||||
|
||||
JAVASCRIPT = "javascript"
|
||||
TYPESCRIPT = "typescript"
|
||||
TSX = "tsx"
|
||||
|
||||
|
||||
# Lazy-loaded language instances
|
||||
_LANGUAGE_CACHE: dict[TreeSitterLanguage, Language] = {}
|
||||
|
||||
|
||||
def _get_language(lang: TreeSitterLanguage) -> Language:
|
||||
"""Get a tree-sitter Language instance, with lazy loading."""
|
||||
if lang not in _LANGUAGE_CACHE:
|
||||
if lang == TreeSitterLanguage.JAVASCRIPT:
|
||||
import tree_sitter_javascript
|
||||
|
||||
_LANGUAGE_CACHE[lang] = Language(tree_sitter_javascript.language())
|
||||
elif lang == TreeSitterLanguage.TYPESCRIPT:
|
||||
import tree_sitter_typescript
|
||||
|
||||
_LANGUAGE_CACHE[lang] = Language(tree_sitter_typescript.language_typescript())
|
||||
elif lang == TreeSitterLanguage.TSX:
|
||||
import tree_sitter_typescript
|
||||
|
||||
_LANGUAGE_CACHE[lang] = Language(tree_sitter_typescript.language_tsx())
|
||||
return _LANGUAGE_CACHE[lang]
|
||||
|
||||
|
||||
@dataclass
|
||||
class FunctionNode:
|
||||
"""Represents a function found by tree-sitter analysis."""
|
||||
|
||||
name: str
|
||||
node: Node
|
||||
start_line: int
|
||||
end_line: int
|
||||
start_col: int
|
||||
end_col: int
|
||||
is_async: bool
|
||||
is_method: bool
|
||||
is_arrow: bool
|
||||
is_generator: bool
|
||||
class_name: str | None
|
||||
parent_function: str | None
|
||||
source_text: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class ImportInfo:
|
||||
"""Represents an import statement."""
|
||||
|
||||
module_path: str # The path being imported from
|
||||
default_import: str | None # Default import name (import X from ...)
|
||||
named_imports: list[tuple[str, str | None]] # [(name, alias), ...]
|
||||
namespace_import: str | None # Namespace import (import * as X from ...)
|
||||
is_type_only: bool # TypeScript type-only import
|
||||
start_line: int
|
||||
end_line: int
|
||||
|
||||
|
||||
class TreeSitterAnalyzer:
|
||||
"""
|
||||
Cross-language code analysis using tree-sitter.
|
||||
|
||||
This class provides methods to parse and analyze JavaScript/TypeScript code,
|
||||
finding functions, imports, and other code structures.
|
||||
"""
|
||||
|
||||
def __init__(self, language: TreeSitterLanguage | str):
|
||||
"""
|
||||
Initialize the analyzer for a specific language.
|
||||
|
||||
Args:
|
||||
language: The language to analyze (TreeSitterLanguage enum or string).
|
||||
"""
|
||||
if isinstance(language, str):
|
||||
language = TreeSitterLanguage(language)
|
||||
self.language = language
|
||||
self._parser: Parser | None = None
|
||||
|
||||
@property
|
||||
def parser(self) -> Parser:
|
||||
"""Get the parser, creating it lazily."""
|
||||
if self._parser is None:
|
||||
self._parser = Parser(_get_language(self.language))
|
||||
return self._parser
|
||||
|
||||
def parse(self, source: str | bytes) -> Tree:
    """Parse source code into a tree-sitter tree.

    Args:
        source: Source code as string or bytes; strings are UTF-8 encoded.

    Returns:
        The parsed tree.
    """
    raw = source.encode("utf8") if isinstance(source, str) else source
    return self.parser.parse(raw)
|
||||
|
||||
def get_node_text(self, node: Node, source: bytes) -> str:
    """Extract the source text covered by a tree-sitter node.

    Args:
        node: The tree-sitter node.
        source: The full source code as bytes.

    Returns:
        The UTF-8 decoded text of the node's byte span.
    """
    span = source[node.start_byte : node.end_byte]
    return span.decode("utf8")
|
||||
|
||||
def find_functions(
    self,
    source: str,
    include_methods: bool = True,
    include_arrow_functions: bool = True,
    require_name: bool = True,
) -> list[FunctionNode]:
    """Find all function definitions in source code.

    Args:
        source: The source code to analyze.
        include_methods: Whether to include class methods.
        include_arrow_functions: Whether to include arrow functions.
        require_name: Whether to require functions to have names.

    Returns:
        List of FunctionNode objects describing found functions.
    """
    encoded = source.encode("utf8")
    root = self.parse(encoded).root_node
    found: list[FunctionNode] = []

    # The walker appends matches into `found` as it descends.
    self._walk_tree_for_functions(
        root,
        encoded,
        found,
        include_methods=include_methods,
        include_arrow_functions=include_arrow_functions,
        require_name=require_name,
        current_class=None,
        current_function=None,
    )
    return found
|
||||
|
||||
def _walk_tree_for_functions(
    self,
    node: Node,
    source_bytes: bytes,
    functions: list[FunctionNode],
    include_methods: bool,
    include_arrow_functions: bool,
    require_name: bool,
    current_class: str | None,
    current_function: str | None,
) -> None:
    """Recursively walk the tree to find function definitions.

    Appends matches to `functions` in place. `current_class` and
    `current_function` carry the enclosing class/function names down the
    recursion so nested definitions record their context.
    """
    # Function types in JavaScript/TypeScript
    function_types = {
        "function_declaration",
        "function_expression",
        "generator_function_declaration",
        "generator_function",
    }

    if include_arrow_functions:
        function_types.add("arrow_function")

    if include_methods:
        function_types.add("method_definition")

    # Track class context; children inherit the updated values.
    new_class = current_class
    new_function = current_function

    if node.type == "class_declaration" or node.type == "class":
        # Get class name (anonymous class expressions keep the outer context)
        name_node = node.child_by_field_name("name")
        if name_node:
            new_class = self.get_node_text(name_node, source_bytes)

    if node.type in function_types:
        func_info = self._extract_function_info(
            node, source_bytes, current_class, current_function
        )

        if func_info:
            # Check if we should include this function
            should_include = True

            if require_name and not func_info.name:
                should_include = False

            if func_info.is_method and not include_methods:
                should_include = False

            if func_info.is_arrow and not include_arrow_functions:
                should_include = False

            if should_include:
                functions.append(func_info)

            # Track as current function for nested functions
            # (done even when the function itself was filtered out, so
            # nested functions still report their true parent)
            if func_info.name:
                new_function = func_info.name

    # Recurse into children
    for child in node.children:
        self._walk_tree_for_functions(
            child,
            source_bytes,
            functions,
            include_methods=include_methods,
            include_arrow_functions=include_arrow_functions,
            require_name=require_name,
            current_class=new_class,
            # Only a function node changes the parent-function context for
            # its children; otherwise propagate the incoming value.
            current_function=new_function if node.type in function_types else current_function,
        )
|
||||
|
||||
def _extract_function_info(
    self,
    node: Node,
    source_bytes: bytes,
    current_class: str | None,
    current_function: str | None,
) -> FunctionNode | None:
    """Extract function information from a tree-sitter node.

    Args:
        node: A node whose type is one of the recognized function kinds.
        source_bytes: Full source, used to slice out names and bodies.
        current_class: Enclosing class name, if any.
        current_function: Enclosing function name, if any.

    Returns:
        A populated FunctionNode; `name` is "" for anonymous functions.
    """
    name = ""
    is_async = False
    is_generator = False
    is_method = False
    is_arrow = node.type == "arrow_function"

    # Check for async modifier ("async" appears as a child token)
    for child in node.children:
        if child.type == "async":
            is_async = True
            break

    # Check for generator (generator node types contain "generator")
    if "generator" in node.type:
        is_generator = True

    # Get function name based on node type
    if node.type in ("function_declaration", "generator_function_declaration"):
        name_node = node.child_by_field_name("name")
        if name_node:
            name = self.get_node_text(name_node, source_bytes)
    elif node.type == "method_definition":
        is_method = True
        name_node = node.child_by_field_name("name")
        if name_node:
            name = self.get_node_text(name_node, source_bytes)
    elif node.type in ("function_expression", "generator_function"):
        # Check if assigned to a variable (named function expression)
        name_node = node.child_by_field_name("name")
        if name_node:
            name = self.get_node_text(name_node, source_bytes)
        else:
            # Try to get name from parent assignment: const f = function() {}
            name = self._get_name_from_assignment(node, source_bytes)
    elif node.type == "arrow_function":
        # Arrow functions get names only from variable declarations/assignments
        name = self._get_name_from_assignment(node, source_bytes)

    # Get source text of the whole function
    source_text = self.get_node_text(node, source_bytes)

    return FunctionNode(
        name=name,
        node=node,
        start_line=node.start_point[0] + 1,  # Convert to 1-indexed
        end_line=node.end_point[0] + 1,
        start_col=node.start_point[1],
        end_col=node.end_point[1],
        is_async=is_async,
        is_method=is_method,
        is_arrow=is_arrow,
        is_generator=is_generator,
        class_name=current_class if is_method else None,
        parent_function=current_function,
        source_text=source_text,
    )
|
||||
|
||||
def _get_name_from_assignment(self, node: Node, source_bytes: bytes) -> str:
    """Derive a function's name from its surrounding declaration or assignment.

    Handles patterns like:
    - const foo = () => {}
    - const foo = function() {}
    - let bar = function() {}
    - obj.method = () => {}
    - { foo: () => {} }

    Returns "" when no name can be determined.
    """
    parent = node.parent
    if parent is None:
        return ""

    if parent.type == "variable_declarator":
        # const/let/var foo = <function>
        name_node = parent.child_by_field_name("name")
        return self.get_node_text(name_node, source_bytes) if name_node else ""

    if parent.type == "assignment_expression":
        # foo = <function>  or  obj.method = <function>
        target = parent.child_by_field_name("left")
        if target is not None:
            if target.type == "identifier":
                return self.get_node_text(target, source_bytes)
            if target.type == "member_expression":
                # Use the property name for obj.method = ...
                prop = target.child_by_field_name("property")
                if prop is not None:
                    return self.get_node_text(prop, source_bytes)
        return ""

    if parent.type == "pair":
        # Object literal entry: { foo: <function> }
        key = parent.child_by_field_name("key")
        return self.get_node_text(key, source_bytes) if key else ""

    return ""
|
||||
|
||||
def find_imports(self, source: str) -> list[ImportInfo]:
    """Find all import statements in source code.

    Collects ES-module `import` statements and CommonJS `require()` calls.

    Args:
        source: The source code to analyze.

    Returns:
        List of ImportInfo objects describing imports.
    """
    encoded = source.encode("utf8")
    found: list[ImportInfo] = []
    self._walk_tree_for_imports(self.parse(encoded).root_node, encoded, found)
    return found
|
||||
|
||||
def _walk_tree_for_imports(
    self,
    node: Node,
    source_bytes: bytes,
    imports: list[ImportInfo],
) -> None:
    """Depth-first scan appending ES-module imports and require() calls."""
    if node.type == "import_statement":
        info = self._extract_import_info(node, source_bytes)
        if info is not None:
            imports.append(info)
    elif node.type == "call_expression":
        # CommonJS: const x = require("...")
        callee = node.child_by_field_name("function")
        if callee is not None and self.get_node_text(callee, source_bytes) == "require":
            info = self._extract_require_info(node, source_bytes)
            if info is not None:
                imports.append(info)

    for child in node.children:
        self._walk_tree_for_imports(child, source_bytes, imports)
|
||||
|
||||
def _extract_import_info(self, node: Node, source_bytes: bytes) -> ImportInfo | None:
    """Extract import information from an ES-module import statement node.

    Args:
        node: An `import_statement` node.
        source_bytes: The full source as bytes.

    Returns:
        An ImportInfo, or None when the statement has no resolvable
        module path.
    """
    module_path = ""
    default_import = None
    named_imports: list[tuple[str, str | None]] = []
    namespace_import = None
    is_type_only = False

    # Get the module path (source), with quotes stripped.
    source_node = node.child_by_field_name("source")
    if source_node:
        module_path = self.get_node_text(source_node, source_bytes).strip("'\"")

    # Check for type-only import (TypeScript): import type { ... } from ...
    for child in node.children:
        if child.type == "type" or self.get_node_text(child, source_bytes) == "type":
            is_type_only = True
            break

    # Process the import clause for default / named / namespace imports.
    # NOTE: a previous version also called the no-op _process_import_clause
    # helper here; it could never communicate results back through its
    # immutable str parameters, so the dead call has been removed.
    for child in node.children:
        if child.type == "import_clause":
            for clause_child in child.children:
                if clause_child.type == "identifier":
                    # import X from "..."
                    default_import = self.get_node_text(clause_child, source_bytes)
                elif clause_child.type == "named_imports":
                    # import { a, b as c } from "..."
                    for spec in clause_child.children:
                        if spec.type == "import_specifier":
                            name_node = spec.child_by_field_name("name")
                            alias_node = spec.child_by_field_name("alias")
                            if name_node:
                                name = self.get_node_text(name_node, source_bytes)
                                alias = (
                                    self.get_node_text(alias_node, source_bytes)
                                    if alias_node
                                    else None
                                )
                                named_imports.append((name, alias))
                elif clause_child.type == "namespace_import":
                    # import * as X from "..."
                    for ns_child in clause_child.children:
                        if ns_child.type == "identifier":
                            namespace_import = self.get_node_text(ns_child, source_bytes)

    if not module_path:
        return None

    return ImportInfo(
        module_path=module_path,
        default_import=default_import,
        named_imports=named_imports,
        namespace_import=namespace_import,
        is_type_only=is_type_only,
        start_line=node.start_point[0] + 1,
        end_line=node.end_point[0] + 1,
    )
|
||||
|
||||
def _process_import_clause(
    self,
    node: Node,
    source_bytes: bytes,
    default_import: str | None,
    named_imports: list[tuple[str, str | None]],
    namespace_import: str | None,
) -> None:
    """Process an import clause to extract imports.

    NOTE(review): this is currently an inert placeholder -- all clause
    processing happens inline in _extract_import_info. The str parameters
    (default_import, namespace_import) are immutable, so this helper could
    never return results through them; only `named_imports` could be
    mutated in place. Consider removing it or moving the inline logic here.
    """
    # This is a helper that modifies the lists in place
    pass  # Processing is done inline in _extract_import_info
|
||||
|
||||
def _extract_require_info(self, node: Node, source_bytes: bytes) -> ImportInfo | None:
    """Build an ImportInfo from a CommonJS require("module") call.

    Returns None when the call has no string argument to use as a module
    path. Destructured requires (const { a } = require(...)) are not yet
    decomposed into named imports.
    """
    args_node = node.child_by_field_name("arguments")
    if not args_node:
        return None

    # The first string argument is the module path (quotes stripped).
    module_path = next(
        (
            self.get_node_text(child, source_bytes).strip("'\"")
            for child in args_node.children
            if child.type == "string"
        ),
        "",
    )
    if not module_path:
        return None

    # const foo = require("...") binds the whole module to `foo`.
    default_import = None
    parent = node.parent
    if parent and parent.type == "variable_declarator":
        name_node = parent.child_by_field_name("name")
        if name_node and name_node.type == "identifier":
            default_import = self.get_node_text(name_node, source_bytes)
        # name_node.type == "object_pattern" (destructuring) intentionally
        # produces no named imports for now.

    return ImportInfo(
        module_path=module_path,
        default_import=default_import,
        named_imports=[],
        namespace_import=None,
        is_type_only=False,
        start_line=node.start_point[0] + 1,
        end_line=node.end_point[0] + 1,
    )
|
||||
|
||||
def find_function_calls(self, source: str, within_function: FunctionNode) -> list[str]:
    """Find all function calls within a specific function's body.

    Args:
        source: The full source code.
        within_function: The function to search within.

    Returns:
        List of unique called function names, in first-seen order.
    """
    calls: list[str] = []
    source_bytes = source.encode("utf8")

    # Get the body of the function
    body_node = within_function.node.child_by_field_name("body")
    if body_node is None:
        # Arrow functions with expression bodies may lack a "body" field;
        # take the first child that is not part of the signature.
        # (The original condition also tested membership in a disjoint set
        # first, which was redundant and has been simplified.)
        for child in within_function.node.children:
            if child.type not in ("identifier", "formal_parameters", "async", "=>"):
                body_node = child
                break

    if body_node:
        self._walk_tree_for_calls(body_node, source_bytes, calls)

    # De-duplicate while preserving first-seen order; list(set(...)) gave a
    # nondeterministic ordering across runs.
    return list(dict.fromkeys(calls))
|
||||
|
||||
def _walk_tree_for_calls(
    self,
    node: Node,
    source_bytes: bytes,
    calls: list[str],
) -> None:
    """Append the callee name of every call_expression under *node*."""
    if node.type == "call_expression":
        callee = node.child_by_field_name("function")
        if callee is not None:
            if callee.type == "identifier":
                # Plain call: foo(...)
                calls.append(self.get_node_text(callee, source_bytes))
            elif callee.type == "member_expression":
                # Method call obj.method(...): record the method name only
                prop = callee.child_by_field_name("property")
                if prop is not None:
                    calls.append(self.get_node_text(prop, source_bytes))

    for child in node.children:
        self._walk_tree_for_calls(child, source_bytes, calls)
|
||||
|
||||
def has_return_statement(self, function_node: FunctionNode, source: str) -> bool:
    """Check if a function has a return statement.

    Args:
        function_node: The function to check.
        source: The source code (currently unused; kept for API
            compatibility with existing callers).

    Returns:
        True if the function has a return statement (explicit, or the
        implicit return of an arrow function with an expression body).
    """
    # Removed: an unused `source_bytes = source.encode("utf8")` local.

    # For arrow functions with expression body, there's an implicit return.
    if function_node.is_arrow:
        body_node = function_node.node.child_by_field_name("body")
        if body_node and body_node.type != "statement_block":
            return True

    return self._node_has_return(function_node.node)
|
||||
|
||||
def _node_has_return(self, node: Node, *, _inside_nested: bool = False) -> bool:
    """Recursively check if a node contains a return statement.

    Return statements inside NESTED function definitions belong to those
    nested functions and are not counted for the outer function. The
    previous implementation descended into nested function bodies anyway,
    so `function outer() { function inner() { return 1; } }` incorrectly
    reported that `outer` returns; the `_inside_nested` flag (a new,
    backward-compatible keyword-only parameter) fixes that.
    """
    if node.type == "return_statement":
        return True

    if node.type in (
        "function_declaration",
        "function_expression",
        "arrow_function",
        "method_definition",
    ):
        if _inside_nested:
            # A nested function's returns don't make the outer one return.
            return False
        # Entry function: scan only its own body, marking children as nested.
        body_node = node.child_by_field_name("body")
        if body_node:
            return any(
                self._node_has_return(child, _inside_nested=True)
                for child in body_node.children
            )
        return False

    return any(
        self._node_has_return(child, _inside_nested=_inside_nested)
        for child in node.children
    )
|
||||
|
||||
|
||||
def get_analyzer_for_file(file_path: Path) -> TreeSitterAnalyzer:
    """Get the appropriate TreeSitterAnalyzer for a file based on its extension.

    `.ts` maps to TypeScript, `.tsx` to TSX; everything else
    (.js, .jsx, .mjs, .cjs, ...) defaults to JavaScript.

    Args:
        file_path: Path to the file.

    Returns:
        TreeSitterAnalyzer configured for the file's language.
    """
    language_by_suffix = {
        ".ts": TreeSitterLanguage.TYPESCRIPT,
        ".tsx": TreeSitterLanguage.TSX,
    }
    suffix = file_path.suffix.lower()
    return TreeSitterAnalyzer(language_by_suffix.get(suffix, TreeSitterLanguage.JAVASCRIPT))
|
||||
|
|
@ -23,7 +23,7 @@ from re import Pattern
|
|||
from typing import Annotated, NamedTuple, Optional, cast
|
||||
|
||||
from jedi.api.classes import Name
|
||||
from pydantic import AfterValidator, BaseModel, ConfigDict, Field, PrivateAttr, ValidationError
|
||||
from pydantic import AfterValidator, BaseModel, ConfigDict, Field, PrivateAttr, ValidationError, model_validator
|
||||
from pydantic.dataclasses import dataclass
|
||||
|
||||
from codeflash.cli_cmds.console import console, logger
|
||||
|
|
@ -214,8 +214,16 @@ class ProcessedBenchmarkInfo:
|
|||
|
||||
|
||||
class CodeString(BaseModel):
|
||||
code: Annotated[str, AfterValidator(validate_python_code)]
|
||||
code: str
|
||||
file_path: Optional[Path] = None
|
||||
language: str = "python" # Language for validation - only Python code is validated
|
||||
|
||||
@model_validator(mode="after")
|
||||
def validate_code_syntax(self) -> "CodeString":
|
||||
"""Validate code syntax for Python only."""
|
||||
if self.language == "python":
|
||||
validate_python_code(self.code)
|
||||
return self
|
||||
|
||||
|
||||
def get_code_block_splitter(file_path: Path) -> str:
|
||||
|
|
|
|||
|
|
@ -419,6 +419,14 @@ class Optimizer:
|
|||
|
||||
function_optimizer = None
|
||||
file_to_funcs_to_optimize, num_optimizable_functions, trace_file_path = self.get_optimizable_functions()
|
||||
|
||||
# Set language on TestConfig based on discovered functions
|
||||
if file_to_funcs_to_optimize:
|
||||
for funcs in file_to_funcs_to_optimize.values():
|
||||
if funcs and funcs[0].language:
|
||||
self.test_cfg.set_language(funcs[0].language)
|
||||
break
|
||||
|
||||
if self.args.all:
|
||||
three_min_in_ns = int(1.8e11)
|
||||
console.rule()
|
||||
|
|
|
|||
|
|
@ -22,6 +22,88 @@ BEHAVIORAL_BLOCKLISTED_PLUGINS = ["benchmark", "codspeed", "xdist", "sugar"]
|
|||
BENCHMARKING_BLOCKLISTED_PLUGINS = ["codspeed", "cov", "benchmark", "profiling", "xdist", "sugar"]
|
||||
|
||||
|
||||
def run_jest_behavioral_tests(
    test_paths: TestFiles,
    test_env: dict[str, str],
    cwd: Path,
    *,
    timeout: int | None = None,
) -> tuple[Path, subprocess.CompletedProcess, None, None]:
    """Run Jest tests and return results in a format compatible with pytest output.

    Args:
        test_paths: TestFiles object containing test file information.
        test_env: Environment variables for the test run.
        cwd: Working directory for running tests.
        timeout: Optional timeout in seconds.

    Returns:
        Tuple of (result_file_path, subprocess_result, None, None).

    """
    import re  # local import keeps this module's top-level imports unchanged

    result_file_path = get_run_tmp_file(Path("jest_results.xml"))

    # Get test files to run
    test_files = [str(file.instrumented_behavior_file_path) for file in test_paths.test_files]

    # Build Jest command
    jest_cmd = [
        "npx",
        "jest",
        "--reporters=default",
        "--reporters=jest-junit",
        "--runInBand",  # Run tests serially for consistent timing
        "--forceExit",
    ]

    # Add test pattern if we have specific files
    if test_files:
        # --testPathPattern is a REGEX: escape each file name so "." and
        # other metacharacters match literally instead of any character,
        # which could otherwise select unintended test files.
        test_pattern = "|".join(re.escape(Path(f).name) for f in test_files)
        jest_cmd.append(f"--testPathPattern={test_pattern}")

    if timeout:
        jest_cmd.append(f"--testTimeout={timeout * 1000}")  # Jest uses milliseconds

    # Point jest-junit at our result file.
    jest_env = test_env.copy()
    jest_env["JEST_JUNIT_OUTPUT_FILE"] = str(result_file_path)
    jest_env["JEST_JUNIT_OUTPUT_DIR"] = str(result_file_path.parent)
    jest_env["JEST_JUNIT_OUTPUT_NAME"] = result_file_path.name

    logger.debug(f"Running Jest tests with command: {' '.join(jest_cmd)}")

    try:
        run_args = get_cross_platform_subprocess_run_args(
            cwd=cwd,
            env=jest_env,
            timeout=timeout or 600,
            check=False,
            text=True,
            capture_output=True,
        )
        result = subprocess.run(jest_cmd, **run_args)  # noqa: PLW1510
        logger.debug(f"Jest result: returncode={result.returncode}")
    except subprocess.TimeoutExpired:
        logger.warning(f"Jest tests timed out after {timeout}s")
        result = subprocess.CompletedProcess(
            args=jest_cmd,
            returncode=-1,
            stdout="",
            stderr="Test execution timed out",
        )
    except FileNotFoundError:
        logger.error("Jest not found. Make sure Jest is installed (npm install jest)")
        result = subprocess.CompletedProcess(
            args=jest_cmd,
            returncode=-1,
            stdout="",
            stderr="Jest not found. Run: npm install jest jest-junit",
        )

    return result_file_path, result, None, None
|
||||
|
||||
|
||||
def execute_test_subprocess(
|
||||
cmd_list: list[str], cwd: Path, env: dict[str, str] | None, timeout: int = 600
|
||||
) -> subprocess.CompletedProcess:
|
||||
|
|
@ -46,6 +128,8 @@ def run_behavioral_tests(
|
|||
enable_coverage: bool = False,
|
||||
) -> tuple[Path, subprocess.CompletedProcess, Path | None, Path | None]:
|
||||
"""Run behavioral tests with optional coverage."""
|
||||
if test_framework == "jest":
|
||||
return run_jest_behavioral_tests(test_paths, test_env, cwd, timeout=pytest_timeout)
|
||||
if test_framework in {"pytest", "unittest"}:
|
||||
test_files: list[str] = []
|
||||
for file in test_paths.test_files:
|
||||
|
|
|
|||
|
|
@ -75,8 +75,28 @@ class TestConfig:
|
|||
pytest_cmd: str = "pytest"
|
||||
benchmark_tests_root: Optional[Path] = None
|
||||
use_cache: bool = True
|
||||
_language: Optional[str] = None # Language identifier for multi-language support
|
||||
|
||||
@property
def test_framework(self) -> str:
    """Returns the appropriate test framework based on language.

    Returns 'jest' for JavaScript/TypeScript, 'pytest' for Python (default).
    """
    # The stale second docstring ("Always returns 'pytest'...") was a dead
    # string statement left over from the previous implementation; removed.
    if self._language in ("javascript", "typescript"):
        return "jest"
    return "pytest"
|
||||
|
||||
def set_language(self, language: str) -> None:
    """Set the language for this test config.

    Drives the `test_framework` property: "javascript"/"typescript"
    select jest, anything else selects pytest.

    Args:
        language: Language identifier (e.g., "python", "javascript").

    """
    self._language = language
|
||||
|
||||
@property
def language(self) -> Optional[str]:
    """Get the current language setting (None until set_language is called)."""
    return self._language
|
||||
|
|
|
|||
|
|
@ -1,2 +1,2 @@
|
|||
# These version placeholders will be replaced by uv-dynamic-versioning during build.
|
||||
__version__ = "0.19.1"
|
||||
__version__ = "0.19.1.post96.dev0+4e1b5cf5"
|
||||
|
|
|
|||
|
|
@ -21,6 +21,10 @@ dependencies = [
|
|||
"gitpython>=3.1.31",
|
||||
"libcst>=1.0.1",
|
||||
"jedi>=0.19.1",
|
||||
# Tree-sitter for multi-language support
|
||||
"tree-sitter>=0.23.0",
|
||||
"tree-sitter-javascript>=0.23.0",
|
||||
"tree-sitter-typescript>=0.23.0",
|
||||
"pytest-timeout>=2.1.0",
|
||||
"tomlkit>=0.11.7",
|
||||
"junitparser>=3.1.0",
|
||||
|
|
@ -305,6 +309,7 @@ __version__ = "{version}"
|
|||
# All paths are relative to this pyproject.toml's directory.
|
||||
module-root = "codeflash"
|
||||
tests-root = "codeflash"
|
||||
benchmarks-root = "tests/benchmarks"
|
||||
ignore-paths = []
|
||||
formatter-cmds = ["disabled"]
|
||||
|
||||
|
|
|
|||
283
tests/test_languages/test_function_discovery_integration.py
Normal file
283
tests/test_languages/test_function_discovery_integration.py
Normal file
|
|
@ -0,0 +1,283 @@
|
|||
"""
|
||||
Tests for the integrated multi-language function discovery.
|
||||
|
||||
These tests verify that the function discovery in functions_to_optimize.py
|
||||
correctly routes to language-specific implementations.
|
||||
"""
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from codeflash.discovery.functions_to_optimize import (
|
||||
FunctionToOptimize,
|
||||
find_all_functions_in_file,
|
||||
get_all_files_and_functions,
|
||||
get_files_for_language,
|
||||
)
|
||||
from codeflash.languages.base import Language
|
||||
|
||||
|
||||
class TestGetFilesForLanguage:
    """Tests for the get_files_for_language helper."""

    def test_get_python_files_only(self, tmp_path):
        """Requesting Python returns only .py files."""
        (tmp_path / "test.py").write_text("x = 1")
        (tmp_path / "test.js").write_text("const x = 1;")
        (tmp_path / "test.txt").write_text("hello")

        discovered = {path.name for path in get_files_for_language(tmp_path, Language.PYTHON)}

        assert "test.py" in discovered
        assert "test.js" not in discovered
        assert "test.txt" not in discovered

    def test_get_javascript_files_only(self, tmp_path):
        """Requesting JavaScript returns .js/.jsx but not .py files."""
        (tmp_path / "test.py").write_text("x = 1")
        (tmp_path / "test.js").write_text("const x = 1;")
        (tmp_path / "test.jsx").write_text("const App = () => <div/>;")

        discovered = {path.name for path in get_files_for_language(tmp_path, Language.JAVASCRIPT)}

        assert "test.py" not in discovered
        assert "test.js" in discovered
        assert "test.jsx" in discovered

    def test_get_all_supported_files(self, tmp_path):
        """With language=None, every supported language's files are returned."""
        (tmp_path / "test.py").write_text("x = 1")
        (tmp_path / "test.js").write_text("const x = 1;")
        (tmp_path / "test.txt").write_text("hello")

        discovered = {path.name for path in get_files_for_language(tmp_path, language=None)}

        assert "test.py" in discovered
        assert "test.js" in discovered
        assert "test.txt" not in discovered
|
||||
|
||||
|
||||
class TestFindAllFunctionsInFile:
    """Tests for find_all_functions_in_file routing.

    NamedTemporaryFile(delete=False) files previously leaked; each test now
    unlinks its temp file in a finally block.
    """

    def test_python_file_routes_to_python_handler(self):
        """Test that Python files use the Python handler."""
        with tempfile.NamedTemporaryFile(suffix=".py", mode="w", delete=False) as f:
            f.write("""
def add(a, b):
    return a + b

def multiply(a, b):
    return a * b
""")
            f.flush()
            file_path = Path(f.name)

        try:
            functions = find_all_functions_in_file(file_path)

            assert len(functions.get(file_path, [])) == 2
            names = {fn.function_name for fn in functions[file_path]}
            assert names == {"add", "multiply"}

            # All should have language="python"
            for fn in functions[file_path]:
                assert fn.language == "python"
        finally:
            file_path.unlink(missing_ok=True)

    def test_javascript_file_routes_to_js_handler(self):
        """Test that JavaScript files use the JavaScript handler."""
        with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f:
            f.write("""
function add(a, b) {
    return a + b;
}

function multiply(a, b) {
    return a * b;
}
""")
            f.flush()
            file_path = Path(f.name)

        try:
            functions = find_all_functions_in_file(file_path)

            assert len(functions.get(file_path, [])) == 2
            names = {fn.function_name for fn in functions[file_path]}
            assert names == {"add", "multiply"}

            # All should have language="javascript"
            for fn in functions[file_path]:
                assert fn.language == "javascript"
        finally:
            file_path.unlink(missing_ok=True)

    def test_unsupported_file_returns_empty(self):
        """Test that unsupported file extensions return empty."""
        with tempfile.NamedTemporaryFile(suffix=".txt", mode="w", delete=False) as f:
            f.write("this is not code")
            f.flush()
            file_path = Path(f.name)

        try:
            functions = find_all_functions_in_file(file_path)
            assert functions == {}
        finally:
            file_path.unlink(missing_ok=True)

    def test_function_to_optimize_has_correct_fields(self):
        """Test that FunctionToOptimize has all required fields populated."""
        with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f:
            f.write("""
class Calculator {
    add(a, b) {
        return a + b;
    }
}
""")
            f.flush()
            file_path = Path(f.name)

        try:
            functions = find_all_functions_in_file(file_path)
            assert len(functions.get(file_path, [])) == 1

            fn = functions[file_path][0]
            assert fn.function_name == "add"
            assert fn.file_path == file_path
            assert fn.starting_line is not None
            assert fn.ending_line is not None
            assert fn.language == "javascript"
            assert len(fn.parents) == 1
            assert fn.parents[0].name == "Calculator"
        finally:
            file_path.unlink(missing_ok=True)
|
||||
|
||||
|
||||
class TestGetAllFilesAndFunctions:
    """Tests for get_all_files_and_functions with multi-language support."""

    def test_discovers_python_files_by_default(self, tmp_path):
        """A lone Python module is discovered without specifying a language."""
        (tmp_path / "module.py").write_text("\ndef add(a, b):\n    return a + b\n")

        assert len(get_all_files_and_functions(tmp_path)) == 1

    def test_discovers_javascript_files_when_specified(self, tmp_path):
        """A JavaScript module is discovered when the language is given."""
        (tmp_path / "module.js").write_text(
            "\nfunction add(a, b) {\n    return a + b;\n}\n"
        )

        result = get_all_files_and_functions(tmp_path, language=Language.JAVASCRIPT)
        assert len(result) == 1

    def test_discovers_both_languages_when_none_specified(self, tmp_path):
        """Python and JavaScript files are both discovered with language=None."""
        (tmp_path / "py_module.py").write_text("\ndef py_func():\n    return 1\n")
        (tmp_path / "js_module.js").write_text(
            "\nfunction jsFunc() {\n    return 1;\n}\n"
        )

        functions = get_all_files_and_functions(tmp_path, language=None)

        # Both files were found ...
        assert len(functions) == 2

        # ... and both languages are represented among the functions.
        discovered_languages = {
            fn.language for funcs in functions.values() for fn in funcs
        }
        assert "python" in discovered_languages
        assert "javascript" in discovered_languages
|
||||
|
||||
|
||||
class TestBackwardCompatibility:
    """Tests to ensure backward compatibility with existing Python code."""

    @staticmethod
    def _write_temp(source: str, suffix: str = ".py") -> Path:
        """Write ``source`` to a closed temporary file and return its path.

        The file is closed before returning so it can be re-opened on every
        platform (Windows cannot re-open a file that is still held open).
        The caller is responsible for unlinking it.
        """
        with tempfile.NamedTemporaryFile(suffix=suffix, mode="w", delete=False) as f:
            f.write(source)
        return Path(f.name)

    def test_python_functions_detected_correctly(self):
        """Test that Python functions are correctly detected."""
        file_path = self._write_temp(
            "def first():\n"
            "    return 1\n"
            "\n"
            "def second():\n"
            "    x = 1\n"
            "    return x\n"
        )
        try:
            functions = find_all_functions_in_file(file_path)

            # Should find both functions.
            assert len(functions[file_path]) == 2
            names = {fn.function_name for fn in functions[file_path]}
            assert names == {"first", "second"}

            # All should have language="python".
            for fn in functions[file_path]:
                assert fn.language == "python"
        finally:
            # Fix: the original test leaked the temp file (delete=False, no cleanup).
            file_path.unlink()

    def test_python_class_methods_detected(self):
        """Test that Python class methods are correctly detected."""
        file_path = self._write_temp(
            "class MyClass:\n"
            "    def method(self):\n"
            "        return 1\n"
        )
        try:
            functions = find_all_functions_in_file(file_path)

            assert len(functions[file_path]) == 1
            fn = functions[file_path][0]
            assert fn.function_name == "method"
            assert len(fn.parents) == 1
            assert fn.parents[0].name == "MyClass"
        finally:
            file_path.unlink()

    def test_python_async_functions_detected(self):
        """Test that Python async functions are correctly detected."""
        file_path = self._write_temp(
            "async def async_func():\n"
            "    return 1\n"
        )
        try:
            functions = find_all_functions_in_file(file_path)

            assert len(functions[file_path]) == 1
            fn = functions[file_path][0]
            assert fn.function_name == "async_func"
            assert fn.is_async is True
        finally:
            file_path.unlink()

    def test_functions_without_return_excluded(self):
        """Test that functions without return statements are excluded."""
        file_path = self._write_temp(
            "def with_return():\n"
            "    return 1\n"
            "\n"
            "def without_return():\n"
            "    print(\"hello\")\n"
        )
        try:
            functions = find_all_functions_in_file(file_path)

            assert len(functions[file_path]) == 1
            assert functions[file_path][0].function_name == "with_return"
        finally:
            file_path.unlink()
270
tests/test_languages/test_javascript_e2e.py
Normal file
270
tests/test_languages/test_javascript_e2e.py
Normal file
|
|
@ -0,0 +1,270 @@
|
|||
"""
|
||||
End-to-end integration tests for JavaScript pipeline.
|
||||
|
||||
Tests the full optimization pipeline for JavaScript:
|
||||
- Function discovery
|
||||
- Code context extraction
|
||||
- Test discovery
|
||||
- Code replacement
|
||||
"""
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from codeflash.discovery.functions_to_optimize import (
|
||||
FunctionToOptimize,
|
||||
find_all_functions_in_file,
|
||||
get_files_for_language,
|
||||
)
|
||||
from codeflash.languages.base import Language
|
||||
|
||||
|
||||
class TestJavaScriptFunctionDiscovery:
    """Tests for JavaScript function discovery in the main pipeline."""

    @pytest.fixture
    def js_project_dir(self):
        """Locate the JavaScript sample project, skipping if it is absent."""
        repo_root = Path(__file__).parent.parent.parent
        sample_dir = repo_root / "code_to_optimize_js"
        if not sample_dir.exists():
            pytest.skip("code_to_optimize_js directory not found")
        return sample_dir

    def test_discover_functions_in_fibonacci(self, js_project_dir):
        """The exported functions of fibonacci.js are all discovered."""
        fib_file = js_project_dir / "fibonacci.js"
        if not fib_file.exists():
            pytest.skip("fibonacci.js not found")

        discovered = find_all_functions_in_file(fib_file)

        assert fib_file in discovered
        found = discovered[fib_file]

        # The main exported functions must all be present...
        names = {fn.function_name for fn in found}
        for expected in ("fibonacci", "isFibonacci", "isPerfectSquare", "fibonacciSequence"):
            assert expected in names

        # ...and every discovered function must be tagged as JavaScript.
        for fn in found:
            assert fn.language == "javascript"

    def test_discover_functions_in_bubble_sort(self, js_project_dir):
        """The functions of bubble_sort.js are discovered."""
        sort_file = js_project_dir / "bubble_sort.js"
        if not sort_file.exists():
            pytest.skip("bubble_sort.js not found")

        discovered = find_all_functions_in_file(sort_file)

        assert sort_file in discovered
        names = {fn.function_name for fn in discovered[sort_file]}
        assert "bubbleSort" in names

    def test_get_javascript_files(self, js_project_dir):
        """JavaScript files are listed for the sample project directory."""
        files = get_files_for_language(js_project_dir, Language.JAVASCRIPT)

        # At least fibonacci.js, bubble_sort.js and string_utils.js.
        js_files = [path for path in files if path.suffix == ".js"]
        assert len(js_files) >= 3

        # Test files live under tests/, so the directory root holds the samples.
        root_files = [path for path in js_files if path.parent == js_project_dir]
        assert len(root_files) >= 3
||||
class TestJavaScriptCodeContext:
    """Tests for JavaScript code context extraction."""

    @pytest.fixture
    def js_project_dir(self):
        """Locate the JavaScript sample project, skipping if it is absent."""
        repo_root = Path(__file__).parent.parent.parent
        sample_dir = repo_root / "code_to_optimize_js"
        if not sample_dir.exists():
            pytest.skip("code_to_optimize_js directory not found")
        return sample_dir

    def test_extract_code_context_for_javascript(self, js_project_dir):
        """Optimization context can be extracted for a JavaScript function."""
        from codeflash.context.code_context_extractor import get_code_optimization_context

        fib_file = js_project_dir / "fibonacci.js"
        if not fib_file.exists():
            pytest.skip("fibonacci.js not found")

        discovered = find_all_functions_in_file(fib_file)

        # Pick the fibonacci function out of the discovered set.
        fib_func = next(
            (fn for fn in discovered[fib_file] if fn.function_name == "fibonacci"),
            None,
        )
        assert fib_func is not None

        context = get_code_optimization_context(fib_func, js_project_dir)

        # The read-writable section must exist, be JavaScript, and be non-empty.
        assert context.read_writable_code is not None
        assert context.read_writable_code.language == "javascript"
        assert len(context.read_writable_code.code_strings) > 0

        # The extracted code must contain the target function.
        assert "fibonacci" in context.read_writable_code.code_strings[0].code
||||
class TestJavaScriptCodeReplacement:
    """Tests for JavaScript code replacement."""

    def test_replace_function_in_javascript_file(self):
        """Replacing one function leaves its sibling untouched."""
        from codeflash.languages import get_language_support
        from codeflash.languages.base import FunctionInfo, Language

        original_source = """
function add(a, b) {
    return a + b;
}

function multiply(a, b) {
    return a * b;
}
"""

        new_function = """function add(a, b) {
    // Optimized version
    return a + b;
}"""

        js_support = get_language_support(Language.JAVASCRIPT)

        # Describe the `add` function so the replacer can locate it.
        target = FunctionInfo(
            name="add",
            file_path=Path("/tmp/test.js"),
            start_line=2,
            end_line=4,
            language=Language.JAVASCRIPT,
        )

        updated = js_support.replace_function(original_source, target, new_function)

        # The new body landed, and the untouched sibling survived.
        assert "// Optimized version" in updated
        assert "multiply" in updated
||||
class TestJavaScriptTestDiscovery:
    """Tests for JavaScript test discovery."""

    @pytest.fixture
    def js_project_dir(self):
        """Locate the JavaScript sample project, skipping if it is absent."""
        repo_root = Path(__file__).parent.parent.parent
        sample_dir = repo_root / "code_to_optimize_js"
        if not sample_dir.exists():
            pytest.skip("code_to_optimize_js directory not found")
        return sample_dir

    def test_discover_jest_tests(self, js_project_dir):
        """Jest tests exercising the fibonacci function are discovered."""
        from codeflash.languages import get_language_support
        from codeflash.languages.base import FunctionInfo, Language

        js_support = get_language_support(Language.JAVASCRIPT)
        test_root = js_project_dir / "tests"

        if not test_root.exists():
            pytest.skip("tests directory not found")

        # Describe the fibonacci function under optimization.
        func_info = FunctionInfo(
            name="fibonacci",
            file_path=js_project_dir / "fibonacci.js",
            start_line=11,
            end_line=16,
            language=Language.JAVASCRIPT,
        )

        tests = js_support.discover_tests(test_root, [func_info])

        # Tests for fibonacci should be keyed by its qualified name, or at
        # least mention it somewhere in the result.
        assert func_info.qualified_name in tests or "fibonacci" in str(tests)
||||
class TestJavaScriptPipelineIntegration:
    """Integration tests for the full JavaScript pipeline."""

    def test_function_to_optimize_has_correct_fields(self):
        """FunctionToOptimize entries from JavaScript carry all required fields."""
        source = """
class Calculator {
    add(a, b) {
        return a + b;
    }

    subtract(a, b) {
        return a - b;
    }
}

function standalone(x) {
    return x * 2;
}
"""
        # Close the file before reading it back (required on Windows).
        with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f:
            f.write(source)
        file_path = Path(f.name)
        try:
            functions = find_all_functions_in_file(file_path)

            # Two class methods plus the standalone function.
            assert len(functions.get(file_path, [])) >= 3

            # The standalone function has no parents.
            standalone_fn = next(
                (fn for fn in functions[file_path] if fn.function_name == "standalone"),
                None,
            )
            assert standalone_fn is not None
            assert standalone_fn.language == "javascript"
            assert len(standalone_fn.parents) == 0

            # The class method is parented by Calculator.
            add_fn = next(
                (fn for fn in functions[file_path] if fn.function_name == "add"),
                None,
            )
            assert add_fn is not None
            assert add_fn.language == "javascript"
            assert len(add_fn.parents) == 1
            assert add_fn.parents[0].name == "Calculator"
        finally:
            # Fix: the original test leaked the temp file (delete=False, no cleanup).
            file_path.unlink()

    def test_code_strings_markdown_uses_javascript_tag(self):
        """CodeStringsMarkdown renders JavaScript code fences."""
        from codeflash.models.models import CodeString, CodeStringsMarkdown

        code_strings = CodeStringsMarkdown(
            code_strings=[
                CodeString(
                    code="function add(a, b) { return a + b; }",
                    file_path=Path("test.js"),
                    language="javascript",
                )
            ],
            language="javascript",
        )

        markdown = code_strings.markdown
        assert "```javascript" in markdown or "```js" in markdown.lower()
697
tests/test_languages/test_javascript_support.py
Normal file
697
tests/test_languages/test_javascript_support.py
Normal file
|
|
@ -0,0 +1,697 @@
|
|||
"""
|
||||
Extensive tests for the JavaScript language support implementation.
|
||||
|
||||
These tests verify that JavaScriptSupport correctly discovers functions,
|
||||
replaces code, and integrates with the codeflash language abstraction.
|
||||
"""
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from codeflash.languages.base import (
|
||||
FunctionFilterCriteria,
|
||||
FunctionInfo,
|
||||
Language,
|
||||
ParentInfo,
|
||||
)
|
||||
from codeflash.languages.javascript.support import JavaScriptSupport
|
||||
|
||||
|
||||
@pytest.fixture
def js_support():
    """Create a fresh JavaScriptSupport instance for each test."""
    return JavaScriptSupport()
||||
class TestJavaScriptSupportProperties:
    """Tests for JavaScriptSupport properties."""

    def test_language(self, js_support):
        """The language property reports JavaScript."""
        assert js_support.language == Language.JAVASCRIPT

    def test_file_extensions(self, js_support):
        """All common JavaScript source extensions are recognised."""
        extensions = js_support.file_extensions
        for ext in (".js", ".jsx", ".mjs", ".cjs"):
            assert ext in extensions

    def test_test_framework(self, js_support):
        """Jest is the configured test framework."""
        assert js_support.test_framework == "jest"
class TestDiscoverFunctions:
    """Tests for discover_functions method."""

    @staticmethod
    def _discover(js_support, source, criteria=None, suffix=".js"):
        """Write ``source`` to a temp file, run discovery on it, then delete it.

        The file is closed before discovery (safe on Windows, which cannot
        re-open a file that is still held open) and always unlinked afterwards
        — the original tests leaked their temp files (``delete=False`` with no
        cleanup).
        """
        with tempfile.NamedTemporaryFile(suffix=suffix, mode="w", delete=False) as f:
            f.write(source)
        path = Path(f.name)
        try:
            if criteria is None:
                return js_support.discover_functions(path)
            return js_support.discover_functions(path, criteria)
        finally:
            path.unlink()

    def test_discover_simple_function(self, js_support):
        """Test discovering a simple function declaration."""
        functions = self._discover(js_support, """
function add(a, b) {
    return a + b;
}
""")
        assert len(functions) == 1
        assert functions[0].name == "add"
        assert functions[0].language == Language.JAVASCRIPT

    def test_discover_multiple_functions(self, js_support):
        """Test discovering multiple functions."""
        functions = self._discover(js_support, """
function add(a, b) {
    return a + b;
}

function subtract(a, b) {
    return a - b;
}

function multiply(a, b) {
    return a * b;
}
""")
        assert len(functions) == 3
        assert {func.name for func in functions} == {"add", "subtract", "multiply"}

    def test_discover_arrow_function(self, js_support):
        """Test discovering arrow functions assigned to variables."""
        functions = self._discover(js_support, """
const add = (a, b) => {
    return a + b;
};

const multiply = (x, y) => x * y;
""")
        assert len(functions) == 2
        assert {func.name for func in functions} == {"add", "multiply"}

    def test_discover_function_without_return_excluded(self, js_support):
        """Test that functions without return are excluded by default."""
        functions = self._discover(js_support, """
function withReturn() {
    return 1;
}

function withoutReturn() {
    console.log("hello");
}
""")
        # Only the function with a return statement is eligible.
        assert len(functions) == 1
        assert functions[0].name == "withReturn"

    def test_discover_class_methods(self, js_support):
        """Test discovering class methods."""
        functions = self._discover(js_support, """
class Calculator {
    add(a, b) {
        return a + b;
    }

    multiply(a, b) {
        return a * b;
    }
}
""")
        assert len(functions) == 2
        for func in functions:
            assert func.is_method is True
            assert func.class_name == "Calculator"

    def test_discover_async_functions(self, js_support):
        """Test discovering async functions."""
        functions = self._discover(js_support, """
async function fetchData(url) {
    return await fetch(url);
}

function syncFunction() {
    return 1;
}
""")
        assert len(functions) == 2

        async_func = next(func for func in functions if func.name == "fetchData")
        sync_func = next(func for func in functions if func.name == "syncFunction")

        assert async_func.is_async is True
        assert sync_func.is_async is False

    def test_discover_with_filter_exclude_async(self, js_support):
        """Test filtering out async functions."""
        functions = self._discover(
            js_support,
            """
async function asyncFunc() {
    return 1;
}

function syncFunc() {
    return 2;
}
""",
            criteria=FunctionFilterCriteria(include_async=False),
        )
        assert len(functions) == 1
        assert functions[0].name == "syncFunc"

    def test_discover_with_filter_exclude_methods(self, js_support):
        """Test filtering out class methods."""
        functions = self._discover(
            js_support,
            """
function standalone() {
    return 1;
}

class MyClass {
    method() {
        return 2;
    }
}
""",
            criteria=FunctionFilterCriteria(include_methods=False),
        )
        assert len(functions) == 1
        assert functions[0].name == "standalone"

    def test_discover_line_numbers(self, js_support):
        """Test that line numbers are correctly captured."""
        # No leading newline: func1 must start on line 1.
        functions = self._discover(js_support, """function func1() {
    return 1;
}

function func2() {
    const x = 1;
    const y = 2;
    return x + y;
}
""")
        func1 = next(func for func in functions if func.name == "func1")
        func2 = next(func for func in functions if func.name == "func2")

        assert func1.start_line == 1
        assert func1.end_line == 3
        assert func2.start_line == 5
        assert func2.end_line == 9

    def test_discover_generator_function(self, js_support):
        """Test discovering generator functions."""
        functions = self._discover(js_support, """
function* numberGenerator() {
    yield 1;
    yield 2;
    return 3;
}
""")
        assert len(functions) == 1
        assert functions[0].name == "numberGenerator"

    def test_discover_invalid_file_returns_empty(self, js_support):
        """Test that invalid JavaScript does not crash discovery."""
        functions = self._discover(js_support, "this is not valid javascript {{{{")
        # Tree-sitter is lenient, so it may still parse partial code.
        # The important thing is that discovery returns a list instead of raising.
        assert isinstance(functions, list)

    def test_discover_nonexistent_file_returns_empty(self, js_support):
        """Test that a nonexistent file yields an empty list."""
        functions = js_support.discover_functions(Path("/nonexistent/file.js"))
        assert functions == []

    def test_discover_function_expression(self, js_support):
        """Test discovering function expressions."""
        functions = self._discover(js_support, """
const add = function(a, b) {
    return a + b;
};
""")
        assert len(functions) == 1
        assert functions[0].name == "add"

    def test_discover_immediately_invoked_function_excluded(self, js_support):
        """Test that anonymous IIFEs are excluded when require_name is True."""
        functions = self._discover(js_support, """
(function() {
    return 1;
})();

function named() {
    return 2;
}
""")
        # Only the named function should be discovered.
        assert len(functions) == 1
        assert functions[0].name == "named"
class TestReplaceFunction:
    """Tests for replace_function method."""

    def test_replace_simple_function(self, js_support):
        """Swapping one function leaves its sibling untouched."""
        original = """function add(a, b) {
    return a + b;
}

function multiply(a, b) {
    return a * b;
}
"""
        target = FunctionInfo(
            name="add",
            file_path=Path("/test.js"),
            start_line=1,
            end_line=3,
        )
        replacement = """function add(a, b) {
    // Optimized
    return (a + b) | 0;
}
"""
        updated = js_support.replace_function(original, target, replacement)

        for expected in ("// Optimized", "return (a + b) | 0", "function multiply"):
            assert expected in updated

    def test_replace_preserves_surrounding_code(self, js_support):
        """Header, imports, sibling functions and footer all survive."""
        original = """// Header comment
import { something } from './module';

function target() {
    return 1;
}

function other() {
    return 2;
}

// Footer
"""
        target = FunctionInfo(
            name="target",
            file_path=Path("/test.js"),
            start_line=4,
            end_line=6,
        )
        replacement = """function target() {
    return 42;
}
"""
        updated = js_support.replace_function(original, target, replacement)

        for expected in (
            "// Header comment",
            "import { something }",
            "return 42",
            "function other",
            "// Footer",
        ):
            assert expected in updated

    def test_replace_with_indentation_adjustment(self, js_support):
        """An unindented replacement is re-indented to match its class."""
        original = """class Calculator {
    add(a, b) {
        return a + b;
    }
}
"""
        target = FunctionInfo(
            name="add",
            file_path=Path("/test.js"),
            start_line=2,
            end_line=4,
            parents=(ParentInfo(name="Calculator", type="ClassDef"),),
        )
        # The replacement intentionally carries no leading indentation.
        replacement = """add(a, b) {
    return (a + b) | 0;
}
"""
        updated = js_support.replace_function(original, target, replacement)

        # The spliced method line must have regained its 4-space indent.
        method_line = next(line for line in updated.splitlines() if "add(a, b)" in line)
        assert method_line.startswith("    ")

    def test_replace_arrow_function(self, js_support):
        """Arrow functions can be replaced just like declarations."""
        original = """const add = (a, b) => {
    return a + b;
};

const multiply = (x, y) => x * y;
"""
        target = FunctionInfo(
            name="add",
            file_path=Path("/test.js"),
            start_line=1,
            end_line=3,
        )
        replacement = """const add = (a, b) => {
    return (a + b) | 0;
};
"""
        updated = js_support.replace_function(original, target, replacement)

        assert "(a + b) | 0" in updated
        assert "multiply" in updated
class TestValidateSyntax:
    """Tests for validate_syntax method."""

    def test_valid_syntax(self, js_support):
        """Well-formed JavaScript passes validation."""
        snippet = """
function add(a, b) {
    return a + b;
}

class Calculator {
    multiply(x, y) {
        return x * y;
    }
}
"""
        assert js_support.validate_syntax(snippet) is True

    def test_invalid_syntax(self, js_support):
        """A malformed parameter list fails validation."""
        snippet = """
function add(a, b {
    return a + b;
}
"""
        assert js_support.validate_syntax(snippet) is False

    def test_empty_string_valid(self, js_support):
        """An empty program is syntactically valid."""
        assert js_support.validate_syntax("") is True

    def test_syntax_error_types(self, js_support):
        """Several kinds of unterminated constructs fail validation."""
        # Unclosed bracket.
        assert js_support.validate_syntax("const x = [1, 2, 3") is False

        # Missing closing brace.
        assert js_support.validate_syntax("function foo() {") is False
class TestNormalizeCode:
    """Tests for normalize_code method."""

    def test_removes_comments(self, js_support):
        """Single-line comments are stripped while code survives."""
        snippet = """
function add(a, b) {
    // Add two numbers
    return a + b;
}
"""
        normalized = js_support.normalize_code(snippet)
        assert "// Add two numbers" not in normalized
        assert "return a + b" in normalized

    def test_preserves_functionality(self, js_support):
        """Normalization keeps the function definition and return intact."""
        snippet = """
function add(a, b) {
    // Comment
    return a + b;
}
"""
        normalized = js_support.normalize_code(snippet)
        assert "function add" in normalized
        assert "return" in normalized
class TestExtractCodeContext:
    """Tests for extract_code_context method."""

    def test_extract_simple_function(self, js_support):
        """Context for a lone function contains its full source."""
        # Close the file before extraction (required on Windows).
        with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f:
            f.write("""function add(a, b) {
    return a + b;
}
""")
        file_path = Path(f.name)
        try:
            func = FunctionInfo(
                name="add",
                file_path=file_path,
                start_line=1,
                end_line=3,
            )

            context = js_support.extract_code_context(
                func,
                file_path.parent,
                file_path.parent,
            )

            assert "function add" in context.target_code
            assert "return a + b" in context.target_code
            assert context.target_file == file_path
            assert context.language == Language.JAVASCRIPT
        finally:
            # Fix: the original test leaked the temp file (delete=False, no cleanup).
            file_path.unlink()

    def test_extract_with_helper(self, js_support):
        """Context extraction for a function that calls a sibling helper."""
        with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f:
            f.write("""function helper(x) {
    return x * 2;
}

function main(a) {
    return helper(a) + 1;
}
""")
        file_path = Path(f.name)
        try:
            # Discover first so the FunctionInfo carries accurate line numbers.
            functions = js_support.discover_functions(file_path)
            main_func = next(func for func in functions if func.name == "main")

            context = js_support.extract_code_context(
                main_func,
                file_path.parent,
                file_path.parent,
            )

            assert "function main" in context.target_code
            # Helper discovery is best-effort; just ensure the field is usable.
            assert len(context.helper_functions) >= 0
        finally:
            file_path.unlink()
class TestIntegration:
    """Integration tests for JavaScriptSupport."""

    @staticmethod
    def _write_temp(source, suffix=".js"):
        """Write ``source`` to a closed temporary file and return its Path.

        Closing before returning makes re-opening safe on all platforms; the
        caller is responsible for unlinking the file.
        """
        with tempfile.NamedTemporaryFile(suffix=suffix, mode="w", delete=False) as f:
            f.write(source)
        return Path(f.name)

    def test_discover_and_replace_workflow(self, js_support):
        """Full discover -> replace -> validate workflow."""
        original_code = """function fibonacci(n) {
    if (n <= 1) {
        return n;
    }
    return fibonacci(n - 1) + fibonacci(n - 2);
}
"""
        file_path = self._write_temp(original_code)
        try:
            # Discover
            functions = js_support.discover_functions(file_path)
            assert len(functions) == 1
            func = functions[0]
            assert func.name == "fibonacci"
        finally:
            # Fix: the original test leaked the temp file (delete=False, no cleanup).
            file_path.unlink()

        # Replace
        optimized_code = """function fibonacci(n) {
    // Memoized version
    const memo = {0: 0, 1: 1};
    for (let i = 2; i <= n; i++) {
        memo[i] = memo[i-1] + memo[i-2];
    }
    return memo[n];
}
"""
        result = js_support.replace_function(original_code, func, optimized_code)

        # Validate
        assert js_support.validate_syntax(result) is True
        assert "Memoized version" in result
        assert "memo[n]" in result

    def test_multiple_classes_and_functions(self, js_support):
        """Discovery across two classes plus a standalone function."""
        file_path = self._write_temp("""
class Calculator {
    add(a, b) {
        return a + b;
    }

    subtract(a, b) {
        return a - b;
    }
}

class StringUtils {
    reverse(s) {
        return s.split('').reverse().join('');
    }
}

function standalone() {
    return 42;
}
""")
        try:
            functions = js_support.discover_functions(file_path)

            # Should find 4 functions in total.
            assert len(functions) == 4

            # Two Calculator methods, one StringUtils method, one standalone.
            calc_methods = [f for f in functions if f.class_name == "Calculator"]
            assert len(calc_methods) == 2

            string_methods = [f for f in functions if f.class_name == "StringUtils"]
            assert len(string_methods) == 1

            standalone_funcs = [f for f in functions if f.class_name is None]
            assert len(standalone_funcs) == 1
        finally:
            file_path.unlink()

    def test_jsx_file(self, js_support):
        """Discovery works on JSX component files."""
        file_path = self._write_temp("""
import React from 'react';

function Button({ onClick, children }) {
    return <button onClick={onClick}>{children}</button>;
}

const Card = ({ title, content }) => {
    return (
        <div className="card">
            <h2>{title}</h2>
            <p>{content}</p>
        </div>
    );
};

export default Button;
""", suffix=".jsx")
        try:
            functions = js_support.discover_functions(file_path)

            # Both components should be discovered.
            names = {func.name for func in functions}
            assert "Button" in names
            assert "Card" in names
        finally:
            file_path.unlink()
class TestJestTestDiscovery:
|
||||
"""Tests for Jest test discovery."""
|
||||
|
||||
def test_find_jest_tests(self, js_support):
|
||||
"""Test finding Jest test functions."""
|
||||
with tempfile.NamedTemporaryFile(suffix=".test.js", mode="w", delete=False) as f:
|
||||
f.write("""
|
||||
import { add } from './math';
|
||||
|
||||
describe('Math functions', () => {
|
||||
test('add returns sum', () => {
|
||||
expect(add(1, 2)).toBe(3);
|
||||
});
|
||||
|
||||
it('handles negative numbers', () => {
|
||||
expect(add(-1, 1)).toBe(0);
|
||||
});
|
||||
});
|
||||
""")
|
||||
f.flush()
|
||||
file_path = Path(f.name)
|
||||
|
||||
source = file_path.read_text()
|
||||
from codeflash.languages.treesitter_utils import get_analyzer_for_file
|
||||
analyzer = get_analyzer_for_file(file_path)
|
||||
test_names = js_support._find_jest_tests(source, analyzer)
|
||||
|
||||
assert "Math functions" in test_names
|
||||
assert "add returns sum" in test_names
|
||||
assert "handles negative numbers" in test_names
|
||||
1154
tests/test_languages/test_language_parity.py
Normal file
1154
tests/test_languages/test_language_parity.py
Normal file
File diff suppressed because it is too large
Load diff
527
tests/test_languages/test_treesitter_utils.py
Normal file
527
tests/test_languages/test_treesitter_utils.py
Normal file
|
|
@ -0,0 +1,527 @@
|
|||
"""
|
||||
Extensive tests for the tree-sitter utilities module.
|
||||
|
||||
These tests verify that the TreeSitterAnalyzer correctly parses and
|
||||
analyzes JavaScript/TypeScript code.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
from codeflash.languages.treesitter_utils import (
|
||||
FunctionNode,
|
||||
ImportInfo,
|
||||
TreeSitterAnalyzer,
|
||||
TreeSitterLanguage,
|
||||
get_analyzer_for_file,
|
||||
)
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class TestTreeSitterLanguage:
|
||||
"""Tests for TreeSitterLanguage enum."""
|
||||
|
||||
def test_language_values(self):
|
||||
"""Test that language enum has expected values."""
|
||||
assert TreeSitterLanguage.JAVASCRIPT.value == "javascript"
|
||||
assert TreeSitterLanguage.TYPESCRIPT.value == "typescript"
|
||||
assert TreeSitterLanguage.TSX.value == "tsx"
|
||||
|
||||
|
||||
class TestTreeSitterAnalyzerCreation:
|
||||
"""Tests for TreeSitterAnalyzer initialization."""
|
||||
|
||||
def test_create_javascript_analyzer(self):
|
||||
"""Test creating JavaScript analyzer."""
|
||||
analyzer = TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT)
|
||||
assert analyzer.language == TreeSitterLanguage.JAVASCRIPT
|
||||
|
||||
def test_create_typescript_analyzer(self):
|
||||
"""Test creating TypeScript analyzer."""
|
||||
analyzer = TreeSitterAnalyzer(TreeSitterLanguage.TYPESCRIPT)
|
||||
assert analyzer.language == TreeSitterLanguage.TYPESCRIPT
|
||||
|
||||
def test_create_with_string(self):
|
||||
"""Test creating analyzer with string language name."""
|
||||
analyzer = TreeSitterAnalyzer("javascript")
|
||||
assert analyzer.language == TreeSitterLanguage.JAVASCRIPT
|
||||
|
||||
def test_lazy_parser_creation(self):
|
||||
"""Test that parser is created lazily."""
|
||||
analyzer = TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT)
|
||||
assert analyzer._parser is None
|
||||
# Access parser property
|
||||
_ = analyzer.parser
|
||||
assert analyzer._parser is not None
|
||||
|
||||
|
||||
class TestGetAnalyzerForFile:
|
||||
"""Tests for get_analyzer_for_file function."""
|
||||
|
||||
def test_js_file(self):
|
||||
"""Test getting analyzer for .js file."""
|
||||
analyzer = get_analyzer_for_file(Path("/test/file.js"))
|
||||
assert analyzer.language == TreeSitterLanguage.JAVASCRIPT
|
||||
|
||||
def test_jsx_file(self):
|
||||
"""Test getting analyzer for .jsx file."""
|
||||
analyzer = get_analyzer_for_file(Path("/test/file.jsx"))
|
||||
assert analyzer.language == TreeSitterLanguage.JAVASCRIPT
|
||||
|
||||
def test_ts_file(self):
|
||||
"""Test getting analyzer for .ts file."""
|
||||
analyzer = get_analyzer_for_file(Path("/test/file.ts"))
|
||||
assert analyzer.language == TreeSitterLanguage.TYPESCRIPT
|
||||
|
||||
def test_tsx_file(self):
|
||||
"""Test getting analyzer for .tsx file."""
|
||||
analyzer = get_analyzer_for_file(Path("/test/file.tsx"))
|
||||
assert analyzer.language == TreeSitterLanguage.TSX
|
||||
|
||||
def test_mjs_file(self):
|
||||
"""Test getting analyzer for .mjs file."""
|
||||
analyzer = get_analyzer_for_file(Path("/test/file.mjs"))
|
||||
assert analyzer.language == TreeSitterLanguage.JAVASCRIPT
|
||||
|
||||
def test_cjs_file(self):
|
||||
"""Test getting analyzer for .cjs file."""
|
||||
analyzer = get_analyzer_for_file(Path("/test/file.cjs"))
|
||||
assert analyzer.language == TreeSitterLanguage.JAVASCRIPT
|
||||
|
||||
|
||||
class TestParsing:
|
||||
"""Tests for parsing functionality."""
|
||||
|
||||
@pytest.fixture
|
||||
def js_analyzer(self):
|
||||
"""Create a JavaScript analyzer."""
|
||||
return TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT)
|
||||
|
||||
def test_parse_simple_code(self, js_analyzer):
|
||||
"""Test parsing simple JavaScript code."""
|
||||
code = "const x = 1;"
|
||||
tree = js_analyzer.parse(code)
|
||||
assert tree.root_node is not None
|
||||
assert not tree.root_node.has_error
|
||||
|
||||
def test_parse_bytes(self, js_analyzer):
|
||||
"""Test parsing code as bytes."""
|
||||
code = b"const x = 1;"
|
||||
tree = js_analyzer.parse(code)
|
||||
assert tree.root_node is not None
|
||||
|
||||
def test_parse_invalid_code(self, js_analyzer):
|
||||
"""Test parsing invalid code marks errors."""
|
||||
code = "function foo( {"
|
||||
tree = js_analyzer.parse(code)
|
||||
assert tree.root_node.has_error
|
||||
|
||||
def test_get_node_text(self, js_analyzer):
|
||||
"""Test extracting text from a node."""
|
||||
code = "const x = 1;"
|
||||
code_bytes = code.encode("utf8")
|
||||
tree = js_analyzer.parse(code_bytes)
|
||||
text = js_analyzer.get_node_text(tree.root_node, code_bytes)
|
||||
assert text == code
|
||||
|
||||
|
||||
class TestFindFunctions:
|
||||
"""Tests for find_functions method."""
|
||||
|
||||
@pytest.fixture
|
||||
def js_analyzer(self):
|
||||
"""Create a JavaScript analyzer."""
|
||||
return TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT)
|
||||
|
||||
def test_find_function_declaration(self, js_analyzer):
|
||||
"""Test finding function declarations."""
|
||||
code = """
|
||||
function add(a, b) {
|
||||
return a + b;
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "add"
|
||||
assert functions[0].is_arrow is False
|
||||
assert functions[0].is_async is False
|
||||
assert functions[0].is_method is False
|
||||
|
||||
def test_find_arrow_function(self, js_analyzer):
|
||||
"""Test finding arrow functions."""
|
||||
code = """
|
||||
const add = (a, b) => {
|
||||
return a + b;
|
||||
};
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "add"
|
||||
assert functions[0].is_arrow is True
|
||||
|
||||
def test_find_arrow_function_concise(self, js_analyzer):
|
||||
"""Test finding concise arrow functions."""
|
||||
code = "const double = x => x * 2;"
|
||||
functions = js_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "double"
|
||||
assert functions[0].is_arrow is True
|
||||
|
||||
def test_find_async_function(self, js_analyzer):
|
||||
"""Test finding async functions."""
|
||||
code = """
|
||||
async function fetchData(url) {
|
||||
return await fetch(url);
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "fetchData"
|
||||
assert functions[0].is_async is True
|
||||
|
||||
def test_find_class_methods(self, js_analyzer):
|
||||
"""Test finding class methods."""
|
||||
code = """
|
||||
class Calculator {
|
||||
add(a, b) {
|
||||
return a + b;
|
||||
}
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code, include_methods=True)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "add"
|
||||
assert functions[0].is_method is True
|
||||
assert functions[0].class_name == "Calculator"
|
||||
|
||||
def test_exclude_methods(self, js_analyzer):
|
||||
"""Test excluding class methods."""
|
||||
code = """
|
||||
class Calculator {
|
||||
add(a, b) {
|
||||
return a + b;
|
||||
}
|
||||
}
|
||||
|
||||
function standalone() {
|
||||
return 1;
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code, include_methods=False)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "standalone"
|
||||
|
||||
def test_exclude_arrow_functions(self, js_analyzer):
|
||||
"""Test excluding arrow functions."""
|
||||
code = """
|
||||
function regular() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
const arrow = () => 2;
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code, include_arrow_functions=False)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "regular"
|
||||
|
||||
def test_find_generator_function(self, js_analyzer):
|
||||
"""Test finding generator functions."""
|
||||
code = """
|
||||
function* numberGenerator() {
|
||||
yield 1;
|
||||
yield 2;
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "numberGenerator"
|
||||
assert functions[0].is_generator is True
|
||||
|
||||
def test_function_line_numbers(self, js_analyzer):
|
||||
"""Test that line numbers are correct."""
|
||||
code = """function first() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
function second() {
|
||||
return 2;
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
|
||||
first = next(f for f in functions if f.name == "first")
|
||||
second = next(f for f in functions if f.name == "second")
|
||||
|
||||
assert first.start_line == 1
|
||||
assert first.end_line == 3
|
||||
assert second.start_line == 5
|
||||
assert second.end_line == 7
|
||||
|
||||
def test_nested_functions(self, js_analyzer):
|
||||
"""Test finding nested functions."""
|
||||
code = """
|
||||
function outer() {
|
||||
function inner() {
|
||||
return 1;
|
||||
}
|
||||
return inner();
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 2
|
||||
names = {f.name for f in functions}
|
||||
assert names == {"outer", "inner"}
|
||||
|
||||
inner = next(f for f in functions if f.name == "inner")
|
||||
assert inner.parent_function == "outer"
|
||||
|
||||
def test_require_name_filters_anonymous(self, js_analyzer):
|
||||
"""Test that require_name filters anonymous functions."""
|
||||
code = """
|
||||
(function() {
|
||||
return 1;
|
||||
})();
|
||||
|
||||
function named() {
|
||||
return 2;
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code, require_name=True)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "named"
|
||||
|
||||
def test_function_expression_in_variable(self, js_analyzer):
|
||||
"""Test function expression assigned to variable."""
|
||||
code = """
|
||||
const add = function(a, b) {
|
||||
return a + b;
|
||||
};
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "add"
|
||||
|
||||
|
||||
class TestFindImports:
|
||||
"""Tests for find_imports method."""
|
||||
|
||||
@pytest.fixture
|
||||
def js_analyzer(self):
|
||||
"""Create a JavaScript analyzer."""
|
||||
return TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT)
|
||||
|
||||
def test_find_default_import(self, js_analyzer):
|
||||
"""Test finding default import."""
|
||||
code = "import React from 'react';"
|
||||
imports = js_analyzer.find_imports(code)
|
||||
|
||||
assert len(imports) == 1
|
||||
assert imports[0].module_path == "react"
|
||||
assert imports[0].default_import == "React"
|
||||
|
||||
def test_find_named_imports(self, js_analyzer):
|
||||
"""Test finding named imports."""
|
||||
code = "import { useState, useEffect } from 'react';"
|
||||
imports = js_analyzer.find_imports(code)
|
||||
|
||||
assert len(imports) == 1
|
||||
assert imports[0].module_path == "react"
|
||||
assert ("useState", None) in imports[0].named_imports
|
||||
assert ("useEffect", None) in imports[0].named_imports
|
||||
|
||||
def test_find_namespace_import(self, js_analyzer):
|
||||
"""Test finding namespace import."""
|
||||
code = "import * as utils from './utils';"
|
||||
imports = js_analyzer.find_imports(code)
|
||||
|
||||
assert len(imports) == 1
|
||||
assert imports[0].module_path == "./utils"
|
||||
assert imports[0].namespace_import == "utils"
|
||||
|
||||
def test_find_require(self, js_analyzer):
|
||||
"""Test finding require() calls."""
|
||||
code = "const fs = require('fs');"
|
||||
imports = js_analyzer.find_imports(code)
|
||||
|
||||
assert len(imports) == 1
|
||||
assert imports[0].module_path == "fs"
|
||||
assert imports[0].default_import == "fs"
|
||||
|
||||
def test_find_multiple_imports(self, js_analyzer):
|
||||
"""Test finding multiple imports."""
|
||||
code = """
|
||||
import React from 'react';
|
||||
import { useState } from 'react';
|
||||
import * as utils from './utils';
|
||||
const path = require('path');
|
||||
"""
|
||||
imports = js_analyzer.find_imports(code)
|
||||
|
||||
assert len(imports) == 4
|
||||
modules = {imp.module_path for imp in imports}
|
||||
assert modules == {"react", "./utils", "path"}
|
||||
|
||||
def test_import_with_alias(self, js_analyzer):
|
||||
"""Test finding import with alias."""
|
||||
code = "import { Component as Comp } from 'react';"
|
||||
imports = js_analyzer.find_imports(code)
|
||||
|
||||
assert len(imports) == 1
|
||||
assert ("Component", "Comp") in imports[0].named_imports
|
||||
|
||||
def test_relative_import(self, js_analyzer):
|
||||
"""Test finding relative imports."""
|
||||
code = "import { helper } from './helpers/utils';"
|
||||
imports = js_analyzer.find_imports(code)
|
||||
|
||||
assert len(imports) == 1
|
||||
assert imports[0].module_path == "./helpers/utils"
|
||||
|
||||
|
||||
class TestFindFunctionCalls:
|
||||
"""Tests for find_function_calls method."""
|
||||
|
||||
@pytest.fixture
|
||||
def js_analyzer(self):
|
||||
"""Create a JavaScript analyzer."""
|
||||
return TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT)
|
||||
|
||||
def test_find_simple_calls(self, js_analyzer):
|
||||
"""Test finding simple function calls."""
|
||||
code = """
|
||||
function helper() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
function main() {
|
||||
return helper() + 2;
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
main_func = next(f for f in functions if f.name == "main")
|
||||
|
||||
calls = js_analyzer.find_function_calls(code, main_func)
|
||||
|
||||
assert "helper" in calls
|
||||
|
||||
def test_find_method_calls(self, js_analyzer):
|
||||
"""Test finding method calls."""
|
||||
code = """
|
||||
function process(arr) {
|
||||
return arr.map(x => x * 2).filter(x => x > 0);
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
process_func = next(f for f in functions if f.name == "process")
|
||||
|
||||
calls = js_analyzer.find_function_calls(code, process_func)
|
||||
|
||||
assert "map" in calls
|
||||
assert "filter" in calls
|
||||
|
||||
|
||||
class TestHasReturnStatement:
|
||||
"""Tests for has_return_statement method."""
|
||||
|
||||
@pytest.fixture
|
||||
def js_analyzer(self):
|
||||
"""Create a JavaScript analyzer."""
|
||||
return TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT)
|
||||
|
||||
def test_function_with_return(self, js_analyzer):
|
||||
"""Test function with return statement."""
|
||||
code = """
|
||||
function add(a, b) {
|
||||
return a + b;
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
assert js_analyzer.has_return_statement(functions[0], code) is True
|
||||
|
||||
def test_function_without_return(self, js_analyzer):
|
||||
"""Test function without return statement."""
|
||||
code = """
|
||||
function log(msg) {
|
||||
console.log(msg);
|
||||
}
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code, require_name=True)
|
||||
func = next((f for f in functions if f.name == "log"), None)
|
||||
if func:
|
||||
assert js_analyzer.has_return_statement(func, code) is False
|
||||
|
||||
def test_arrow_function_implicit_return(self, js_analyzer):
|
||||
"""Test arrow function with implicit return."""
|
||||
code = "const double = x => x * 2;"
|
||||
functions = js_analyzer.find_functions(code)
|
||||
assert js_analyzer.has_return_statement(functions[0], code) is True
|
||||
|
||||
def test_arrow_function_explicit_return(self, js_analyzer):
|
||||
"""Test arrow function with explicit return."""
|
||||
code = """
|
||||
const add = (a, b) => {
|
||||
return a + b;
|
||||
};
|
||||
"""
|
||||
functions = js_analyzer.find_functions(code)
|
||||
assert js_analyzer.has_return_statement(functions[0], code) is True
|
||||
|
||||
|
||||
class TestTypeScriptSupport:
|
||||
"""Tests for TypeScript-specific features."""
|
||||
|
||||
@pytest.fixture
|
||||
def ts_analyzer(self):
|
||||
"""Create a TypeScript analyzer."""
|
||||
return TreeSitterAnalyzer(TreeSitterLanguage.TYPESCRIPT)
|
||||
|
||||
def test_find_typed_function(self, ts_analyzer):
|
||||
"""Test finding function with type annotations."""
|
||||
code = """
|
||||
function add(a: number, b: number): number {
|
||||
return a + b;
|
||||
}
|
||||
"""
|
||||
functions = ts_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "add"
|
||||
|
||||
def test_find_interface_method(self, ts_analyzer):
|
||||
"""Test that interface methods are not found (they're declarations)."""
|
||||
code = """
|
||||
interface Calculator {
|
||||
add(a: number, b: number): number;
|
||||
}
|
||||
|
||||
function helper(): number {
|
||||
return 1;
|
||||
}
|
||||
"""
|
||||
functions = ts_analyzer.find_functions(code)
|
||||
|
||||
# Only the actual function should be found, not the interface method
|
||||
names = {f.name for f in functions}
|
||||
assert "helper" in names
|
||||
|
||||
def test_find_generic_function(self, ts_analyzer):
|
||||
"""Test finding generic function."""
|
||||
code = """
|
||||
function identity<T>(value: T): T {
|
||||
return value;
|
||||
}
|
||||
"""
|
||||
functions = ts_analyzer.find_functions(code)
|
||||
|
||||
assert len(functions) == 1
|
||||
assert functions[0].name == "identity"
|
||||
169
uv.lock
169
uv.lock
|
|
@ -436,6 +436,11 @@ dependencies = [
|
|||
{ name = "rich" },
|
||||
{ name = "sentry-sdk" },
|
||||
{ name = "tomlkit" },
|
||||
{ name = "tree-sitter", version = "0.23.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
|
||||
{ name = "tree-sitter", version = "0.25.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
|
||||
{ name = "tree-sitter-javascript", version = "0.23.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
|
||||
{ name = "tree-sitter-javascript", version = "0.25.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
|
||||
{ name = "tree-sitter-typescript" },
|
||||
{ name = "unidiff" },
|
||||
]
|
||||
|
||||
|
|
@ -520,6 +525,9 @@ requires-dist = [
|
|||
{ name = "rich", specifier = ">=13.8.1" },
|
||||
{ name = "sentry-sdk", specifier = ">=1.40.6,<3.0.0" },
|
||||
{ name = "tomlkit", specifier = ">=0.11.7" },
|
||||
{ name = "tree-sitter", specifier = ">=0.23.0" },
|
||||
{ name = "tree-sitter-javascript", specifier = ">=0.23.0" },
|
||||
{ name = "tree-sitter-typescript", specifier = ">=0.23.0" },
|
||||
{ name = "unidiff", specifier = ">=0.7.4" },
|
||||
]
|
||||
|
||||
|
|
@ -925,7 +933,7 @@ name = "exceptiongroup"
|
|||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
|
||||
wheels = [
|
||||
|
|
@ -5112,6 +5120,165 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter"
|
||||
version = "0.23.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.9.2' and python_full_version < '3.10'",
|
||||
"python_full_version < '3.9.2'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/50/fd5fafa42b884f741b28d9e6fd366c3f34e15d2ed3aa9633b34e388379e2/tree-sitter-0.23.2.tar.gz", hash = "sha256:66bae8dd47f1fed7bdef816115146d3a41c39b5c482d7bad36d9ba1def088450", size = 166800, upload-time = "2024-10-24T15:31:02.238Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/91/04/2068a7b725265ecfcbf63ecdae038f1d4124ebccd55b8a7ce145b70e2b6a/tree_sitter-0.23.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3a937f5d8727bc1c74c4bf2a9d1c25ace049e8628273016ad0d45914ae904e10", size = 139289, upload-time = "2024-10-24T15:29:59.27Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/07/a5b943121f674fe1ac77694a698e71ce95353830c1f3f4ce45da7ef3e406/tree_sitter-0.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2c7eae7fe2af215645a38660d2d57d257a4c461fe3ec827cca99a79478284e80", size = 132379, upload-time = "2024-10-24T15:30:01.437Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/96/fcc72c33d464a2d722db1e95b74a53ced771a47b3cfde60aced29764a783/tree_sitter-0.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a71d607595270b6870eaf778a1032d146b2aa79bfcfa60f57a82a7b7584a4c7", size = 552884, upload-time = "2024-10-24T15:30:02.672Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/af/b0e787a52767155b4643a55d6de03c1e4ae77abb61e1dc1629ad983e0a40/tree_sitter-0.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fe9b9ea7a0aa23b52fd97354da95d1b2580065bc12a4ac868f9164a127211d6", size = 566561, upload-time = "2024-10-24T15:30:04.073Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/fd/05e966b5317b1c6679c071c5b0203f28af9d26c9363700cb9682e1bcf343/tree_sitter-0.23.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d74d00a8021719eae14d10d1b1e28649e15d8b958c01c2b2c3dad7a2ebc4dbae", size = 558273, upload-time = "2024-10-24T15:30:06.177Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/bc/19145efdf3f47711aa3f1bf06f0b50593f97f1108550d38694841fd97b7c/tree_sitter-0.23.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6de18d8d8a7f67ab71f472d1fcb01cc506e080cbb5e13d52929e4b6fdce6bbee", size = 569176, upload-time = "2024-10-24T15:30:07.902Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/08/3553d8e488ae9284a0762effafb7d2639a306e184963b7f99853923084d6/tree_sitter-0.23.2-cp310-cp310-win_amd64.whl", hash = "sha256:12b60dca70d2282af942b650a6d781be487485454668c7c956338a367b98cdee", size = 117902, upload-time = "2024-10-24T15:30:09.675Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/39/836fa485e985c33e8aa1cc3abbf7a84be1c2c382e69547a765631fdd7ce3/tree_sitter-0.23.2-cp310-cp310-win_arm64.whl", hash = "sha256:3346a4dd0447a42aabb863443b0fd8c92b909baf40ed2344fae4b94b625d5955", size = 102644, upload-time = "2024-10-24T15:30:11.484Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/8d/2d4fb04408772be0919441d66f700673ce7cb76b9ab6682e226d740fb88d/tree_sitter-0.23.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91fda41d4f8824335cc43c64e2c37d8089c8c563bd3900a512d2852d075af719", size = 139142, upload-time = "2024-10-24T15:30:12.627Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/52/b8a44bfff7b0203256e5dbc8d3a372ee8896128b8ed7d3a89e1ef17b2065/tree_sitter-0.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:92b2b489d5ce54b41f94c6f23fbaf592bd6e84dc2877048fd1cb060480fa53f7", size = 132198, upload-time = "2024-10-24T15:30:13.893Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/54/746f2ee5acf6191a4a0be7f5843329f0d713bfe5196f5fc6fe2ea69cb44c/tree_sitter-0.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64859bd4aa1567d0d6016a811b2b49c59d4a4427d096e3d8c84b2521455f62b7", size = 554303, upload-time = "2024-10-24T15:30:15.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/5a/3169d9933be813776a9b4b3f2e671d3d50fa27e589dee5578f6ecef7ff6d/tree_sitter-0.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:614590611636044e071d3a0b748046d52676dbda3bc9fa431216231e11dd98f7", size = 567626, upload-time = "2024-10-24T15:30:17.12Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/0d/23f363b3b0bc3fa0e7a4a294bf119957ac1ab02737d57815e1e8b7b3e196/tree_sitter-0.23.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:08466953c78ae57be61057188fb88c89791b0a562856010228e0ccf60e2ac453", size = 559803, upload-time = "2024-10-24T15:30:18.921Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/b3/1ffba0f17a7ff2c9114d91a1ecc15e0748f217817797564d31fbb61d7458/tree_sitter-0.23.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8a33f03a562de91f7fd05eefcedd8994a06cd44c62f7aabace811ad82bc11cbd", size = 570987, upload-time = "2024-10-24T15:30:21.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/4b/085bcb8a11ea18003aacc4dbc91c301d1536c5e2deedb95393e8ef26f1f7/tree_sitter-0.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:03b70296b569ef64f7b92b42ca5da9bf86d81bee2afd480bea35092687f51dae", size = 117771, upload-time = "2024-10-24T15:30:22.38Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/e5/90adc4081f49ccb6bea89a800dc9b0dcc5b6953b0da423e8eff28f63fddf/tree_sitter-0.23.2-cp311-cp311-win_arm64.whl", hash = "sha256:7cb4bb953ea7c0b50eeafc4454783e030357179d2a93c3dd5ebed2da5588ddd0", size = 102555, upload-time = "2024-10-24T15:30:23.534Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/a7/57e0fe87b49a78c670a7b4483f70e44c000c65c29b138001096b22e7dd87/tree_sitter-0.23.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a014498b6a9e6003fae8c6eb72f5927d62da9dcb72b28b3ce8cd15c6ff6a6572", size = 139259, upload-time = "2024-10-24T15:30:24.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/b9/bc8513d818ffb54993a017a36c8739300bc5739a13677acf90b54995e7db/tree_sitter-0.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f8699b131d4bcbe3805c37e4ef3d159ee9a82a0e700587625623999ba0ea53", size = 131951, upload-time = "2024-10-24T15:30:26.176Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/6a/eab01bb6b1ce3c9acf16d72922ffc29a904af485eb3e60baf3a3e04edd30/tree_sitter-0.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4471577df285059c71686ecb208bc50fb472099b38dcc8e849b0e86652891e87", size = 557952, upload-time = "2024-10-24T15:30:27.389Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/95/f2f73332623cf63200d57800f85273170bc5f99d28ea3f234afd5b0048df/tree_sitter-0.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f342c925290dd4e20ecd5787ef7ae8749981597ab364783a1eb73173efe65226", size = 571199, upload-time = "2024-10-24T15:30:28.879Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/ac/bd6e6cfdd0421156e86f5c93848629af1c7323083077e1a95b27d32d5811/tree_sitter-0.23.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a4e9e53d07dd076bede72e4f7d3a0173d7b9ad6576572dd86da008a740a9bb22", size = 562129, upload-time = "2024-10-24T15:30:30.199Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/bd/8a9edcbcf8a76b0bf58e3b927ed291e3598e063d56667367762833cc8709/tree_sitter-0.23.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8caebe65bc358759dac2500d8f8feed3aed939c4ade9a684a1783fe07bc7d5db", size = 574307, upload-time = "2024-10-24T15:30:32.085Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/c2/3fb2c6c0ae2f59a7411dc6d3e7945e3cb6f34c8552688708acc8b2b13f83/tree_sitter-0.23.2-cp312-cp312-win_amd64.whl", hash = "sha256:fc5a72eb50d43485000dbbb309acb350467b7467e66dc747c6bb82ce63041582", size = 117858, upload-time = "2024-10-24T15:30:33.353Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/18/4ca2c0f4a0c802ebcb3a92264cc436f1d54b394fa24dfa76bf57cdeaca9e/tree_sitter-0.23.2-cp312-cp312-win_arm64.whl", hash = "sha256:a0320eb6c7993359c5f7b371d22719ccd273f440d41cf1bd65dac5e9587f2046", size = 102496, upload-time = "2024-10-24T15:30:34.782Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/c6/4ead9ce3113a7c27f37a2bdef163c09757efbaa85adbdfe7b3fbf0317c57/tree_sitter-0.23.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:eff630dddee7ba05accb439b17e559e15ce13f057297007c246237ceb6306332", size = 139266, upload-time = "2024-10-24T15:30:35.946Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/c9/b4197c5b0c1d6ba648202a547846ac910a53163b69a459504b2aa6cdb76e/tree_sitter-0.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4780ba8f3894f2dea869fad2995c2aceab3fd5ab9e6a27c45475d2acd7f7e84e", size = 131959, upload-time = "2024-10-24T15:30:37.646Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/94/0f7c5580d2adff3b57d36f1998725b0caf6cf1af50ceafc00c6cdbc2fef6/tree_sitter-0.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b609460b8e3e256361fb12e94fae5b728cb835b16f0f9d590b5aadbf9d109b", size = 557582, upload-time = "2024-10-24T15:30:39.019Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/8a/f73ff06959d43fd47fc283cbcc4d8efa6550b2cc431d852b184504992447/tree_sitter-0.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d070d8eaeaeb36cf535f55e5578fddbfc3bf53c1980f58bf1a99d57466b3b5", size = 570891, upload-time = "2024-10-24T15:30:40.432Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/86/bbda5ad09b88051ff7bf3275622a2f79bc4f728b4c283ff8b93b8fcdf36d/tree_sitter-0.23.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:878580b2ad5054c410ba3418edca4d34c81cc26706114d8f5b5541688bc2d785", size = 562343, upload-time = "2024-10-24T15:30:43.045Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/55/b404fa49cb5c2926ad6fe1cac033dd486ef69f1afeb7828452d21e1e05c1/tree_sitter-0.23.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:29224bdc2a3b9af535b7725e249d3ee291b2e90708e82832e73acc175e40dc48", size = 574407, upload-time = "2024-10-24T15:30:45.018Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/c8/eea2104443ab973091107ef3e730683bd8e6cb51dd025cef853d3fff9dae/tree_sitter-0.23.2-cp313-cp313-win_amd64.whl", hash = "sha256:c58d89348162fbc3aea1fe6511a66ee189fc0e4e4bbe937026f29e4ecef17763", size = 117854, upload-time = "2024-10-24T15:30:47.817Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/4d/1728d9ce32a1d851081911b7e47830f5e740431f2bb920f54bb8c26175bc/tree_sitter-0.23.2-cp313-cp313-win_arm64.whl", hash = "sha256:0ff2037be5edab7801de3f6a721b9cf010853f612e2008ee454e0e0badb225a6", size = 102492, upload-time = "2024-10-24T15:30:48.892Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/ab/b39173a47d498cc6276e303c865f4a222134ceae890bd3c1b29427489805/tree_sitter-0.23.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a5db8e585205faef8bf219da77d8993e2ef04d08eda2e3c8ad7e4df8297ee344", size = 139550, upload-time = "2024-10-24T15:30:50.516Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/34/fa8f5b862dd7a6014fd5578810178e8f7601830cabb6d65d2aba050c2df1/tree_sitter-0.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9dbd110a30cf28be5da734ae4cd0e9031768228dbf6a79f2973962aa51de4ec7", size = 132686, upload-time = "2024-10-24T15:30:51.779Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/b9/ccdddf35705fc23395caa71557f767e0753d38afe4b5bb99efddbf62bb22/tree_sitter-0.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569514b9a996a0fd458b3a891c46ca125298be0c03cf82f2b6f0c13d5d8f25dc", size = 554958, upload-time = "2024-10-24T15:30:53.327Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/ba/20ae9079bdfc5cfac28b39d945a6c354c8e1385e73aec8142db6c53b635c/tree_sitter-0.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a357ed98a74e47787b812df99a74a2c35c0fe11e55c2095cc01d1cad144ef552", size = 568162, upload-time = "2024-10-24T15:30:54.667Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/00/b16bf6cf88c47c1b6c8e1cce1eb9e90badb5db9e5252ae0970d858d02592/tree_sitter-0.23.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c2dfb8e8f760f4cc67888d03ef9e2dbd3353245f67f5efba375c2a14d944ac0e", size = 560278, upload-time = "2024-10-24T15:30:56.49Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/8f/27ab9b96cc0261af78b080ec8a9846a38e216360ec38774ea27eba35bd3c/tree_sitter-0.23.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3ead958df87a21d706903987e665e9e0e5df7b2c5021ff69ea349826840adc6a", size = 571255, upload-time = "2024-10-24T15:30:58.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/e0/95a3d66a7e5bb229574484ab10c6dc99d1c7a32972b890d194076e30dc4f/tree_sitter-0.23.2-cp39-cp39-win_amd64.whl", hash = "sha256:611cae16be332213c0e6ece72c0bfca202e30ff320a8b309b1526c6cb79ee4ba", size = 118232, upload-time = "2024-10-24T15:30:59.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/b5/9eaf794fc71490573ab14a366affca415bc1ddbf86a14d78e54583db4254/tree_sitter-0.23.2-cp39-cp39-win_arm64.whl", hash = "sha256:b848e0fdd522fbb8888cdb4f4d93f8fad97ae10d70c122fb922e51363c7febcd", size = 102787, upload-time = "2024-10-24T15:31:01.084Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter"
|
||||
version = "0.25.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.14'",
|
||||
"python_full_version == '3.13.*'",
|
||||
"python_full_version == '3.12.*'",
|
||||
"python_full_version == '3.11.*'",
|
||||
"python_full_version == '3.10.*'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/66/7c/0350cfc47faadc0d3cf7d8237a4e34032b3014ddf4a12ded9933e1648b55/tree-sitter-0.25.2.tar.gz", hash = "sha256:fe43c158555da46723b28b52e058ad444195afd1db3ca7720c59a254544e9c20", size = 177961, upload-time = "2025-09-25T17:37:59.751Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/d4/f7ffb855cb039b7568aba4911fbe42e4c39c0e4398387c8e0d8251489992/tree_sitter-0.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72a510931c3c25f134aac2daf4eb4feca99ffe37a35896d7150e50ac3eee06c7", size = 146749, upload-time = "2025-09-25T17:37:16.475Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/58/f8a107f9f89700c0ab2930f1315e63bdedccbb5fd1b10fcbc5ebadd54ac8/tree_sitter-0.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:44488e0e78146f87baaa009736886516779253d6d6bac3ef636ede72bc6a8234", size = 137766, upload-time = "2025-09-25T17:37:18.138Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/fb/357158d39f01699faea466e8fd5a849f5a30252c68414bddc20357a9ac79/tree_sitter-0.25.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c2f8e7d6b2f8489d4a9885e3adcaef4bc5ff0a275acd990f120e29c4ab3395c5", size = 599809, upload-time = "2025-09-25T17:37:19.169Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/a4/68ae301626f2393a62119481cb660eb93504a524fc741a6f1528a4568cf6/tree_sitter-0.25.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20b570690f87f1da424cd690e51cc56728d21d63f4abd4b326d382a30353acc7", size = 627676, upload-time = "2025-09-25T17:37:20.715Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/fe/4c1bef37db5ca8b17ca0b3070f2dff509468a50b3af18f17665adcab42b9/tree_sitter-0.25.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a0ec41b895da717bc218a42a3a7a0bfcfe9a213d7afaa4255353901e0e21f696", size = 624281, upload-time = "2025-09-25T17:37:21.823Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/30/3283cb7fa251cae2a0bf8661658021a789810db3ab1b0569482d4a3671fd/tree_sitter-0.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:7712335855b2307a21ae86efe949c76be36c6068d76df34faa27ce9ee40ff444", size = 127295, upload-time = "2025-09-25T17:37:22.977Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/90/ceb05e6de281aebe82b68662890619580d4ffe09283ebd2ceabcf5df7b4a/tree_sitter-0.25.2-cp310-cp310-win_arm64.whl", hash = "sha256:a925364eb7fbb9cdce55a9868f7525a1905af512a559303bd54ef468fd88cb37", size = 113991, upload-time = "2025-09-25T17:37:23.854Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/22/88a1e00b906d26fa8a075dd19c6c3116997cb884bf1b3c023deb065a344d/tree_sitter-0.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ca72d841215b6573ed0655b3a5cd1133f9b69a6fa561aecad40dca9029d75b", size = 146752, upload-time = "2025-09-25T17:37:24.775Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/1c/22cc14f3910017b7a76d7358df5cd315a84fe0c7f6f7b443b49db2e2790d/tree_sitter-0.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc0351cfe5022cec5a77645f647f92a936b38850346ed3f6d6babfbeeeca4d26", size = 137765, upload-time = "2025-09-25T17:37:26.103Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/0c/d0de46ded7d5b34631e0f630d9866dab22d3183195bf0f3b81de406d6622/tree_sitter-0.25.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1799609636c0193e16c38f366bda5af15b1ce476df79ddaae7dd274df9e44266", size = 604643, upload-time = "2025-09-25T17:37:27.398Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/34/38/b735a58c1c2f60a168a678ca27b4c1a9df725d0bf2d1a8a1c571c033111e/tree_sitter-0.25.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e65ae456ad0d210ee71a89ee112ac7e72e6c2e5aac1b95846ecc7afa68a194c", size = 632229, upload-time = "2025-09-25T17:37:28.463Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/f6/cda1e1e6cbff5e28d8433578e2556d7ba0b0209d95a796128155b97e7693/tree_sitter-0.25.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:49ee3c348caa459244ec437ccc7ff3831f35977d143f65311572b8ba0a5f265f", size = 629861, upload-time = "2025-09-25T17:37:29.593Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/19/427e5943b276a0dd74c2a1f1d7a7393443f13d1ee47dedb3f8127903c080/tree_sitter-0.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:56ac6602c7d09c2c507c55e58dc7026b8988e0475bd0002f8a386cce5e8e8adc", size = 127304, upload-time = "2025-09-25T17:37:30.549Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/d9/eef856dc15f784d85d1397a17f3ee0f82df7778efce9e1961203abfe376a/tree_sitter-0.25.2-cp311-cp311-win_arm64.whl", hash = "sha256:b3d11a3a3ac89bb8a2543d75597f905a9926f9c806f40fcca8242922d1cc6ad5", size = 113990, upload-time = "2025-09-25T17:37:31.852Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/9e/20c2a00a862f1c2897a436b17edb774e831b22218083b459d0d081c9db33/tree_sitter-0.25.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ddabfff809ffc983fc9963455ba1cecc90295803e06e140a4c83e94c1fa3d960", size = 146941, upload-time = "2025-09-25T17:37:34.813Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/04/8512e2062e652a1016e840ce36ba1cc33258b0dcc4e500d8089b4054afec/tree_sitter-0.25.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c0c0ab5f94938a23fe81928a21cc0fac44143133ccc4eb7eeb1b92f84748331c", size = 137699, upload-time = "2025-09-25T17:37:36.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/8a/d48c0414db19307b0fb3bb10d76a3a0cbe275bb293f145ee7fba2abd668e/tree_sitter-0.25.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dd12d80d91d4114ca097626eb82714618dcdfacd6a5e0955216c6485c350ef99", size = 607125, upload-time = "2025-09-25T17:37:37.725Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/d1/b95f545e9fc5001b8a78636ef942a4e4e536580caa6a99e73dd0a02e87aa/tree_sitter-0.25.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b43a9e4c89d4d0839de27cd4d6902d33396de700e9ff4c5ab7631f277a85ead9", size = 635418, upload-time = "2025-09-25T17:37:38.922Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/4d/b734bde3fb6f3513a010fa91f1f2875442cdc0382d6a949005cd84563d8f/tree_sitter-0.25.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbb1706407c0e451c4f8cc016fec27d72d4b211fdd3173320b1ada7a6c74c3ac", size = 631250, upload-time = "2025-09-25T17:37:40.039Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/f2/5f654994f36d10c64d50a192239599fcae46677491c8dd53e7579c35a3e3/tree_sitter-0.25.2-cp312-cp312-win_amd64.whl", hash = "sha256:6d0302550bbe4620a5dc7649517c4409d74ef18558276ce758419cf09e578897", size = 127156, upload-time = "2025-09-25T17:37:41.132Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/23/148c468d410efcf0a9535272d81c258d840c27b34781d625f1f627e2e27d/tree_sitter-0.25.2-cp312-cp312-win_arm64.whl", hash = "sha256:0c8b6682cac77e37cfe5cf7ec388844957f48b7bd8d6321d0ca2d852994e10d5", size = 113984, upload-time = "2025-09-25T17:37:42.074Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/67/67492014ce32729b63d7ef318a19f9cfedd855d677de5773476caf771e96/tree_sitter-0.25.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0628671f0de69bb279558ef6b640bcfc97864fe0026d840f872728a86cd6b6cd", size = 146926, upload-time = "2025-09-25T17:37:43.041Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/9c/a278b15e6b263e86c5e301c82a60923fa7c59d44f78d7a110a89a413e640/tree_sitter-0.25.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f5ddcd3e291a749b62521f71fc953f66f5fd9743973fd6dd962b092773569601", size = 137712, upload-time = "2025-09-25T17:37:44.039Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/9a/423bba15d2bf6473ba67846ba5244b988cd97a4b1ea2b146822162256794/tree_sitter-0.25.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd88fbb0f6c3a0f28f0a68d72df88e9755cf5215bae146f5a1bdc8362b772053", size = 607873, upload-time = "2025-09-25T17:37:45.477Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/4c/b430d2cb43f8badfb3a3fa9d6cd7c8247698187b5674008c9d67b2a90c8e/tree_sitter-0.25.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b878e296e63661c8e124177cc3084b041ba3f5936b43076d57c487822426f614", size = 636313, upload-time = "2025-09-25T17:37:46.68Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/27/5f97098dbba807331d666a0997662e82d066e84b17d92efab575d283822f/tree_sitter-0.25.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d77605e0d353ba3fe5627e5490f0fbfe44141bafa4478d88ef7954a61a848dae", size = 631370, upload-time = "2025-09-25T17:37:47.993Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/3c/87caaed663fabc35e18dc704cd0e9800a0ee2f22bd18b9cbe7c10799895d/tree_sitter-0.25.2-cp313-cp313-win_amd64.whl", hash = "sha256:463c032bd02052d934daa5f45d183e0521ceb783c2548501cf034b0beba92c9b", size = 127157, upload-time = "2025-09-25T17:37:48.967Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/23/f8467b408b7988aff4ea40946a4bd1a2c1a73d17156a9d039bbaff1e2ceb/tree_sitter-0.25.2-cp313-cp313-win_arm64.whl", hash = "sha256:b3f63a1796886249bd22c559a5944d64d05d43f2be72961624278eff0dcc5cb8", size = 113975, upload-time = "2025-09-25T17:37:49.922Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/e3/d9526ba71dfbbe4eba5e51d89432b4b333a49a1e70712aa5590cd22fc74f/tree_sitter-0.25.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:65d3c931013ea798b502782acab986bbf47ba2c452610ab0776cf4a8ef150fc0", size = 146776, upload-time = "2025-09-25T17:37:50.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/97/4bd4ad97f85a23011dd8a535534bb1035c4e0bac1234d58f438e15cff51f/tree_sitter-0.25.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:bda059af9d621918efb813b22fb06b3fe00c3e94079c6143fcb2c565eb44cb87", size = 137732, upload-time = "2025-09-25T17:37:51.877Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/19/1e968aa0b1b567988ed522f836498a6a9529a74aab15f09dd9ac1e41f505/tree_sitter-0.25.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eac4e8e4c7060c75f395feec46421eb61212cb73998dbe004b7384724f3682ab", size = 609456, upload-time = "2025-09-25T17:37:52.925Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/b6/cf08f4f20f4c9094006ef8828555484e842fc468827ad6e56011ab668dbd/tree_sitter-0.25.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:260586381b23be33b6191a07cea3d44ecbd6c01aa4c6b027a0439145fcbc3358", size = 636772, upload-time = "2025-09-25T17:37:54.647Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/e2/d42d55bf56360987c32bc7b16adb06744e425670b823fb8a5786a1cea991/tree_sitter-0.25.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7d2ee1acbacebe50ba0f85fff1bc05e65d877958f00880f49f9b2af38dce1af0", size = 631522, upload-time = "2025-09-25T17:37:55.833Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/03/87/af9604ebe275a9345d88c3ace0cf2a1341aa3f8ef49dd9fc11662132df8a/tree_sitter-0.25.2-cp314-cp314-win_amd64.whl", hash = "sha256:4973b718fcadfb04e59e746abfbb0288694159c6aeecd2add59320c03368c721", size = 130864, upload-time = "2025-09-25T17:37:57.453Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/6e/e64621037357acb83d912276ffd30a859ef117f9c680f2e3cb955f47c680/tree_sitter-0.25.2-cp314-cp314-win_arm64.whl", hash = "sha256:b8d4429954a3beb3e844e2872610d2a4800ba4eb42bb1990c6a4b1949b18459f", size = 117470, upload-time = "2025-09-25T17:37:58.431Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-javascript"
|
||||
version = "0.23.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.9.2' and python_full_version < '3.10'",
|
||||
"python_full_version < '3.9.2'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cd/dc/1c55c33cc6bbe754359b330534cf9f261c1b9b2c26ddf23aef3c5fa67759/tree_sitter_javascript-0.23.1.tar.gz", hash = "sha256:b2059ce8b150162cda05a457ca3920450adbf915119c04b8c67b5241cd7fcfed", size = 110058, upload-time = "2024-11-10T05:40:42.357Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/20/d3/c67d7d49967344b51208ad19f105233be1afdf07d3dcb35b471900265227/tree_sitter_javascript-0.23.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6ca583dad4bd79d3053c310b9f7208cd597fd85f9947e4ab2294658bb5c11e35", size = 59333, upload-time = "2024-11-10T05:40:31.988Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/db/ea0ee1547679d1750e80a0c4bc60b3520b166eeaf048764cfdd1ba3fd5e5/tree_sitter_javascript-0.23.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:94100e491a6a247aa4d14caf61230c171b6376c863039b6d9cd71255c2d815ec", size = 61071, upload-time = "2024-11-10T05:40:33.458Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/6e/07c4857e08be37bfb55bfb269863df8ec908b2f6a3f1893cd852b893ecab/tree_sitter_javascript-0.23.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6bc1055b061c5055ec58f39ee9b2e9efb8e6e0ae970838af74da0afb811f0a", size = 96999, upload-time = "2024-11-10T05:40:34.869Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/f5/4de730afe8b9422845bc2064020a8a8f49ebd1695c04261c38d1b3e3edec/tree_sitter_javascript-0.23.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:056dc04fb6b24293f8c5fec43c14e7e16ba2075b3009c643abf8c85edc4c7c3c", size = 94020, upload-time = "2024-11-10T05:40:35.735Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/0a/f980520da86c4eff8392867840a945578ef43372c9d4a37922baa6b121fe/tree_sitter_javascript-0.23.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a11ca1c0f736da42967586b568dff8a465ee148a986c15ebdc9382806e0ce871", size = 92927, upload-time = "2024-11-10T05:40:37.92Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/5c/36a98d512aa1d1082409d6b7eda5d26b820bd4477a54100ad9f62212bc55/tree_sitter_javascript-0.23.1-cp39-abi3-win_amd64.whl", hash = "sha256:041fa22b34250ea6eb313d33104d5303f79504cb259d374d691e38bbdc49145b", size = 58824, upload-time = "2024-11-10T05:40:39.903Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/79/ceb21988e6de615355a63eebcf806cd2a0fe875bec27b429d58b63e7fb5f/tree_sitter_javascript-0.23.1-cp39-abi3-win_arm64.whl", hash = "sha256:eb28130cd2fb30d702d614cbf61ef44d1c7f6869e7d864a9cc17111e370be8f7", size = 57027, upload-time = "2024-11-10T05:40:40.841Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-javascript"
|
||||
version = "0.25.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.14'",
|
||||
"python_full_version == '3.13.*'",
|
||||
"python_full_version == '3.12.*'",
|
||||
"python_full_version == '3.11.*'",
|
||||
"python_full_version == '3.10.*'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/59/e0/e63103c72a9d3dfd89a31e02e660263ad84b7438e5f44ee82e443e65bbde/tree_sitter_javascript-0.25.0.tar.gz", hash = "sha256:329b5414874f0588a98f1c291f1b28138286617aa907746ffe55adfdcf963f38", size = 132338, upload-time = "2025-09-01T07:13:44.792Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/df/5106ac250cd03661ebc3cc75da6b3d9f6800a3606393a0122eca58038104/tree_sitter_javascript-0.25.0-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b70f887fb269d6e58c349d683f59fa647140c410cfe2bee44a883b20ec92e3dc", size = 64052, upload-time = "2025-09-01T07:13:36.865Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/8f/6b4b2bc90d8ab3955856ce852cc9d1e82c81d7ab9646385f0e75ffd5b5d3/tree_sitter_javascript-0.25.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:8264a996b8845cfce06965152a013b5d9cbb7d199bc3503e12b5682e62bb1de1", size = 66440, upload-time = "2025-09-01T07:13:37.962Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/c4/7da74ecdcd8a398f88bd003a87c65403b5fe0e958cdd43fbd5fd4a398fcf/tree_sitter_javascript-0.25.0-cp310-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9dc04ba91fc8583344e57c1f1ed5b2c97ecaaf47480011b92fbeab8dda96db75", size = 99728, upload-time = "2025-09-01T07:13:38.755Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/c8/97da3af4796495e46421e9344738addb3602fa6426ea695be3fcbadbee37/tree_sitter_javascript-0.25.0-cp310-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:199d09985190852e0912da2b8d26c932159be314bc04952cf917ed0e4c633e6b", size = 106072, upload-time = "2025-09-01T07:13:39.798Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/be/c964e8130be08cc9bd6627d845f0e4460945b158429d39510953bbcb8fcc/tree_sitter_javascript-0.25.0-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dfcf789064c58dc13c0a4edb550acacfc6f0f280577f1e7a00de3e89fc7f8ddc", size = 104388, upload-time = "2025-09-01T07:13:40.866Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/89/9b773dee0f8961d1bb8d7baf0a204ab587618df19897c1ef260916f318ec/tree_sitter_javascript-0.25.0-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b852d3aee8a36186dbcc32c798b11b4869f9b5041743b63b65c2ef793db7a54", size = 98377, upload-time = "2025-09-01T07:13:41.838Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/dc/d90cb1790f8cec9b4878d278ad9faf7c8f893189ce0f855304fd704fc274/tree_sitter_javascript-0.25.0-cp310-abi3-win_amd64.whl", hash = "sha256:e5ed840f5bd4a3f0272e441d19429b26eedc257abe5574c8546da6b556865e3c", size = 62975, upload-time = "2025-09-01T07:13:42.828Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/1f/f9eba1038b7d4394410f3c0a6ec2122b590cd7acb03f196e52fa57ebbe72/tree_sitter_javascript-0.25.0-cp310-abi3-win_arm64.whl", hash = "sha256:622a69d677aa7f6ee2931d8c77c981a33f0ebb6d275aa9d43d3397c879a9bb0b", size = 61668, upload-time = "2025-09-01T07:13:43.803Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-typescript"
|
||||
version = "0.23.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/1e/fc/bb52958f7e399250aee093751e9373a6311cadbe76b6e0d109b853757f35/tree_sitter_typescript-0.23.2.tar.gz", hash = "sha256:7b167b5827c882261cb7a50dfa0fb567975f9b315e87ed87ad0a0a3aedb3834d", size = 773053, upload-time = "2024-11-11T02:36:11.396Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/28/95/4c00680866280e008e81dd621fd4d3f54aa3dad1b76b857a19da1b2cc426/tree_sitter_typescript-0.23.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3cd752d70d8e5371fdac6a9a4df9d8924b63b6998d268586f7d374c9fba2a478", size = 286677, upload-time = "2024-11-11T02:35:58.839Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/2f/1f36fda564518d84593f2740d5905ac127d590baf5c5753cef2a88a89c15/tree_sitter_typescript-0.23.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:c7cc1b0ff5d91bac863b0e38b1578d5505e718156c9db577c8baea2557f66de8", size = 302008, upload-time = "2024-11-11T02:36:00.733Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/2d/975c2dad292aa9994f982eb0b69cc6fda0223e4b6c4ea714550477d8ec3a/tree_sitter_typescript-0.23.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b1eed5b0b3a8134e86126b00b743d667ec27c63fc9de1b7bb23168803879e31", size = 351987, upload-time = "2024-11-11T02:36:02.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/d1/a71c36da6e2b8a4ed5e2970819b86ef13ba77ac40d9e333cb17df6a2c5db/tree_sitter_typescript-0.23.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e96d36b85bcacdeb8ff5c2618d75593ef12ebaf1b4eace3477e2bdb2abb1752c", size = 344960, upload-time = "2024-11-11T02:36:04.443Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/cb/f57b149d7beed1a85b8266d0c60ebe4c46e79c9ba56bc17b898e17daf88e/tree_sitter_typescript-0.23.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8d4f0f9bcb61ad7b7509d49a1565ff2cc363863644a234e1e0fe10960e55aea0", size = 340245, upload-time = "2024-11-11T02:36:06.473Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/ab/dd84f0e2337296a5f09749f7b5483215d75c8fa9e33738522e5ed81f7254/tree_sitter_typescript-0.23.2-cp39-abi3-win_amd64.whl", hash = "sha256:3f730b66396bc3e11811e4465c41ee45d9e9edd6de355a58bbbc49fa770da8f9", size = 278015, upload-time = "2024-11-11T02:36:07.631Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/e4/81f9a935789233cf412a0ed5fe04c883841d2c8fb0b7e075958a35c65032/tree_sitter_typescript-0.23.2-cp39-abi3-win_arm64.whl", hash = "sha256:05db58f70b95ef0ea126db5560f3775692f609589ed6f8dd0af84b7f19f1cbb7", size = 274052, upload-time = "2024-11-11T02:36:09.514Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "triton"
|
||||
version = "3.4.0"
|
||||
|
|
|
|||
Loading…
Reference in a new issue