From 910ecd6e42d6f2b65bc99f9e9129c7bb6cbadef4 Mon Sep 17 00:00:00 2001
From: Yiannis Charalambous
Date: Thu, 14 Sep 2023 16:16:10 +0100
Subject: [PATCH 1/5] Added loading_hints config value that allows hiding of loading anim

---
 config.json                    |  1 +
 esbmc_ai_lib/config.py         | 24 +++++++++++++++++++
 esbmc_ai_lib/loading_widget.py | 10 ++++++--
 tests/regtest/test_ast.py      | 44 ++++++++++++++++++++++++++++++++++
 4 files changed, 77 insertions(+), 2 deletions(-)
 create mode 100644 tests/regtest/test_ast.py

diff --git a/config.json b/config.json
index cf81400..5495800 100644
--- a/config.json
+++ b/config.json
@@ -20,6 +20,7 @@
     "consecutive_prompt_delay": 20,
     "temp_auto_clean": false,
     "temp_file_dir": "./temp",
+    "loading_hints": true,
     "chat_modes": {
         "user_chat": {
             "temperature": 1.0,
diff --git a/esbmc_ai_lib/config.py b/esbmc_ai_lib/config.py
index 043ddf5..f34f474 100644
--- a/esbmc_ai_lib/config.py
+++ b/esbmc_ai_lib/config.py
@@ -35,6 +35,8 @@
 consecutive_prompt_delay: float = 20.0
 ai_model: AIModel = AIModels.GPT_3.value
 
+loading_hints: bool = False
+
 cfg_path: str = "./config.json"
 
 
@@ -146,6 +148,21 @@ def _load_config_value(
     return default, False
 
 
+def _load_config_bool(
+    config_file: dict,
+    name: str,
+    default: bool = False,
+) -> bool:
+    value, _ = _load_config_value(config_file, name, default)
+    if isinstance(value, bool):
+        return value
+    else:
+        raise TypeError(
+            f"Error: config invalid {name} value: {value} "
+            + "Make sure it is a bool value."
+        )
+
+
 def _load_config_real_number(
     config_file: dict, name: str, default: object = None
 ) -> Union[int, float]:
@@ -197,6 +214,13 @@ def load_config(file_path: str) -> None:
         temp_file_dir,
     )
 
+    global loading_hints
+    loading_hints = _load_config_bool(
+        config_file,
+        "loading_hints",
+        True,
+    )
+
     # Load the custom ai configs.
     _load_custom_ai(config_file["ai_custom"])
 
diff --git a/esbmc_ai_lib/loading_widget.py b/esbmc_ai_lib/loading_widget.py
index bdd143a..c0c37af 100644
--- a/esbmc_ai_lib/loading_widget.py
+++ b/esbmc_ai_lib/loading_widget.py
@@ -10,11 +10,14 @@
 from time import sleep
 from itertools import cycle
 from threading import Thread
+from typing import Optional
+
+from esbmc_ai_lib import config
 
 
 class LoadingWidget(object):
     done: bool = False
-    thread: Thread
+    thread: Optional[Thread]
     loading_text: str
     animation: list[str]
     anim_speed: float
@@ -53,6 +56,8 @@ def _animate(self) -> None:
             terminal.flush()
 
     def start(self, text: str = "Please Wait") -> None:
+        if not config.loading_hints:
+            return
         self.done = False
         self.loading_text = text
         self.thread = Thread(target=self._animate)
@@ -62,7 +67,8 @@ def start(self, text: str = "Please Wait") -> None:
     def stop(self) -> None:
         self.done = True
         # Block until end.
-        self.thread.join()
+        if self.thread:
+            self.thread.join()
 
 
 _widgets: list[LoadingWidget] = []
diff --git a/tests/regtest/test_ast.py b/tests/regtest/test_ast.py
new file mode 100644
index 0000000..72a02c4
--- /dev/null
+++ b/tests/regtest/test_ast.py
@@ -0,0 +1,44 @@
+source_code = """struct linear
+{
+    int value;
+};
+
+typedef struct linear LinearTypeDef;
+
+typedef struct
+{
+    int x;
+    int y;
+} Point;
+
+Point a;
+Point *b;
+
+int c;
+
+char *d;
+
+typedef enum Types
+{
+    ONE,
+    TWO,
+    THREE
+} Typest;
+
+enum Types e = ONE;
+
+Typest f = TWO;
+
+union Combines
+{
+    int a;
+    int b;
+    int c;
+};
+
+typedef union Combines CombinesTypeDef;
+
+enum extra { A, B, C};
+
+typedef enum extra ExtraEnum;"""
+# TODO

From 9174f598be423e4a8410e74539ec2e908a3a6217 Mon Sep 17 00:00:00 2001
From: Yiannis Charalambous
Date: Tue, 26 Sep 2023 23:55:11 +0100
Subject: [PATCH 2/5] Add config allow successful parameter to allow running esbmc-ai on verification success

---
 config.json            | 1 +
 esbmc_ai_lib/config.py | 8 ++++++++
 2 files changed, 9 insertions(+)

diff --git a/config.json b/config.json
index 5495800..310830e 100644
--- a/config.json
+++ b/config.json
@@ -2,6 +2,7 @@
     "ai_model": "gpt-3.5-turbo-16k",
     "ai_custom": {},
     "esbmc_path": "./esbmc",
+    "allow_successful": true,
     "esbmc_params": [
         "--interval-analysis",
         "--goto-unwind",
diff --git a/esbmc_ai_lib/config.py b/esbmc_ai_lib/config.py
index f34f474..4802fbf 100644
--- a/esbmc_ai_lib/config.py
+++ b/esbmc_ai_lib/config.py
@@ -36,6 +36,7 @@
 ai_model: AIModel = AIModels.GPT_3.value
 
 loading_hints: bool = False
+allow_successful: bool = False
 
 cfg_path: str = "./config.json"
 
@@ -214,6 +215,13 @@ def load_config(file_path: str) -> None:
         temp_file_dir,
     )
 
+    global allow_successful
+    allow_successful = _load_config_bool(
+        config_file,
+        "allow_successful",
+        False,
+    )
+
     global loading_hints
     loading_hints = _load_config_bool(
         config_file,
         "loading_hints",

From 0cf21420cd7acaaaddd46bb7f66fb2d831d4c720 Mon Sep 17 00:00:00 2001
From: Yiannis Charalambous
Date: Tue, 26 Sep 2023 23:55:47 +0100
Subject: [PATCH 3/5] Add return type hints to fix code command execute

---
 esbmc_ai_lib/commands/fix_code_command.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/esbmc_ai_lib/commands/fix_code_command.py b/esbmc_ai_lib/commands/fix_code_command.py
index 25180df..b4082dc 100644
--- a/esbmc_ai_lib/commands/fix_code_command.py
+++ b/esbmc_ai_lib/commands/fix_code_command.py
@@ -2,6 +2,7 @@
 from os import get_terminal_size
 from time import sleep
+from typing import Tuple
 from typing_extensions import override
 
 from langchain.schema import AIMessage, HumanMessage
 
@@ -31,7 +32,9 @@ def __init__(self) -> None:
         self.anim = create_loading_widget()
 
     @override
-    def execute(self, file_name: str, source_code: str, esbmc_output: str):
+    def execute(
+        self, file_name: str, source_code: str, esbmc_output: str
+    ) -> Tuple[bool, str]:
         wait_time: int = int(config.consecutive_prompt_delay)
         # Create time left animation to show how much time left between API calls
         # This is done by creating a list of all the numbers to be displayed and

From 88cf07287591e9713b4c189ee62a59503f8e7b41 Mon Sep 17 00:00:00 2001
From: Yiannis Charalambous
Date: Tue, 26 Sep 2023 23:56:09 +0100
Subject: [PATCH 4/5] Added sample optimize-code factorial

---
 samples/optimize-code/fact_01.c | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)
 create mode 100644 samples/optimize-code/fact_01.c

diff --git a/samples/optimize-code/fact_01.c b/samples/optimize-code/fact_01.c
new file mode 100644
index 0000000..504ef40
--- /dev/null
+++ b/samples/optimize-code/fact_01.c
@@ -0,0 +1,26 @@
+#include <stdio.h>
+
+int factorial(int n)
+{
+    if (n <= 0)
+    {
+        return 1;
+    }
+    else
+    {
+        int result = 1;
+        while (n > 0)
+        {
+            result *= n;
+            n--;
+        }
+        return result;
+    }
+}
+
+int main()
+{
+    int num = 10;
+    printf("Factorial of %d is %d\n", num, factorial(num));
+    return 0;
+}

From 97e815b99a4beaa22b4fe2ecb76557a0e04c7af0 Mon Sep 17 00:00:00 2001
From: Yiannis Charalambous
Date: Tue, 26 Sep 2023 23:56:56 +0100
Subject: [PATCH 5/5] ESBMC-AI can now be used with VERIFICATION SUCCESSFUL samples

---
 esbmc_ai_lib/__main__.py              | 24 ++++++++++----
 .../commands/optimize_code_command.py | 33 ++++++++++++-------
 esbmc_ai_lib/user_chat.py             | 13 ++++++++
 3 files changed, 53 insertions(+), 17 deletions(-)

diff --git a/esbmc_ai_lib/__main__.py b/esbmc_ai_lib/__main__.py
index 3d341c9..f3dfad0 100755
--- a/esbmc_ai_lib/__main__.py
+++ b/esbmc_ai_lib/__main__.py
@@ -155,6 +155,9 @@ def init_commands() -> None:
     fix_code_command.on_solution_signal.add_listener(chat.set_solution)
     fix_code_command.on_solution_signal.add_listener(update_solution)
 
+    optimize_code_command.on_solution_signal.add_listener(chat.set_solution)
+    optimize_code_command.on_solution_signal.add_listener(update_solution)
+
 
 def _run_command_mode(
     command: ChatCommand,
@@ -174,14 +177,15 @@ def _run_command_mode(
             sys.exit(1)
         else:
             print(solution)
-    # elif command == verify_code_command:
-    #     raise NotImplementedError()
     elif command == optimize_code_command:
-        optimize_code_command.execute(
+        error, solution = optimize_code_command.execute(
             file_path=get_main_source_file_path(),
             source_code=source_code,
             function_names=args,
         )
+
+        print(solution)
+        sys.exit(1 if error else 0)
     else:
         command.execute()
     sys.exit(0)
@@ -302,12 +306,13 @@ def main() -> None:
 
     # ESBMC will output 0 for verification success and 1 for verification
     # failed, if anything else gets thrown, it's an ESBMC error.
-    if exit_code == 0:
+    if not config.allow_successful and exit_code == 0:
         print("Success!")
         print(esbmc_output)
         sys.exit(0)
-    elif exit_code != 1:
+    elif exit_code != 0 and exit_code != 1:
         print(f"ESBMC exit code: {exit_code}")
+        print(f"ESBMC Output:\n\n{esbmc_err_output}")
         sys.exit(1)
 
     # Command mode: Check if command is called and call it.
@@ -396,11 +401,18 @@ def main() -> None:
                 continue
             elif command == optimize_code_command.command_name:
                 # Optimize Code command
-                optimize_code_command.execute(
+                error, solution = optimize_code_command.execute(
                     file_path=get_main_source_file_path(),
                     source_code=get_main_source_file().content,
                     function_names=command_args,
                 )
+
+                if error:
+                    # Print error
+                    print("\n" + solution + "\n")
+                else:
+                    print(f"\nOptimizations Completed:\n```c\n{solution}```\n")
+
                 continue
             else:
                 # Commands without parameters or returns are handled automatically.
diff --git a/esbmc_ai_lib/commands/optimize_code_command.py b/esbmc_ai_lib/commands/optimize_code_command.py
index 9125b61..4c4a5ba 100644
--- a/esbmc_ai_lib/commands/optimize_code_command.py
+++ b/esbmc_ai_lib/commands/optimize_code_command.py
@@ -3,7 +3,7 @@
 import os
 import sys
 from os import get_terminal_size
-from typing import Iterable, Optional
+from typing import Iterable, Optional, Tuple
 from typing_extensions import override
 from string import Template
 from random import randint
@@ -13,6 +13,7 @@
 from esbmc_ai_lib.frontend.c_types import is_primitive_type
 from esbmc_ai_lib.frontend.esbmc_code_generator import ESBMCCodeGenerator
 from esbmc_ai_lib.esbmc_util import esbmc_load_source_code
+from esbmc_ai_lib.msg_bus import Signal
 from esbmc_ai_lib.solution_generator import SolutionGenerator
 from .chat_command import ChatCommand
 from .. import config
@@ -31,6 +32,7 @@ def __init__(self) -> None:
             command_name="optimize-code",
             help_message="(EXPERIMENTAL) Optimizes the code of a specific function or the entire file if a function is not specified. Usage: optimize-code [function_name]",
         )
+        self.on_solution_signal: Signal = Signal()
 
     def _get_functions_list(
         self,
@@ -280,8 +282,19 @@ def get_function_from_collection(
 
     @override
     def execute(
-        self, file_path: str, source_code: str, function_names: list[str]
-    ) -> None:
+        self,
+        file_path: str,
+        source_code: str,
+        function_names: list[str],
+    ) -> Tuple[bool, str]:
+        """Executes the Optimize Code command. The command takes the following inputs:
+        * file_path: The path of the source code file.
+        * source_code: The source code file contents.
+        * function_names: List of function names to optimize. Main is always excluded.
+
+        Returns a `Tuple[bool, str]` which is the flag if there was an error, and the
+        source code from the LLM.
+        """
         clang_ast: ast.ClangAST = ast.ClangAST(
             file_path=file_path,
             source_code=source_code,
@@ -323,7 +336,7 @@ def execute(
                 function_name=fn_name,
             )
 
-            new_source_code: str = SolutionGenerator.get_code_from_solution(
+            optimized_source_code: str = SolutionGenerator.get_code_from_solution(
                 response.message.content
             )
 
@@ -335,19 +348,17 @@ def execute(
             # Check equivalence
             equal: bool = self.check_function_pequivalence(
                 original_source_code=source_code,
-                new_source_code=new_source_code,
+                new_source_code=optimized_source_code,
                 function_name=fn_name,
             )
 
             if equal:
-                new_source_code = response.message.content
+                # If equal, then return with explanation.
+                new_source_code = optimized_source_code
                 break
             elif attempt == max_retries - 1:
-                print("Failed all attempts...")
-                return
+                return True, "Failed all attempts..."
             else:
                 print("Failed attempt", attempt)
 
-        print("\nOptimizations Completed:\n")
-        print(new_source_code)
-        print()
+        return False, new_source_code
diff --git a/esbmc_ai_lib/user_chat.py b/esbmc_ai_lib/user_chat.py
index e736dd0..ea7deb6 100644
--- a/esbmc_ai_lib/user_chat.py
+++ b/esbmc_ai_lib/user_chat.py
@@ -59,6 +59,19 @@ def set_solution(self, source_code: str) -> None:
             message=AIMessage(content="Understood"), protected=True
         )
 
+    def set_optimized_solution(self, source_code: str) -> None:
+        self.solution = source_code
+        self.push_to_message_stack(
+            message=HumanMessage(
+                content=f"Here is the optimized code:\n\n{source_code}"
+            ),
+            protected=True,
+        )
+
+        self.push_to_message_stack(
+            message=AIMessage(content="Understood"), protected=True
+        )
+
     @override
     def compress_message_stack(self) -> None:
         """Uses ConversationSummaryMemory from Langchain to summarize the conversation of all the non-protected
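
Below is a small caller sketch, not taken from the patches above, illustrating the Tuple[bool, str] contract that PATCH 5 gives the optimize-code command's execute(). The class name OptimizeCodeCommand and its import path are assumptions; only the keyword argument names and the (error, solution) return shape come from the diffs.

# Hypothetical caller sketch -- not part of the patch series above.
# Assumption: the command class defined in optimize_code_command.py is named
# OptimizeCodeCommand; argument names and return shape are taken from PATCH 5.
from esbmc_ai_lib.commands.optimize_code_command import OptimizeCodeCommand


def run_optimize(file_path: str, source_code: str, functions: list[str]) -> int:
    command = OptimizeCodeCommand()
    # execute() now returns Tuple[bool, str]: the bool is True when every retry
    # attempt failed, and the str holds either the optimized source code or the
    # failure message.
    error, solution = command.execute(
        file_path=file_path,
        source_code=source_code,
        function_names=functions,
    )
    print(solution)
    return 1 if error else 0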